text
stringlengths 2
99.9k
| meta
dict |
---|---|
/*
* Copyright 2019-present HiveMQ GmbH
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hivemq.websocket;
import io.netty.buffer.ByteBuf;
import io.netty.channel.ChannelHandlerContext;
import io.netty.handler.codec.MessageToMessageEncoder;
import io.netty.handler.codec.http.websocketx.BinaryWebSocketFrame;
import io.netty.util.ReferenceCountUtil;
import java.util.List;
/**
* @author Lukas Brandl
*/
public class MQTTWebsocketEncoder extends MessageToMessageEncoder<ByteBuf> {

    @Override
    protected void encode(final ChannelHandlerContext ctx, final ByteBuf msg, final List<Object> out) throws Exception {
        // Wrap the raw MQTT bytes in a binary websocket frame. The frame is
        // retained before it is handed downstream because the
        // MessageToMessageEncoder framework releases the input buffer after
        // encode() returns, while the frame still references that buffer.
        final BinaryWebSocketFrame frame = new BinaryWebSocketFrame(msg);
        frame.retain();
        out.add(frame);
    }
}
| {
"pile_set_name": "Github"
} |
#include <stdio_ext.h>
#include <stdlib.h>
#include <string.h>
/* Exercise the glibc stdio extension interfaces from <stdio_ext.h>
   (__fbufsize, __freading, __fwriting, __freadable, __fwritable,
   __fpending, __fpurge, __flbf) against a temporary stream.
   Returns/exits nonzero on any mismatch.  */
int
main (void)
{
  FILE *fp;
  const char teststring[] = "hello world";
  char buf[3072];
  int result = 0;
  char readbuf[256];

  /* Open a file.  tmpfile can fail (e.g. no writable tmp dir), so check.  */
  fp = tmpfile ();
  if (fp == NULL)
    {
      printf ("tmpfile failed: %m\n");
      exit (1);
    }

  /* Set a buffer.  */
  if (setvbuf (fp, buf, _IOFBF, sizeof buf) == EOF)
    {
      printf ("setvbuf failed: %m\n");
      exit (1);
    }

  /* Get the buffer size.  (Message previously misspelled the function as
     "__fbusize"; also use the standard %zu for size_t values instead of
     the obsolete glibc-only %Zd.)  */
  if (__fbufsize (fp) != sizeof buf)
    {
      printf ("__fbufsize() reported a buffer size of %zu bytes;"
	      " we installed a buffer with %zu bytes\n",
	      __fbufsize (fp), sizeof buf);
      result = 1;
    }

  /* Write something and read it back.  */
  if (fputs (teststring, fp) == EOF)
    {
      printf ("writing to new stream failed: %m\n");
      exit (1);
    }
  rewind (fp);
  if (fgets (readbuf, sizeof readbuf, fp) == NULL)
    {
      printf ("reading from new stream failed: %m\n");
      exit (1);
    }
  if (strcmp (readbuf, teststring) != 0)
    {
      puts ("not the correct string read");
      exit (1);
    }

  /* The file must be opened for reading and writing.  After the fgets the
     stream was last read from, not last written to.  */
  if (__freading (fp) == 0)
    {
      puts ("__freading() reported stream is not last read from");
      result = 1;
    }
  if (__fwriting (fp) != 0)
    {
      puts ("__fwriting() reported stream is write-only or last written to");
      result = 1;
    }

  /* Write again; now the stream must report as last written to.  */
  rewind (fp);
  if (fputs (teststring, fp) == EOF)
    {
      printf ("writing(2) to new stream failed: %m\n");
      exit (1);
    }
  if (__fwriting (fp) == 0)
    {
      /* Fixed garbled message ("doe snot reported").  */
      puts ("__fwriting() does not report stream is last written to");
      result = 1;
    }
  if (__freading (fp) != 0)
    {
      puts ("__freading() reported stream is last read from");
      result = 1;
    }
  /* A tmpfile stream is read/write, so both of these must be nonzero.
     (Messages previously named the wrong function, __freading.)  */
  if (__freadable (fp) == 0)
    {
      puts ("__freadable() reports stream is not readable");
      result = 1;
    }
  if (__fwritable (fp) == 0)
    {
      puts ("__fwritable() reports stream is not writable");
      result = 1;
    }

  /* The string we wrote above should still be in the buffer.  */
  if (__fpending (fp) != strlen (teststring))
    {
      printf ("__fpending() returned %zu; expected %zu\n",
	      __fpending (fp), strlen (teststring));
      result = 1;
    }

  /* Discard all the output.  */
  __fpurge (fp);

  /* And check again.  */
  if (__fpending (fp) != 0)
    {
      printf ("__fpending() returned %zu; expected 0\n",
	      __fpending (fp));
      result = 1;
    }

  /* Find out whether buffer is line buffered.  */
  if (__flbf (fp) != 0)
    {
      puts ("__flbf() reports line buffered but it is fully buffered");
      result = 1;
    }

  if (setvbuf (fp, buf, _IOLBF, sizeof buf) == EOF)
    {
      printf ("setvbuf(2) failed: %m\n");
      exit (1);
    }
  if (__flbf (fp) == 0)
    {
      puts ("__flbf() reports file is not line buffered");
      result = 1;
    }

  if (setvbuf (fp, NULL, _IONBF, 0) == EOF)
    {
      printf ("setvbuf(3) failed: %m\n");
      exit (1);
    }
  if (__flbf (fp) != 0)
    {
      puts ("__flbf() reports line buffered but it is not buffered");
      result = 1;
    }

  fclose (fp);
  return result;
}
| {
"pile_set_name": "Github"
} |
/*******************************************************************************
* Copyright (c) 2015, 2020 IBM Corporation and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* IBM Corporation - initial API and implementation
*******************************************************************************/
package com.ibm.ws.app.manager;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.Map;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.ConcurrentHashMap;
import org.osgi.service.component.ComponentContext;
import org.osgi.service.component.annotations.Component;
import org.osgi.service.component.annotations.ConfigurationPolicy;
import org.osgi.service.component.annotations.Modified;
import com.ibm.ws.app.manager.internal.AppManagerConstants;
/**
 * OSGi declarative-services component holding application-manager
 * configuration (auto-expansion of application archives, Jandex usage,
 * start/stop timeouts) plus a small persisted cache of extracted-archive
 * metadata so unchanged applications are not re-extracted across restarts.
 */
@Component(service = ApplicationManager.class,
           immediate = true,
           configurationPid = AppManagerConstants.MANAGEMENT_PID,
           configurationPolicy = ConfigurationPolicy.REQUIRE,
           property = "service.vendor=IBM")
public class ApplicationManager {
    private boolean expandApps;
    private boolean useJandex;
    private long startTimeout;
    private long stopTimeout;
    // Persistent record of extracted applications, kept in the bundle's
    // private data area; may be null if the framework has no filesystem
    // support for bundle data files (per the OSGi spec).
    private File extractedLog;
    private final ConcurrentMap<String, ExtractedLogData> extractsDataLog = new ConcurrentHashMap<>();

    /**
     * DS method to activate this component. Applies the initial configuration
     * and, when app expansion is enabled, reloads the extraction-metadata
     * cache persisted by {@link #deactivate(ComponentContext)}.
     *
     * @param compcontext the context of this component
     * @param properties the configuration properties
     */
    protected void activate(ComponentContext compcontext, Map<String, Object> properties) {
        modified(compcontext, properties);
        // getDataFile may return null when the platform lacks filesystem
        // support, so every use of extractedLog is null-guarded.
        extractedLog = compcontext.getBundleContext().getBundle().getDataFile("expandApps");
        if (expandApps && extractedLog != null && extractedLog.exists()) {
            try (DataInputStream din = new DataInputStream(new FileInputStream(extractedLog))) {
                long format = din.readLong(); // file format version
                if (format == 0) {
                    int len = din.readInt(); // number of entries
                    for (int i = 0; i < len; i++) {
                        String id = din.readUTF();
                        long updated = din.readLong();
                        long size = din.readLong();
                        extractsDataLog.put(id, new ExtractedLogData(id, updated, size));
                    }
                }
            } catch (IOException ioe) {
                // If we get a failure assume the file is corrupted and delete;
                // worst case is we will re-extract.
                extractedLog.delete();
            }
        }
    }

    /**
     * DS method to deactivate this component. Persists the extraction
     * metadata when expansion is enabled, otherwise removes any stale file.
     *
     * @param compcontext the context of this component
     */
    protected void deactivate(ComponentContext compcontext) {
        if (extractedLog == null) {
            return; // no bundle data area available; nothing to persist or clean
        }
        if (expandApps) {
            try (DataOutputStream dout = new DataOutputStream(new FileOutputStream(extractedLog))) {
                dout.writeLong(0); // file format version
                dout.writeInt(extractsDataLog.size()); // number of entries
                for (ExtractedLogData data : extractsDataLog.values()) {
                    dout.writeUTF(data.id); // the id
                    dout.writeLong(data.lastUpdated); // the last time it was updated
                    dout.writeLong(data.size); // the file size
                }
            } catch (IOException ioe) {
                // If we hit this just delete the file on the assumption that
                // we will just re-extract, which is less efficient but not
                // a total failure.
                extractedLog.delete();
            }
        } else if (extractedLog.exists()) {
            extractedLog.delete(); // attempt to delete since we aren't running expanded apps.
        }
    }

    /**
     * DS method to modify the configuration of this component.
     *
     * @param compcontext the context of this component
     * @param properties the updated configuration properties
     */
    @Modified
    protected void modified(ComponentContext compcontext, Map<String, Object> properties) {
        // Read every property through getProperty so a badly typed value
        // falls back to its default instead of failing component activation
        // with a ClassCastException (autoExpand previously used a raw cast).
        Boolean autoExpandValue = getProperty(properties, "autoExpand", Boolean.FALSE);
        setExpandApps(autoExpandValue == null ? false : autoExpandValue);
        Boolean useJandexValue = getProperty(properties, "useJandex", false);
        setUseJandex(useJandexValue == null ? false : useJandexValue);
        long startTimeoutValue = getProperty(properties, "startTimeout", 30L);
        setStartTimeout(startTimeoutValue);
        long stopTimeoutValue = getProperty(properties, "stopTimeout", 30L);
        setStopTimeout(stopTimeoutValue);
        ApplicationStateCoordinator.setApplicationStartTimeout(startTimeoutValue);
        ApplicationStateCoordinator.setApplicationStopTimeout(stopTimeoutValue);
    }

    /**
     * Get a configuration property; if it is unset or of the wrong type,
     * return the supplied default so that the server still starts.
     */
    @SuppressWarnings("unchecked")
    private <T> T getProperty(Map<String, Object> properties, String name, T deflt) {
        T value = deflt;
        try {
            T prop = (T) properties.get(name);
            if (prop != null) {
                value = prop;
            }
        } catch (ClassCastException e) {
            // auto FFDC and allow the default value to be returned so that the server still starts
        }
        return value;
    }

    /**
     * Decide whether the application archive needs (re-)extraction, and
     * record the archive's current timestamp/size for the next check.
     *
     * @param id the application identifier
     * @param warFile the application archive on disk
     * @param expandedDir the directory the archive would be expanded into
     * @return true if the app should expand, false otherwise
     */
    public boolean shouldExpand(String id, File warFile, File expandedDir) {
        boolean result = true;
        long lastModified = warFile.lastModified();
        long size = warFile.length();
        ExtractedLogData data = extractsDataLog.get(id);
        // Only skip extraction when the expanded dir already exists AND the
        // archive's timestamp and size match what we recorded last time.
        if (expandedDir.exists() && data != null) {
            result = data.lastUpdated != lastModified || data.size != size;
        }
        if (result) {
            extractsDataLog.put(id, new ExtractedLogData(id, lastModified, size));
        }
        return result;
    }

    /** @return true if applications should be auto-expanded */
    public boolean getExpandApps() {
        return this.expandApps;
    }

    /** @param b whether applications should be auto-expanded */
    private void setExpandApps(boolean b) {
        this.expandApps = b;
    }

    /** @return true if Jandex indexes should be used */
    public boolean getUseJandex() {
        return this.useJandex;
    }

    /** @param b whether Jandex indexes should be used */
    private void setUseJandex(boolean b) {
        this.useJandex = b;
    }

    /** @return the application start timeout (seconds) */
    public long getStartTimeout() {
        return this.startTimeout;
    }

    /** @param b the application start timeout (seconds) */
    private void setStartTimeout(long b) {
        this.startTimeout = b;
    }

    /** @return the application stop timeout (seconds) */
    public long getStopTimeout() {
        return this.stopTimeout;
    }

    /** @param b the application stop timeout (seconds) */
    private void setStopTimeout(long b) {
        this.stopTimeout = b;
    }

    /** Immutable record of one extracted archive: id, timestamp and size. */
    private static class ExtractedLogData {
        private final String id;
        private final long lastUpdated;
        private final long size;

        public ExtractedLogData(String id, long updated, long size) {
            this.id = id;
            this.lastUpdated = updated;
            this.size = size;
        }
    }
}
| {
"pile_set_name": "Github"
} |
module ActiveMerchant #:nodoc:
  module Billing #:nodoc:
    # Base class for cryptographic payment-data tokens used in EMV-style
    # transactions such as Apple Pay. The raw payment data may arrive as any
    # data type and may be padded with cryptographer-specific metadata, which
    # concrete subclasses are responsible for parsing and interpreting. As
    # with credit cards, a subclass must also report a token type string
    # (e.g. 'apple_pay' or 'stripe') when the target gateway processes
    # these tokens.
    class PaymentToken
      attr_reader :payment_data

      def initialize(payment_data, options = {})
        @metadata = options.with_indifferent_access
        @payment_data = payment_data
      end

      # Subclasses must override this to return the token type string.
      def type
        raise NotImplementedError
      end
    end
  end
end
| {
"pile_set_name": "Github"
} |
[[_appendix_ast_transformations]]
= AST Transformations
The following list summarizes all AST transformations available to Groovy-based
projects when the `griffon-groovy-compile-{griffon-version}.jar` dependency is
added to a project:
- <<_models_change_listener_transformation,@ChangeListener>>
- <<_events_eventpublisher_transformation,@EventPublisher>>
- <<_models_invalidation_listener_transformation,@InvalidationListener>>
- <<_models_list_change_listener_transformation,@ListChangeListener>>
- <<_models_map_change_listener_transformation,@MapChangeListener>>
- <<_internationalization_message_source_transformation,@MessageSourceAware>>
- <<_mvc_mvcaware_transformation,@MVCAware>>
- <<_models_observable_transformation,@Observable>>
- <<_models_fxobservable_transformation,@FXObservable>>
- <<_models_property_listener_transformation,@PropertyListener>>
- <<_resources_resource_resolver_transformation,@ResourceResolverAware>>
- <<_resources_resources_aware_transformation,@ResourcesAware>>
- <<_threading_annotation,@Threading>>
- <<_threading_transformation,@ThreadingAware>>
- <<_models_vetoable_transformation,@Vetoable>>
| {
"pile_set_name": "Github"
} |
//go:generate struct-markdown
package common
import (
"fmt"
"github.com/hashicorp/packer/common"
"github.com/hashicorp/packer/template/interpolate"
)
// OutputConfig holds the local and remote output-directory settings for a
// build.
type OutputConfig struct {
	// This is the path on your local machine (the one running Packer) to the
	// directory where the resulting virtual machine will be created.
	// This may be relative or absolute. If relative, the path is relative to
	// the working directory when packer is executed.
	//
	// If you are running a remote esx build, the output_dir is the path on your
	// local machine (the machine running Packer) to which Packer will export
	// the vm if you have `"skip_export": false`. If you want to manage the
	// virtual machine's path on the remote datastore, use `remote_output_dir`.
	//
	// This directory must not exist or be empty prior to running
	// the builder.
	//
	// By default this is output-BUILDNAME where "BUILDNAME" is the name of the
	// build.
	OutputDir string `mapstructure:"output_directory" required:"false"`
	// This is the directory on your remote esx host where you will save your
	// vm, relative to your remote_datastore.
	//
	// This option's default value is your `vm_name`, and the final path of your
	// vm will be vmfs/volumes/$remote_datastore/$vm_name/$vm_name.vmx where
	// `$remote_datastore` and `$vm_name` match their corresponding template
	// options
	//
	// For example, setting `"remote_output_directory": "path/to/subdir`
	// will create a directory `/vmfs/volumes/remote_datastore/path/to/subdir`.
	//
	// Packer will not create the remote datastore for you; it must already
	// exist. However, Packer will create all directories defined in the option
	// that do not currently exist.
	//
	// This option will be ignored unless you are building on a remote esx host.
	RemoteOutputDir string `mapstructure:"remote_output_directory" required:"false"`
}
// Prepare applies defaults to the output configuration. When no local output
// directory is configured it falls back to "output-<build name>". It never
// reports errors.
func (c *OutputConfig) Prepare(ctx *interpolate.Context, pc *common.PackerConfig) []error {
	if len(c.OutputDir) == 0 {
		c.OutputDir = fmt.Sprintf("output-%s", pc.PackerBuildName)
	}
	return nil
}
| {
"pile_set_name": "Github"
} |
/**
* Copyright 2018 The AMP HTML Authors. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS-IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// FORK of DefinatelyTyped definitions for `google-closure-compiler`
// Type definitions for google-closure-compiler
// Project: https://github.com/chadkillingsworth/closure-compiler-npm
// Definitions by: Evan Martin <http://neugierig.org>
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
/// <reference types="node" />
declare module 'google-closure-compiler' {
  import * as child_process from 'child_process';

  // The "json_streams" compiler flag lets the compiler accept/produce
  // arrays of JSON objects in this shape for input/output.
  interface JSONStreamFile {
    path: string;
    src: string;
    srcmap?: string; // TODO(evan): pass through source maps.
  }

  // Shape of an instantiated compiler wrapper.
  interface Compiler {
    JAR_PATH: string | null;
    javaPath: string;
    logger: (...args: any[]) => void;
    spawnOptions: { [key: string]: string };
    // Spawns the compiler process; the callback receives the exit code and
    // the captured stdout/stderr.
    run(
      callback?: (exitCode: number, stdout: string, stderr: string) => void,
    ): child_process.ChildProcess;
    getFullCommand(): string;
  }

  // Compiler flags: either raw CLI args or a flag-name -> value(s) map.
  type CompileOption = string | boolean;
  type CompileOptions = string[] | { [key: string]: CompileOption | CompileOption[] };

  // Constructor plus static paths into the installed compiler package.
  export const compiler: {
    new (opts: CompileOptions | string[], extraCommandArgs?: string[]): Compiler;
    JAR_PATH: string;
    COMPILER_PATH: string;
    CONTRIB_PATH: string;
  };
}
| {
"pile_set_name": "Github"
} |
/*
* The MIT License
*
* Copyright (c) 2016, CloudBees, Inc.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
// NOTE(review): the @Library annotation and import below appear twice,
// verbatim — presumably this fixture exercises repeated loading of the same
// shared library; confirm the duplication is intentional before removing it.
@Library('zot-stuff@master')
import org.foo.Zot
@Library('zot-stuff@master')
import org.foo.Zot

// Instantiate the shared-library helper, handing it the pipeline step context.
def z = new Zot(steps)

pipeline {
    agent any
    stages {
        stage ('prepare') {
            steps {
                script {
                    // Invoke the shared-library class from a scripted block.
                    z.echo("hello")
                }
            }
        }
    }
}
| {
"pile_set_name": "Github"
} |
/*
Copyright 2015 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Package parser provides code to parse go files, type-check them, extract the
// types.
package parser // import "k8s.io/gengo/parser"
| {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="utf-8"?>
<COLLADA xmlns="http://www.collada.org/2005/11/COLLADASchema" version="1.4.1" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<asset>
<created>2017-03-08T13:35:39</created>
<modified>2017-03-08T13:35:39</modified>
</asset>
<library_animations>
<animation>
<source id="source_0">
<float_array id="float_array_0" count="1">0</float_array>
<technique_common>
<accessor source="#float_array" count="1"/>
</technique_common>
</source>
<source id="source_2">
<IDREF_array count="1">node_2 unknown</IDREF_array>
</source>
<sampler id="sampler_0">
<input semantic="INPUT" source="#source_0"/>
</sampler>
<channel source="#sampler_0" target="node_1/translate.X"/>
</animation>
</library_animations>
<library_cameras>
<camera id="camera_0">
<optics>
<technique_common>
<perspective>
<xfov>90</xfov>
<yfov>90</yfov>
<znear>1</znear>
<zfar>INF</zfar>
</perspective>
</technique_common>
</optics>
</camera>
</library_cameras>
<library_controllers>
<controller id="controller_0">
<skin source="#geometry_0">
<source id="joints">
<Name_array id="name_array_0" count="1">Root</Name_array>
<technique_common>
<accessor source="#name_array_0" count="1"/>
</technique_common>
</source>
<source id="weights">
<float_array id="float_array_1" count="1">0</float_array>
<technique_common>
<accessor source="#float_array_1" count="1"/>
</technique_common>
</source>
<source id="inv_bind_mats">
<float_array id="float_array_2" count="16">1 0 0 0 0 1 0 0 0 0 1 0 0 0 0 1</float_array>
<technique_common>
<accessor source="#float_array_2" count="1"/>
</technique_common>
</source>
<joints>
<input semantic="JOINT" source="#joints"/>
<input semantic="INV_BIND_MATRIX" source="#inv_bind_mats"/>
</joints>
<vertex_weights count="1">
<input offset="0" semantic="JOINT" source="#joints"/>
<input offset="0" semantic="WEIGHT" source="#weights"/>
<vcount>1</vcount>
<v>0 0</v>
</vertex_weights>
</skin>
</controller>
</library_controllers>
<library_effects>
<effect id="effect_0">
<profile_GLSL>
<include sid="shader" url="./shader.glsl"/>
<technique sid="shaded">
<pass/>
</technique>
</profile_GLSL>
</effect>
</library_effects>
<library_geometries>
<geometry id="geometry_0">
<mesh>
<source id="source_1">
<float_array id="float_array_3" count="1">0</float_array>
<technique_common>
<accessor source="#float_array_3" count="1" stride="1">
<param name="X" type="float"/>
</accessor>
</technique_common>
</source>
<vertices id="vertices_0">
<input semantic="POSITION" source="#float_array_3"/>
</vertices>
<triangles material="initialShadingGroup" count="1">
<input semantic="VERTEX" source="#float_array_3" offset="0"/>
<p>0 0 0</p>
</triangles>
</mesh>
</geometry>
<geometry id="geometry_1">
<convex_mesh convex_hull_of="#geometry_0"/>
</geometry>
</library_geometries>
<library_images>
<image>
<init_from>./image.jpg</init_from>
</image>
<image>
<init_from></init_from>
</image>
</library_images>
<library_nodes>
<node id="node_0"/>
</library_nodes>
<library_physics_models>
<physics_model id="physics_model_0">
<rigid_body sid="rigid_body_a">
<technique_common>
<dynamic>0</dynamic>
<mass>1</mass>
<mass_frame>
<translate sid="translate">0 0 0</translate>
</mass_frame>
<inertia>1 1 1</inertia>
<physics_material id="physics_material_0">
<technique_common>
<dynamic_friction>0.5</dynamic_friction>
<restitution>0.5</restitution>
<static_friction>0.5</static_friction>
</technique_common>
</physics_material>
<shape>
<density>1</density>
<box>
<half_extents>1 1 1</half_extents>
</box>
</shape>
</technique_common>
</rigid_body>
</physics_model>
</library_physics_models>
<library_physics_scenes>
<physics_scene id="physics_scene_0">
<instance_physics_model url="#physics_model_0" parent="#physics_model_0">
<instance_rigid_body body="rigid_body_a" target="#node_1">
<technique_common/>
</instance_rigid_body>
</instance_physics_model>
<technique_common>
<gravity>0 -9.81 0</gravity>
</technique_common>
</physics_scene>
</library_physics_scenes>
<library_visual_scenes>
<visual_scene>
<node id="node_1">
<translate sid="translate">0 0 0</translate>
<instance_node url="#node_0"/>
</node>
<node id="node_2">
<instance_controller url="#controller_0">
<skeleton>#node_3</skeleton>
</instance_controller>
<node id="node_3" sid="Root"/>
</node>
<node id="node_3">
<instance_node url="./links_error_ref.dae#unknown"/>
</node>
<node id="node_4">
<instance_node url="./links_error_ref2.dae#unknown"/>
</node>
<evaluate_scene>
<render camera_node="./unknown.dae#camera_0"/>
</evaluate_scene>
</visual_scene>
</library_visual_scenes>
</COLLADA> | {
"pile_set_name": "Github"
} |
<?php
/*
* Copyright 2014 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
/**
 * Generated model for a long-running Operation resource of the Cloud
 * Resource Manager API.
 *
 * NOTE(review): the $errorType / $errorDataType pair follows the generated
 * google-api-php-client convention — presumably Google_Model uses them to
 * (de)serialize the `error` property as a
 * Google_Service_CloudResourceManager_Status; confirm against Google_Model
 * before relying on it, since no $error property is declared here.
 */
class Google_Service_CloudResourceManager_Operation extends Google_Model
{
  // True when the operation has finished (successfully or with an error).
  public $done;
  protected $errorType = 'Google_Service_CloudResourceManager_Status';
  protected $errorDataType = '';
  public $metadata;
  public $name;
  public $response;

  public function setDone($done)
  {
    $this->done = $done;
  }
  public function getDone()
  {
    return $this->done;
  }
  /**
   * @param Google_Service_CloudResourceManager_Status
   */
  public function setError(Google_Service_CloudResourceManager_Status $error)
  {
    $this->error = $error;
  }
  /**
   * @return Google_Service_CloudResourceManager_Status
   */
  public function getError()
  {
    return $this->error;
  }
  public function setMetadata($metadata)
  {
    $this->metadata = $metadata;
  }
  public function getMetadata()
  {
    return $this->metadata;
  }
  public function setName($name)
  {
    $this->name = $name;
  }
  public function getName()
  {
    return $this->name;
  }
  public function setResponse($response)
  {
    $this->response = $response;
  }
  public function getResponse()
  {
    return $this->response;
  }
}
| {
"pile_set_name": "Github"
} |
package daemon
import (
"github.com/viant/toolbox/url"
"strings"
)
//StartRequest represents service request start
type StartRequest struct {
	Target    *url.Resource `required:"true" description:"target host"` //target host
	Service   string        `required:"true" `                          //service name
	Exclusion string        `description:"optional exclusion fragment in case there are more then one matching provided name service"` //exclusion if there is more than one service matching service group
}

//StartResponse represents daemon start response
type StartResponse struct {
	*Info
}

//StatusRequest represents status request
type StatusRequest struct {
	Target    *url.Resource `required:"true" description:"target host"` //target host
	Service   string        `required:"true" `                          //service name
	Exclusion string        //exclusion if there is more than one service matching service group
}

//StatusResponse represent status response
type StatusResponse struct {
	*Info
}

//Info represents a service info
type Info struct {
	Service  string //requested service name
	Path     string //path
	Pid      int    // process ID
	Type     int    //type
	Domain   string // how the service was launched (e.g. launchd domain) — TODO confirm
	State    string //state
	Launched bool
}

//StopRequest represents a stop request.
type StopRequest struct {
	Target    *url.Resource `required:"true" description:"target host"` //target host
	Service   string        `required:"true"`                           //service name
	Exclusion string        //exclusion if there is more than one service matching service group
}

//StopResponse represents a stop response
type StopResponse struct {
	*Info
}
// IsActive reports whether the service's State is "running",
// compared case-insensitively.
func (s *Info) IsActive() bool {
	state := strings.ToLower(s.State)
	return state == "running"
}
| {
"pile_set_name": "Github"
} |
HTTP/1.1 200 OK
Server: nginx
Date: Mon, 22 Dec 2014 14:16:12 GMT
Content-Type: text/html
Connection: close
<html>
<head><title>200 OK</title></head>
<body bgcolor="white">
<center><h1>200 OK</h1></center>
<hr><center>nginx</center>
</body>
</html>
| {
"pile_set_name": "Github"
} |
package ods;
import java.util.AbstractMap;
import java.util.Map;
import java.util.Set;
/**
 * A Map implemented on top of a USet of key/value entries. Entry equality
 * and hashing are based on the key only, so the backing set behaves like a
 * map keyed on K.
 */
public class USetMap<K,V> extends AbstractMap<K,V> {
	/**
	 * Map.Entry adapter with key-only equality.
	 *
	 * NOTE: this deliberately violates the Map.Entry#equals(Object) contract
	 * (which also compares values) because lookups use probe entries with a
	 * null value; key-only equality is required for find/remove to work.
	 * @author morin
	 */
	protected class Entry implements Map.Entry<K, V> {
		K k;
		V v;
		public Entry(K k, V v) {
			this.k = k;
			this.v = v;
		}
		/**
		 * Key-only equality. This now overrides Object.equals(Object): the
		 * previous equals(Entry) overload was never selected when the USet
		 * compared elements through Map.Entry-typed references, so set
		 * lookups fell back to identity comparison.
		 */
		@Override
		public boolean equals(Object o) {
			if (o == this)
				return true;
			return o instanceof Map.Entry && k.equals(((Map.Entry<?,?>) o).getKey());
		}
		@Override
		public int hashCode() {
			return k.hashCode();
		}
		@Override
		public K getKey() {
			return k;
		}
		@Override
		public V getValue() {
			return v;
		}
		@Override
		public V setValue(V value) {
			return v = value;
		}
	}

	/** The backing unordered set of entries. */
	protected USet<Map.Entry<K, V>> s;

	public USetMap(USet<Map.Entry<K, V>> s) {
		this.s = s;
	}

	/**
	 * Associate v with k, replacing any existing mapping for k.
	 * @return the previously mapped value, or null if there was none
	 */
	public V put(K k, V v) {
		Entry p = new Entry(k, v);
		Map.Entry<K, V> q = s.remove(p);
		s.add(p);
		return q == null ? null : q.getValue();
	}

	/**
	 * Remove the mapping for k, if present.
	 * @return the removed value, or null if k was not mapped
	 */
	@SuppressWarnings("unchecked")
	public V remove(Object k) {
		// Probe with a null value; Entry equality ignores the value.
		Map.Entry<K,V> p = s.remove(new Entry((K)k, null));
		return p == null ? null : p.getValue();
	}

	@SuppressWarnings("unchecked")
	public boolean containsKey(Object k) {
		return s.find(new Entry((K)k, null)) != null;
	}

	public int size() {
		return s.size();
	}

	public Set<Map.Entry<K, V>> entrySet() {
		return new USetSet<Map.Entry<K, V>>(s);
	}
}
| {
"pile_set_name": "Github"
} |
<?php
declare(strict_types=1);
/**
* This file is part of Hyperf.
*
* @link https://www.hyperf.io
* @document https://hyperf.wiki
* @contact [email protected]
* @license https://github.com/hyperf/hyperf/blob/master/LICENSE
*/
namespace Hyperf\Nats\Listener;
use Hyperf\Contract\StdoutLoggerInterface;
use Hyperf\Event\Contract\ListenerInterface;
use Hyperf\Nats\Event\AfterSubscribe;
class AfterSubscribeListener implements ListenerInterface
{
    /**
     * Logger used to report subscribe timeouts.
     *
     * @var StdoutLoggerInterface
     */
    protected $logger;

    public function __construct(StdoutLoggerInterface $logger)
    {
        $this->logger = $logger;
    }

    /**
     * Events this listener reacts to.
     */
    public function listen(): array
    {
        return [AfterSubscribe::class];
    }

    /**
     * Emit a warning whenever a NATS consumer's subscribe call times out.
     *
     * @param AfterSubscribe $event
     */
    public function process(object $event)
    {
        $consumerName = $event->getConsumer()->getName();
        $message = sprintf(
            'NatsConsumer[%s] subscribe timeout. Try again after 1 ms.',
            $consumerName
        );
        $this->logger->warning($message);
    }
}
| {
"pile_set_name": "Github"
} |
/* Functions for the advimage plugin popup */
var preloadImg = null;
var orgImageWidth, orgImageHeight;
// Runs before the popup dialog initializes: disables window auto-resize and
// pulls in the optional external image-list script configured on the host.
function preinit() {
	// Initialize
	tinyMCE.setWindowArg('mce_windowresize', false);

	// Import external list url javascript
	var url = tinyMCE.getParam("external_image_list_url");
	if (url != null) {
		// Fix relative URLs against the editor document's base path.
		if (url.charAt(0) != '/' && url.indexOf('://') == -1)
			url = tinyMCE.documentBasePath + "/" + url;

		// '<sc'+'ript' is split so this literal cannot prematurely terminate
		// an enclosing <script> element when this file is inlined.
		document.write('<sc'+'ript language="javascript" type="text/javascript" src="' + url + '"></sc'+'ript>');
	}
}
// Delegates URL conversion to the host page's configured converter callback.
// Rewritten to avoid building and eval()'ing a code string: a bracket
// property lookup on tinyMCEPopup.windowOpener performs the same dynamic
// dispatch and preserves the same `this` binding (windowOpener) as the
// original eval'd member-call expression.
function convertURL(url, node, on_save) {
	var callbackName = tinyMCE.settings['urlconverter_callback'];
	return tinyMCEPopup.windowOpener[callbackName](url, node, on_save);
}
// Extracts the image URL from an inline event-handler string of the form
// "... this.src='URL' ...", optionally running it through the configured
// URL converter. Returns "" when no src assignment is found.
function getImageSrc(str) {
	if (!str)
		return "";

	var pos = str.indexOf('this.src=');
	if (pos == -1)
		return "";

	// Skip past "this.src='" (9 characters plus the opening quote),
	// then cut at the closing quote.
	var src = str.substring(pos + 10);
	src = src.substring(0, src.indexOf('\''));

	if (tinyMCE.getParam('convert_urls'))
		src = convertURL(src, null, true);

	return src;
}
function init() {
tinyMCEPopup.resizeToInnerSize();
var formObj = document.forms[0];
var inst = tinyMCE.getInstanceById(tinyMCE.getWindowArg('editor_id'));
var elm = inst.getFocusElement();
var action = "insert";
var html = "";
// Image list src
html = getImageListHTML('imagelistsrc','src','onSelectMainImage');
if (html == "")
document.getElementById("imagelistsrcrow").style.display = 'none';
else
document.getElementById("imagelistsrccontainer").innerHTML = html;
// Image list oversrc
html = getImageListHTML('imagelistover','onmouseoversrc');
if (html == "")
document.getElementById("imagelistoverrow").style.display = 'none';
else
document.getElementById("imagelistovercontainer").innerHTML = html;
// Image list outsrc
html = getImageListHTML('imagelistout','onmouseoutsrc');
if (html == "")
document.getElementById("imagelistoutrow").style.display = 'none';
else
document.getElementById("imagelistoutcontainer").innerHTML = html;
// Src browser
html = getBrowserHTML('srcbrowser','src','image','advimage');
document.getElementById("srcbrowsercontainer").innerHTML = html;
// Over browser
html = getBrowserHTML('oversrcbrowser','onmouseoversrc','image','advimage');
document.getElementById("onmouseoversrccontainer").innerHTML = html;
// Out browser
html = getBrowserHTML('outsrcbrowser','onmouseoutsrc','image','advimage');
document.getElementById("onmouseoutsrccontainer").innerHTML = html;
// Longdesc browser
html = getBrowserHTML('longdescbrowser','longdesc','file','advimage');
document.getElementById("longdesccontainer").innerHTML = html;
// Resize some elements
if (isVisible('srcbrowser'))
document.getElementById('src').style.width = '260px';
if (isVisible('oversrcbrowser'))
document.getElementById('onmouseoversrc').style.width = '260px';
if (isVisible('outsrcbrowser'))
document.getElementById('onmouseoutsrc').style.width = '260px';
if (isVisible('longdescbrowser'))
document.getElementById('longdesc').style.width = '180px';
// Check action
if (elm != null && elm.nodeName == "IMG")
action = "update";
formObj.insert.value = tinyMCE.getLang('lang_' + action, 'Insert', true);
if (action == "update") {
var src = tinyMCE.getAttrib(elm, 'src');
var onmouseoversrc = getImageSrc(tinyMCE.cleanupEventStr(tinyMCE.getAttrib(elm, 'onmouseover')));
var onmouseoutsrc = getImageSrc(tinyMCE.cleanupEventStr(tinyMCE.getAttrib(elm, 'onmouseout')));
src = convertURL(src, elm, true);
// Use mce_src if found
var mceRealSrc = tinyMCE.getAttrib(elm, 'mce_src');
if (mceRealSrc != "") {
src = mceRealSrc;
if (tinyMCE.getParam('convert_urls'))
src = convertURL(src, elm, true);
}
if (onmouseoversrc != "" && tinyMCE.getParam('convert_urls'))
onmouseoversrc = convertURL(onmouseoversrc, elm, true);
if (onmouseoutsrc != "" && tinyMCE.getParam('convert_urls'))
onmouseoutsrc = convertURL(onmouseoutsrc, elm, true);
// Setup form data
var style = tinyMCE.parseStyle(tinyMCE.getAttrib(elm, "style"));
// Store away old size
orgImageWidth = trimSize(getStyle(elm, 'width'))
orgImageHeight = trimSize(getStyle(elm, 'height'));
formObj.src.value = src;
formObj.alt.value = tinyMCE.getAttrib(elm, 'alt');
formObj.title.value = tinyMCE.getAttrib(elm, 'title');
formObj.border.value = trimSize(getStyle(elm, 'border', 'borderWidth'));
formObj.vspace.value = tinyMCE.getAttrib(elm, 'vspace');
formObj.hspace.value = tinyMCE.getAttrib(elm, 'hspace');
formObj.width.value = orgImageWidth;
formObj.height.value = orgImageHeight;
formObj.onmouseoversrc.value = onmouseoversrc;
formObj.onmouseoutsrc.value = onmouseoutsrc;
formObj.id.value = tinyMCE.getAttrib(elm, 'id');
formObj.dir.value = tinyMCE.getAttrib(elm, 'dir');
formObj.lang.value = tinyMCE.getAttrib(elm, 'lang');
formObj.longdesc.value = tinyMCE.getAttrib(elm, 'longdesc');
formObj.usemap.value = tinyMCE.getAttrib(elm, 'usemap');
formObj.style.value = tinyMCE.serializeStyle(style);
// Select by the values
if (tinyMCE.isMSIE)
selectByValue(formObj, 'align', getStyle(elm, 'align', 'styleFloat'));
else
selectByValue(formObj, 'align', getStyle(elm, 'align', 'cssFloat'));
addClassesToList('classlist', 'advimage_styles');
selectByValue(formObj, 'classlist', tinyMCE.getAttrib(elm, 'class'));
selectByValue(formObj, 'imagelistsrc', src);
selectByValue(formObj, 'imagelistover', onmouseoversrc);
selectByValue(formObj, 'imagelistout', onmouseoutsrc);
updateStyle();
showPreviewImage(src, true);
changeAppearance();
window.focus();
} else
addClassesToList('classlist', 'advimage_styles');
// If option enabled default contrain proportions to checked
if (tinyMCE.getParam("advimage_constrain_proportions", true))
formObj.constrain.checked = true;
// Check swap image if valid data
if (formObj.onmouseoversrc.value != "" || formObj.onmouseoutsrc.value != "")
setSwapImageDisabled(false);
else
setSwapImageDisabled(true);
}
// Enables or disables every swap-image (mouseover/mouseout) control in the
// dialog form. state == true disables the controls, false enables them.
function setSwapImageDisabled(state) {
	var form = document.forms[0];

	form.onmousemovecheck.checked = !state;

	setBrowserDisabled('overbrowser', state);
	setBrowserDisabled('outbrowser', state);

	// The image lists are optional; they only exist when an image list is
	// configured, so guard each one before toggling it.
	var optionalLists = [form.imagelistover, form.imagelistout];
	for (var i = 0; i < optionalLists.length; i++) {
		if (optionalLists[i])
			optionalLists[i].disabled = state;
	}

	form.onmouseoversrc.disabled = state;
	form.onmouseoutsrc.disabled = state;
}
// Sets (or removes) an attribute on elm.
// If `value` is omitted, it is read from the dialog form field that has the
// same name as the attribute. An empty value removes the attribute instead.
// After setAttribute, the attribute is additionally mirrored onto the DOM
// property (via eval) so old browsers reflect the change immediately.
function setAttrib(elm, attrib, value) {
var formObj = document.forms[0];
var valueElm = formObj.elements[attrib];
if (typeof(value) == "undefined" || value == null) {
value = "";
if (valueElm)
value = valueElm.value;
}
if (value != "") {
elm.setAttribute(attrib, value);
// Map attribute names to their DOM property equivalents before eval.
if (attrib == "style")
attrib = "style.cssText";
if (attrib == "longdesc")
attrib = "longDesc";
if (attrib == "width") {
attrib = "style.width";
value = value + "px";
// NOTE(review): appending "px" turns "50%" into "50%px", and this
// replace yields "50px", silently dropping the percent sign —
// possibly the intent was to keep "%"; verify against upstream.
value = value.replace(/%px/g, 'px');
}
if (attrib == "height") {
attrib = "style.height";
value = value + "px";
// Same percent-handling caveat as for "width" above.
value = value.replace(/%px/g, 'px');
}
if (attrib == "class")
attrib = "className";
// Assign the mapped DOM property path (e.g. elm.style.width = value).
eval('elm.' + attrib + "=value;");
} else {
if (attrib == 'class')
elm.className = '';
elm.removeAttribute(attrib);
}
}
// Builds an XML-encoded attribute string of the form ' name="value"' for use
// when generating the <img> HTML. If `value` is omitted (undefined/null), it
// is read from the dialog form field with the same name as the attribute.
// Returns "" when the resulting value is empty so the attribute is skipped.
function makeAttrib(attrib, value) {
	if (typeof(value) == "undefined" || value == null) {
		// Only touch the form when the caller did not supply a value.
		var valueElm = document.forms[0].elements[attrib];
		value = valueElm ? valueElm.value : "";
	}

	if (value == "")
		return "";

	// XML encode it. The original replacements had been reduced to no-ops
	// ('&' -> '&', '"' -> '"', ...) by entity decoding; restore the real
	// escapes. Ampersands must be replaced first so the entities introduced
	// by the later replacements are not double-escaped.
	value = value.replace(/&/g, '&amp;');
	value = value.replace(/\"/g, '&quot;');
	value = value.replace(/</g, '&lt;');
	value = value.replace(/>/g, '&gt;');

	return ' ' + attrib + '="' + value + '"';
}
// Applies the dialog values: updates the selected <img> in place, or inserts
// a new <img> tag at the caret. Validates the form first, optionally warns
// about a missing alt text, then either calls setAttrib() per attribute
// (update path) or builds an HTML string via makeAttrib() (insert path).
function insertAction() {
var inst = tinyMCE.getInstanceById(tinyMCE.getWindowArg('editor_id'));
var elm = inst.getFocusElement();
var formObj = document.forms[0];
var src = formObj.src.value;
var onmouseoversrc = formObj.onmouseoversrc.value;
var onmouseoutsrc = formObj.onmouseoutsrc.value;
if (!AutoValidator.validate(formObj)) {
alert(tinyMCE.getLang('lang_invalid_data'));
return false;
}
// Accessibility: optionally require an alt text before inserting.
if (tinyMCE.getParam("accessibility_warnings")) {
if (formObj.alt.value == "" && !confirm(tinyMCE.getLang('lang_advimage_missing_alt', '', true)))
return;
}
// Turn plain URLs into inline mouseover/mouseout swap-image handlers.
if (onmouseoversrc && onmouseoversrc != "")
onmouseoversrc = "this.src='" + convertURL(onmouseoversrc, tinyMCE.imgElement) + "';";
if (onmouseoutsrc && onmouseoutsrc != "")
onmouseoutsrc = "this.src='" + convertURL(onmouseoutsrc, tinyMCE.imgElement) + "';";
if (elm != null && elm.nodeName == "IMG") {
// Update path: mutate the existing image element attribute by attribute.
// mce_src keeps the unconverted URL so it survives URL conversion.
setAttrib(elm, 'src', convertURL(src, tinyMCE.imgElement));
setAttrib(elm, 'mce_src', src);
setAttrib(elm, 'alt');
setAttrib(elm, 'title');
setAttrib(elm, 'border');
setAttrib(elm, 'vspace');
setAttrib(elm, 'hspace');
setAttrib(elm, 'width');
setAttrib(elm, 'height');
setAttrib(elm, 'onmouseover', onmouseoversrc);
setAttrib(elm, 'onmouseout', onmouseoutsrc);
setAttrib(elm, 'id');
setAttrib(elm, 'dir');
setAttrib(elm, 'lang');
setAttrib(elm, 'longdesc');
setAttrib(elm, 'usemap');
setAttrib(elm, 'style');
setAttrib(elm, 'class', getSelectValue(formObj, 'classlist'));
setAttrib(elm, 'align', getSelectValue(formObj, 'align'));
//tinyMCEPopup.execCommand("mceRepaint");
// Repaint if dimensions changed
if (formObj.width.value != orgImageWidth || formObj.height.value != orgImageHeight)
inst.repaint();
// Refresh in old MSIE
if (tinyMCE.isMSIE5)
elm.outerHTML = elm.outerHTML;
} else {
// Insert path: build the tag as a string and let the editor insert it.
var html = "<img";
html += makeAttrib('src', convertURL(src, tinyMCE.imgElement));
html += makeAttrib('mce_src', src);
html += makeAttrib('alt');
html += makeAttrib('title');
html += makeAttrib('border');
html += makeAttrib('vspace');
html += makeAttrib('hspace');
html += makeAttrib('width');
html += makeAttrib('height');
html += makeAttrib('onmouseover', onmouseoversrc);
html += makeAttrib('onmouseout', onmouseoutsrc);
html += makeAttrib('id');
html += makeAttrib('dir');
html += makeAttrib('lang');
html += makeAttrib('longdesc');
html += makeAttrib('usemap');
html += makeAttrib('style');
html += makeAttrib('class', getSelectValue(formObj, 'classlist'));
html += makeAttrib('align', getSelectValue(formObj, 'align'));
html += " />";
tinyMCEPopup.execCommand("mceInsertContent", false, html);
}
tinyMCE._setEventsEnabled(inst.getBody(), false);
tinyMCEPopup.close();
}
// Closes the dialog without applying any changes.
function cancelAction() {
tinyMCEPopup.close();
}
// Mirrors the current alignment/border/spacing form values onto the sample
// image in the "appearance" preview pane, if that pane exists.
function changeAppearance() {
	var form = document.forms[0];
	var sample = document.getElementById('alignSampleImg');

	if (!sample)
		return;

	sample.align = form.align.value;
	sample.border = form.border.value;
	sample.hspace = form.hspace.value;
	sample.vspace = form.vspace.value;
}
// Toggles the swap-image controls when the "mouse move" checkbox changes:
// checked enables them, unchecked disables them.
function changeMouseMove() {
var formObj = document.forms[0];
setSwapImageDisabled(!formObj.onmousemovecheck.checked);
}
// Synchronizes the free-form "style" field with the dimension/spacing form
// fields. With the 'inline_styles' option enabled the field values are
// written into the style string as CSS; otherwise the corresponding CSS
// properties are stripped so the plain HTML attributes take effect.
function updateStyle() {
	var form = document.forms[0];
	var st = tinyMCE.parseStyle(form.style.value);

	// Empty field -> empty CSS value, otherwise append the px unit.
	function px(v) {
		return v == '' ? '' : v + "px";
	}

	if (tinyMCE.getParam('inline_styles', false)) {
		st['width'] = px(form.width.value);
		st['height'] = px(form.height.value);
		st['border-width'] = px(form.border.value);
		st['margin-top'] = px(form.vspace.value);
		st['margin-bottom'] = px(form.vspace.value);
		st['margin-left'] = px(form.hspace.value);
		st['margin-right'] = px(form.hspace.value);
	} else {
		st['width'] = st['height'] = st['border-width'] = null;

		// Only drop margins that were produced by the symmetric
		// vspace/hspace fields; keep asymmetric hand-written margins.
		if (st['margin-top'] == st['margin-bottom'])
			st['margin-top'] = st['margin-bottom'] = null;

		if (st['margin-left'] == st['margin-right'])
			st['margin-left'] = st['margin-right'] = null;
	}

	form.style.value = tinyMCE.serializeStyle(st);
}
// Pushes values parsed from the "style" field back into the individual
// dimension/spacing form fields (the inverse of updateStyle). Margins are
// only copied when they are symmetric, matching how updateStyle wrote them.
function styleUpdated() {
	var form = document.forms[0];
	var st = tinyMCE.parseStyle(form.style.value);

	function stripPx(v) {
		return v.replace('px', '');
	}

	if (st['width'])
		form.width.value = stripPx(st['width']);

	if (st['height'])
		form.height.value = stripPx(st['height']);

	if (st['margin-top'] && st['margin-top'] == st['margin-bottom'])
		form.vspace.value = stripPx(st['margin-top']);

	if (st['margin-left'] && st['margin-left'] == st['margin-right'])
		form.hspace.value = stripPx(st['margin-left']);

	if (st['border-width'])
		form.border.value = stripPx(st['border-width']);
}
// Recomputes the height field from the width field, keeping the aspect ratio
// of the preloaded preview image when "constrain proportions" is checked.
// Falls through to updateStyle() when constraining is off or no preview
// image has been loaded yet (preloadImg is set by updateImageData()).
function changeHeight() {
	var formObj = document.forms[0];

	if (!formObj.constrain.checked || !preloadImg) {
		updateStyle();
		return;
	}

	// Nothing to scale against until both fields are filled in.
	if (formObj.width.value == "" || formObj.height.value == "")
		return;

	// Explicit radix 10 so values with leading zeros cannot be parsed as
	// octal by older JavaScript engines.
	var scaled = (parseInt(formObj.width.value, 10) / parseInt(preloadImg.width, 10)) * preloadImg.height;
	formObj.height.value = scaled.toFixed(0);
	updateStyle();
}
// Recomputes the width field from the height field, keeping the aspect ratio
// of the preloaded preview image when "constrain proportions" is checked.
// Mirror image of changeHeight().
function changeWidth() {
	var formObj = document.forms[0];

	if (!formObj.constrain.checked || !preloadImg) {
		updateStyle();
		return;
	}

	// Nothing to scale against until both fields are filled in.
	if (formObj.width.value == "" || formObj.height.value == "")
		return;

	// Explicit radix 10 so values with leading zeros cannot be parsed as
	// octal by older JavaScript engines.
	var scaled = (parseInt(formObj.height.value, 10) / parseInt(preloadImg.height, 10)) * preloadImg.width;
	formObj.width.value = scaled.toFixed(0);
	updateStyle();
}
// Image-list onchange callback for the main image: copies the list entry's
// display name into alt/title, clears the stored dimensions and reloads the
// preview from the newly selected URL.
// NOTE(review): the `value` parameter is unused here — the URL is re-read
// from the target form field instead; presumably already written by the
// select's inline onchange handler (see getImageListHTML).
function onSelectMainImage(target_form_element, name, value) {
var formObj = document.forms[0];
formObj.alt.value = name;
formObj.title.value = name;
resetImageData();
showPreviewImage(formObj.elements[target_form_element].value, false);
}
// Renders the preview image for the given URL into the 'prev' container.
// `start` is true during dialog initialization; it suppresses resetting the
// dimension fields and is forwarded to updateImageData() via the onload
// handler. An empty src clears the preview.
// NOTE(review): formObj is assigned but never used, and `var src` re-declares
// the parameter (harmless in ES5 but worth cleaning up).
function showPreviewImage(src, start) {
var formObj = document.forms[0];
selectByValue(document.forms[0], 'imagelistsrc', src);
var elm = document.getElementById('prev');
// Resolve relative URLs against the editor's base href for the preview.
var src = src == "" ? src : tinyMCE.convertRelativeToAbsoluteURL(tinyMCE.settings['base_href'], src);
if (!start && tinyMCE.getParam("advimage_update_dimensions_onchange", true))
resetImageData();
if (src == "")
elm.innerHTML = "";
else
elm.innerHTML = '<img id="previewImg" src="' + src + '" border="0" onload="updateImageData(' + start + ');" onerror="resetImageData();" />'
}
// onload handler for the preview image: fills empty width/height fields from
// the image's natural dimensions and refreshes the style string.
// Assigning preloadImg without `var` deliberately makes it a global; it is
// read later by changeWidth()/changeHeight() for aspect-ratio scaling.
function updateImageData(start) {
var formObj = document.forms[0];
preloadImg = document.getElementById('previewImg');
// During dialog startup (start == true) keep whatever the element had.
if (!start && formObj.width.value == "")
formObj.width.value = preloadImg.width;
if (!start && formObj.height.value == "")
formObj.height.value = preloadImg.height;
updateStyle();
}
// Clears the width/height fields so they can be refilled from the next
// loaded preview image.
function resetImageData() {
	var form = document.forms[0];
	form.width.value = "";
	form.height.value = "";
}
// Returns the value of the currently selected option of the named select
// element, or "" when the field is missing, is not a select, or has no
// current selection. The original indexed options[selectedIndex] without
// checking for selectedIndex == -1, which throws on a selection-less select.
function getSelectValue(form_obj, field_name) {
	var elm = form_obj.elements[field_name];

	if (elm == null || elm.options == null || elm.selectedIndex < 0)
		return "";

	return elm.options[elm.selectedIndex].value;
}
// Builds the HTML for an image-list <select> whose inline onchange handler
// copies the chosen URL into the given form field and, when supplied,
// additionally calls onchange_func(field, text, value). Returns "" when no
// global tinyMCEImageList is configured, so the caller can hide the row.
function getImageListHTML(elm_id, target_form_element, onchange_func) {
	if (typeof(tinyMCEImageList) == "undefined" || tinyMCEImageList.length == 0)
		return "";

	var parts = [];

	parts.push('<select id="' + elm_id + '" name="' + elm_id + '"');
	parts.push(' class="mceImageList" onfocus="tinyMCE.addSelectAccessibility(event, this, window);" onchange="this.form.' + target_form_element + '.value=');
	parts.push('this.options[this.selectedIndex].value;');

	if (typeof(onchange_func) != "undefined")
		parts.push(onchange_func + '(\'' + target_form_element + '\',this.options[this.selectedIndex].text,this.options[this.selectedIndex].value);');

	parts.push('"><option value="">---</option>');

	// tinyMCEImageList entries are [title, url] pairs.
	for (var i = 0; i < tinyMCEImageList.length; i++)
		parts.push('<option value="' + tinyMCEImageList[i][1] + '">' + tinyMCEImageList[i][0] + '</option>');

	parts.push('</select>');

	return parts.join('');
}
// Executed while the dialog page is still loading: preinit() (defined
// earlier in this file) prepares the form before TinyMCE calls init().
preinit();
| {
"pile_set_name": "Github"
} |
package huawei.android.widget.pattern;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.drawable.Drawable;
import android.text.TextPaint;
import android.text.TextUtils;
import android.util.SparseArray;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.BaseAdapter;
import android.widget.GridView;
import android.widget.ImageView;
import android.widget.ListAdapter;
import android.widget.TextView;
import huawei.android.widget.loader.ResLoaderUtil;
import java.util.List;
public class HwGridPatternView {
public static final int LAYOUT_STYLE_LARGE = 1;
public static final int LAYOUT_STYLE_LARGE_DOUBLE_SUBTITLE = 3;
public static final int LAYOUT_STYLE_MEDIUM = 2;
private static final int WIDTH_DIV = 2;
private HwGridPatternViewAdapter<HwGridPatternViewBean> mAdapter;
private Context mContext;
public HwGridPatternView(Context context) {
this.mContext = context;
}
public void setData(List<HwGridPatternViewBean> datas, GridView gridView, int layoutStyle) {
String layoutName = "hwpattern_gridpattern_l";
if (layoutStyle == 1) {
layoutName = "hwpattern_gridpattern_l";
} else if (layoutStyle == 2) {
layoutName = "hwpattern_gridpattern_m";
} else if (layoutStyle == 3) {
layoutName = "hwpattern_gridpattern_double_subtitle";
}
this.mAdapter = new HwGridPatternViewAdapter<HwGridPatternViewBean>(this.mContext, datas, ResLoaderUtil.getLayoutId(this.mContext, layoutName)) {
/* class huawei.android.widget.pattern.HwGridPatternView.AnonymousClass1 */
public void convert(ViewHolder holder, HwGridPatternViewBean gridBean) {
TextView subTitleTv;
if (holder != null && gridBean != null) {
int playBackViewId = ResLoaderUtil.getViewId(HwGridPatternView.this.mContext, "hwpattern_gridpattern_play_back");
int playIconId = ResLoaderUtil.getViewId(HwGridPatternView.this.mContext, "hwpattern_gridpattern_play_icon");
int titleIconId = ResLoaderUtil.getViewId(HwGridPatternView.this.mContext, "hwpattern_gridpattern_title_icon");
holder.setImageDrawable(playBackViewId, gridBean.getPlayBackDrawable());
holder.setImageDrawable(playIconId, gridBean.getPlayIconDrawable());
holder.setImageDrawable(titleIconId, gridBean.getTitleIconDrawable());
int titleId = ResLoaderUtil.getViewId(HwGridPatternView.this.mContext, "hwpattern_gridpattern_title");
int subtititleId = ResLoaderUtil.getViewId(HwGridPatternView.this.mContext, "hwpattern_gridpattern_subtitle");
int secondSubtitleId = ResLoaderUtil.getViewId(HwGridPatternView.this.mContext, "hwpattern_gridpattern_subtitle_second");
int buttonId = ResLoaderUtil.getViewId(HwGridPatternView.this.mContext, "hwpattern_gridpattern_button");
holder.setText(titleId, gridBean.getTitle());
holder.setText(subtititleId, gridBean.getSubTitle());
holder.setText(secondSubtitleId, gridBean.getSubTitleSecond());
holder.setText(buttonId, gridBean.getButtonText());
holder.setOnClickListener(buttonId, gridBean.getButtonListener());
if (!TextUtils.isEmpty(gridBean.getSubTitleSecond()) && !TextUtils.isEmpty(gridBean.getSubTitle()) && (subTitleTv = (TextView) holder.getView(subtititleId)) != null) {
subTitleTv.post(HwGridPatternView.this.getAction(holder, gridBean, subTitleTv, (TextView) holder.getView(secondSubtitleId)));
}
}
}
};
if (gridView != null) {
gridView.setAdapter((ListAdapter) this.mAdapter);
}
}
/* access modifiers changed from: private */
/* access modifiers changed from: public */
private Runnable getAction(final ViewHolder holder, final HwGridPatternViewBean gridBean, final TextView subTitleTv, final TextView subTitleSecondTv) {
return new Runnable() {
/* class huawei.android.widget.pattern.HwGridPatternView.AnonymousClass2 */
public void run() {
ViewGroup.MarginLayoutParams layoutParams;
int subtitleLayoutId = ResLoaderUtil.getViewId(HwGridPatternView.this.mContext, "hwpattern_gridpattern_subtitle_layout");
int dividingId = ResLoaderUtil.getViewId(HwGridPatternView.this.mContext, "hwpattern_gridpattern_subtitle_dividing");
if (subTitleTv != null && subTitleSecondTv != null) {
View subtitleLayout = holder.getView(subtitleLayoutId);
View dividingView = holder.getView(dividingId);
if (subtitleLayout != null) {
int dividingWidth = 0;
if (dividingView != null) {
ViewGroup.LayoutParams params = dividingView.getLayoutParams();
if (params instanceof ViewGroup.MarginLayoutParams) {
layoutParams = (ViewGroup.MarginLayoutParams) params;
} else {
layoutParams = new ViewGroup.MarginLayoutParams(params);
}
dividingWidth = dividingView.getWidth() + layoutParams.getMarginStart() + layoutParams.getMarginEnd();
}
TextPaint textPaint = new TextPaint(subTitleTv.getPaint());
float subTitleTvWidth = textPaint.measureText(gridBean.getSubTitle());
float subTitleSecondTvWidth = textPaint.measureText(gridBean.getSubTitleSecond());
float availableWidth = (float) (subtitleLayout.getMeasuredWidth() - dividingWidth);
float halfAvailableWidth = availableWidth / 2.0f;
if (subTitleTvWidth + subTitleSecondTvWidth <= availableWidth) {
subTitleTv.setWidth((int) subTitleTvWidth);
} else if (subTitleTvWidth < halfAvailableWidth) {
subTitleTv.setWidth((int) subTitleTvWidth);
} else if (subTitleSecondTvWidth < halfAvailableWidth) {
subTitleTv.setWidth((int) (availableWidth - subTitleSecondTvWidth));
} else {
subTitleTv.setWidth((int) halfAvailableWidth);
}
}
}
}
};
}
public abstract class HwGridPatternViewAdapter<T> extends BaseAdapter {
private Context mContext;
private int mItemLayoutId;
private List<T> mLists;
private ViewHolder mViewHolder;
public abstract void convert(ViewHolder viewHolder, T t);
public HwGridPatternViewAdapter(Context context, List<T> list, int itemLayoutId) {
this.mContext = context;
this.mLists = list;
this.mItemLayoutId = itemLayoutId;
}
public int getCount() {
List<T> list = this.mLists;
if (list == null) {
return 0;
}
return list.size();
}
public T getItem(int position) {
List<T> list = this.mLists;
if (list == null || position >= list.size()) {
return null;
}
return this.mLists.get(position);
}
public long getItemId(int position) {
return (long) position;
}
public View getView(int position, View convertView, ViewGroup parent) {
ViewHolder holder;
if (convertView == null) {
holder = new ViewHolder(this.mContext, parent, this.mItemLayoutId, position);
} else {
holder = (ViewHolder) convertView.getTag();
}
convert(holder, getItem(position));
return holder.getConvertView();
}
}
/* access modifiers changed from: private */
public class ViewHolder {
private Context mContext;
private View mConvertView;
private final SparseArray<View> mViews;
private ViewHolder(Context context, ViewGroup parent, int itemLayoutId, int position) {
this.mContext = context;
this.mViews = new SparseArray<>();
this.mConvertView = LayoutInflater.from(this.mContext).inflate(itemLayoutId, parent, false);
View view = this.mConvertView;
if (view != null) {
view.setTag(this);
}
}
/* access modifiers changed from: private */
/* access modifiers changed from: public */
private <T extends View> T getView(int viewId) {
T t;
View view;
View view2 = (T) this.mViews.get(viewId);
if (!(view2 != null || (view = this.mConvertView) == null || (view2 = (T) view.findViewById(viewId)) == null)) {
this.mViews.put(viewId, view2);
}
return t;
}
public View getConvertView() {
return this.mConvertView;
}
public ViewHolder setText(int viewId, String text) {
TextView view = (TextView) getView(viewId);
if (view != null) {
view.setText(text);
}
return this;
}
public ViewHolder setOnClickListener(int viewId, View.OnClickListener listener) {
View view = getView(viewId);
if (view != null) {
view.setOnClickListener(listener);
}
return this;
}
public ViewHolder setImageDrawable(int viewId, Drawable drawable) {
ImageView view = (ImageView) getView(viewId);
if (view != null) {
view.setImageDrawable(drawable);
}
return this;
}
public ViewHolder setImageBitmap(int viewId, Bitmap bitmap) {
ImageView view = (ImageView) getView(viewId);
if (view != null) {
view.setImageBitmap(bitmap);
}
return this;
}
}
}
| {
"pile_set_name": "Github"
} |
// Copyright (c) 2016-2017, Intel Corporation.
// Sample client that works with the linux server found in iotivity-constrained
// This sample will find the resource, then periodically retrieve the resource
// To run it on the Arduino 101, you'll need to connect via BLE with your
// host machine (e.g. Linux), then add a new route for the bt0 interface:
// ip -6 route add 2001:db8::/64 dev bt0
// Load the OCF module and grab its client side.
var ocf = require('ocf');
var client = ocf.client;
console.log("Started OCF client");
// Log device-level errors; errors without a deviceId are ignored here.
client.on('error', function(error) {
if (error.deviceId)
console.log("Error for device: " + error.deviceId);
});
// 'update' event handler: logs the identifying fields of the updated
// resource, plus its 'state' property when the notification carries
// properties.
function onupdate(resource) {
console.log("Resource updated:");
console.log("  deviceId: " + resource.deviceId);
console.log("  resourcePath: " + resource.resourcePath);
if (resource.properties != undefined) {
console.log("Resource property 'state' is " + resource.properties.state);
} else {
console.log("resource.properties not found");
}
}
client.on('update', onupdate);
// NOTE(review): lightOn is never used in this excerpt — verify it is not
// needed before removing.
var lightOn = true;
// TODO: Must save away the timer handle or else GC will destroy it after a few iterations
var t1 = null;
ocf.start();
// Discover the light resource, then poll it once a second. The discovered
// resource is passed to the interval callback as its extra argument.
client.findResources({ resourceType:"core.light" }).then(function(resource) {
console.log("findResources() was successful, deviceId=" + resource.deviceId);
t1 = setInterval(function(resource) {
client.retrieve(resource.deviceId, { observable: false }).then(function(res) {
console.log("retrieve() was successful, deviceId=" + res.deviceId);
}, function(error) {
console.log("retrieve() returned an error: " + error.name);
});
}, 1000, resource);
}, function(error) {
console.log("findResources() returned an error: " + error.name);
});
| {
"pile_set_name": "Github"
} |
/*
Ming, an SWF output library
Copyright (C) 2002 Opaque Industries - http://www.opaque.net/
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include "assembler.h"
#include "compile.h"
#include "actiontypes.h"
/* Number of action bytes emitted so far; used as the current output offset
   when a label is defined, and as the scan limit when patching. */
int len;
Buffer asmBuffer;
/* Number of entries currently used in the labels table below. */
int nLabels;
/* One assembler label: its name and the byte offset it was defined at. */
struct label
{
char *name;
int offset;
};
/* Fixed-size label table; indices into this array are used as branch
   placeholders until bufferPatchTargets() runs. */
struct label labels[256];
/* Returns the index of the label named `l` in the global labels table,
   or -1 when no such label has been recorded yet. */
static int
findLabel(char *l)
{
	int i = 0;

	while ( i < nLabels )
	{
		if ( !strcmp(labels[i].name, l) )
			return i;
		++i;
	}

	return -1;
}
/* Records the current output offset (`len`) under the given label name.
   A new entry is created on first use; re-defining an existing label just
   updates its offset. */
static void
addLabel(char *l)
{
	int i = findLabel(l);

	if ( i != -1 )
	{
		labels[i].offset = len;
		return;
	}

	/* The fixed-size table used to be written unconditionally; guard it so
	   a 257th distinct label cannot overflow the labels array. */
	if ( nLabels >= (int)(sizeof(labels) / sizeof(labels[0])) )
		return;

	labels[nLabels].name = strdup(l);
	labels[nLabels].offset = len;
	++nLabels;
}
/* Emits the 16-bit operand of a branch that targets label `l`.
   What is written here is the label's table index, not a byte offset;
   bufferPatchTargets() later rewrites it into the real relative offset once
   all label positions are known. A forward reference creates the label entry
   on the spot (its offset is fixed up when the label is actually defined). */
int
bufferBranchTarget(Buffer output, char *l)
{
int i = findLabel(l);
if ( i == -1 )
{
i = nLabels;
addLabel(l);
}
return bufferWriteS16(output, i);
}
/* Second pass over the emitted action bytes: rewrites every jump/if operand
   (which currently holds a label table index, see bufferBranchTarget) into
   the real 16-bit little-endian offset relative to the end of the operand.
   NOTE(review): only one byte is read as the label index (`target =
   output[i]`), although bufferBranchTarget wrote it as a 16-bit value —
   more than 255 labels would break here; confirm against upstream Ming. */
void
bufferPatchTargets(Buffer buffer)
{
int l, i = 0;
unsigned char *output = buffer->buffer;
while ( i < len )
{
if ( output[i] & 0x80 ) /* then it's a multibyte instruction */
{
if ( output[i] == SWFACTION_JUMP ||
output[i] == SWFACTION_IF )
{
int target, offset;
i += 3; /* plus instruction plus two-byte length */
target = output[i];
/* Offset is relative to the byte after the two operand bytes. */
offset = labels[target].offset - (i+2);
output[i] = offset & 0xff;
output[++i] = (offset>>8) & 0xff;
++i;
}
else
{
/* Other multibyte actions: skip their two-byte little-endian
   payload length plus the payload itself. */
++i;
l = output[i];
++i;
l += output[i]<<8;
i += l+1;
}
}
else
++i;
}
}
/*
* Local variables:
* tab-width: 2
* c-basic-offset: 2
* End:
*/
| {
"pile_set_name": "Github"
} |
# operator[]
* string[meta header]
* std[meta namespace]
* basic_string[meta class]
* function[meta id-type]
```cpp
const_reference operator[](size_type pos) const; // (1) C++03
const_reference operator[](size_type pos) const noexcept; // (1) C++11
reference operator[](size_type pos); // (2) C++03
reference operator[](size_type pos) noexcept; // (2) C++11
```
## 概要
`pos` 番目の要素への参照を取得する。
## 要件
`pos <=` [`size()`](size.md)
## 戻り値
- C++03
- `pos <` [`size()`](size.md) の場合、`*(`[`begin()`](begin.md) `+ pos)` を返す。
- `pos ==` [`size()`](size.md)の場合、`charT()` の値を持ったオブジェクトへの参照を返す。
- それ以外の場合は、未定義動作。
- C++11以降
- `pos <` [`size()`](size.md) の場合、`*(`[`begin()`](begin.md) `+ pos)` を返す。
- そうでない場合は、`charT()` の値を持ったオブジェクトへの参照を返す。
- 後者の場合、参照を変更するべきではない。
## 例外
投げない
## 計算量
定数時間
## 例
```cpp example
#include <iostream>
#include <string>
int main()
{
std::string s = "hello";
char& c = s[1];
std::cout << c << std::endl;
}
```
* s[1][color ff0000]
### 出力
```
e
```
## 参照
| {
"pile_set_name": "Github"
} |
/*
* JOCL - Java bindings for OpenCL
*
* Copyright (c) 2009-2015 Marco Hutter - http://www.jocl.org
*
* Permission is hereby granted, free of charge, to any person
* obtaining a copy of this software and associated documentation
* files (the "Software"), to deal in the Software without
* restriction, including without limitation the rights to use,
* copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following
* conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
* OTHER DEALINGS IN THE SOFTWARE.
*/
package org.jocl;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.util.Locale;
import java.util.UUID;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
* Utility class for detecting the operating system and architecture
* types, and automatically loading the matching native library
* as a resource or from a file. <br>
* <br>
* This class is not intended to be used by clients.<br>
* <br>
*/
public final class LibUtils
{
// The architecture and OS detection has been adapted from
// http://javablog.co.uk/2007/05/19/making-jni-cross-platform/
// and extended with http://lopica.sourceforge.net/os.html
/**
* The logger used in this class
*/
private static final Logger logger =
Logger.getLogger(LibUtils.class.getName());
/**
* The default log level
*/
private static final Level level = Level.FINE;
/**
* The directory where libraries are expected in JAR files,
* when they are loaded as resources
*/
private static final String LIBRARY_PATH_IN_JAR = "/lib";
/**
* Enumeration of common operating systems, independent of version
* or architecture.
*/
enum OSType
{
// UNKNOWN is the fallback when the operating system cannot be determined
// (the detection code is outside this excerpt — presumably keyed off the
// "os.name" system property; TODO confirm).
ANDROID, APPLE, LINUX, SUN, WINDOWS, UNKNOWN
}
/**
* Enumeration of common CPU architectures.
*/
enum ArchType
{
// UNKNOWN is the fallback when the CPU architecture cannot be determined
// (detection code is outside this excerpt).
PPC, PPC_64, SPARC, X86, X86_64, ARM, ARM64, MIPS, MIPS64, RISC, UNKNOWN
}
/**
* Private constructor to prevent instantiation.
*/
private LibUtils()
{
// Private constructor to prevent instantiation.
}
/**
* Loads the specified library. <br>
* <br>
* The method will attempt to load the library using the usual
* <code>System.loadLibrary</code> call. In this case, the specified
* dependent libraries are ignored, because they are assumed to be
* loaded automatically in the same way as the main library.<br>
* <br>
* If the library can <b>not</b> be loaded with the
* <code>System.loadLibrary</code> call, then this method will attempt
* to load the file as a resource (usually one that is contained in
* a JAR file). In this case, the library is assumed to be located
* in subdirectory called <code>"/lib"</code> inside the JAR file.
* The method will try to load a resource that has the platform-specific
* {@link #createLibraryFileName(String) library file name} from
* this directory, extract it into the default directory for temporary
* files, and load the library from there. <br>
* <br>
* In this case, the specified dependent libraries may also be loaded
* as resources. They are assumed to be located in subdirectories
* that are named according to the {@link #osString()} and
* {@link #archString()} of the executing platform. For example, such
* a library may be located in a directory inside the JAR that is
* called <code>"/lib/windows/x86_64"</code>. These dependent libraries
* will be extracted and loaded before the main library is loaded.
*
* @param libraryName The name of the library (without a platform specific
* prefix or file extension)
* @param dependentLibraryNames The names of libraries that the library
* to load depends on. If the library is loaded as a resource, then
* it will be attempted to also load these libraries as resources, as
* described above
* @throws UnsatisfiedLinkError if the native library
* could not be loaded.
*/
public static void loadLibrary(
String libraryName, String ... dependentLibraryNames)
{
logger.log(level, "Loading library: " + libraryName);
// First, try to load the specified library as a file
// that is visible in the default search path
Throwable throwableFromFile;
try
{
logger.log(level, "Loading library as a file");
System.loadLibrary(libraryName);
logger.log(level, "Loading library as a file DONE");
return;
}
// Throwable (not just UnsatisfiedLinkError) so the resource fallback is
// also taken for unexpected loader failures; the original cause is kept
// for the combined error report below.
catch (Throwable t)
{
logger.log(level, "Loading library as a file FAILED");
throwableFromFile = t;
}
// Now try to load the library by extracting the
// corresponding resource from the JAR file
try
{
logger.log(level, "Loading library as a resource");
loadLibraryResource(LIBRARY_PATH_IN_JAR,
libraryName, "", dependentLibraryNames);
logger.log(level, "Loading library as a resource DONE");
return;
}
catch (Throwable throwableFromResource)
{
logger.log(level, "Loading library as a resource FAILED",
throwableFromResource);
// Both strategies failed: build one UnsatisfiedLinkError whose message
// embeds the platform info and BOTH nested stack traces, since the
// error type cannot carry a cause on all supported JDKs.
StringWriter sw = new StringWriter();
PrintWriter pw = new PrintWriter(sw);
pw.println("Error while loading native library \"" +
libraryName + "\"");
pw.println("Operating system name: "+
System.getProperty("os.name"));
pw.println("Architecture : "+
System.getProperty("os.arch"));
pw.println("Architecture bit size: "+
System.getProperty("sun.arch.data.model"));
pw.println("---(start of nested stack traces)---");
pw.println("Stack trace from the attempt to " +
"load the library as a file:");
throwableFromFile.printStackTrace(pw);
pw.println("Stack trace from the attempt to " +
"load the library as a resource:");
throwableFromResource.printStackTrace(pw);
pw.println("---(end of nested stack traces)---");
pw.close();
throw new UnsatisfiedLinkError(sw.toString());
}
}
/**
* Load the library with the given name from a resource.
*
* @param resourceSubdirectoryName The subdirectory where the resource
* is expected
* @param libraryName The library name, e.g. "EXAMPLE-windows-x86"
* @param tempSubdirectoryName The name for the subdirectory in the
* temp directory, where the temporary files for dependent libraries
* should be stored
* @param dependentLibraryNames The names of libraries that the library
* to load depends on, and that may have to be loaded as resources and
* stored as temporary files as well
* @throws Throwable If the library could not be loaded
*/
private static void loadLibraryResource(
String resourceSubdirectoryName,
String libraryName,
String tempSubdirectoryName,
String ... dependentLibraryNames) throws Throwable
{
// First try to load all dependent libraries, recursively
for (String dependentLibraryName : dependentLibraryNames)
{
logger.log(level,
"Library " + libraryName +
" depends on " + dependentLibraryName);
// Dependent libraries live in OS/arch-specific subdirectories both
// inside the JAR and in the temp directory, so different platforms
// sharing one temp dir do not clash.
String dependentResourceSubdirectoryName =
resourceSubdirectoryName + "/" +
osString() + "/" +
archString();
String dependentLibraryTempSubDirectoryName =
libraryName+"_dependents" + File.separator +
osString() + File.separator +
archString() + File.separator;
// Recursive call: dependents of dependents are not supported here
// (no dependent names are passed on).
loadLibraryResource(
dependentResourceSubdirectoryName,
dependentLibraryName,
dependentLibraryTempSubDirectoryName);
}
// Now, prepare loading the actual library
String libraryFileName = createLibraryFileName(libraryName);
File libraryTempFile;
// Optionally append a UUID so each run extracts to a fresh file (useful
// when a previously extracted file may be locked by another process).
if (useUniqueLibraryNames())
{
String uniqueLibraryFileName =
createLibraryFileName(libraryName + "-" + UUID.randomUUID());
libraryTempFile = createTempFile(
tempSubdirectoryName, uniqueLibraryFileName);
}
else
{
libraryTempFile = createTempFile(
tempSubdirectoryName, libraryFileName);
}
// If the temporary file for the library does not exist, create it
if (!libraryTempFile.exists())
{
String libraryResourceName =
resourceSubdirectoryName + "/" + libraryFileName;
logger.log(level,
"Writing resource " + libraryResourceName);
logger.log(level,
"to temporary file " + libraryTempFile);
writeResourceToFile(libraryResourceName, libraryTempFile);
// Optionally register the extracted file for later cleanup.
if (trackCreatedTempFiles())
{
LibTracker.track(libraryTempFile);
}
}
// Finally, try to load the library from the temporary file
logger.log(level, "Loading library " + libraryTempFile);
System.load(libraryTempFile.toString());
logger.log(level, "Loading library " + libraryTempFile + " DONE");
}
    /**
     * Create a file object representing the file with the given name
     * in the specified subdirectory of the default "temp" directory.
     * If the specified subdirectory does not exist yet, it is created.
     * The file itself is not created on disk by this method.
     *
     * @param tempSubdirectoryName The name of the subdirectory inside
     * the default temp directory ("java.io.tmpdir")
     * @param name The file name
     * @return The file
     * @throws IOException If the subdirectory can not be created
     */
    private static File createTempFile(
        String tempSubdirectoryName, String name) throws IOException
    {
        String tempDirName = System.getProperty("java.io.tmpdir");
        File tempSubDirectory =
            new File(tempDirName + File.separator + tempSubdirectoryName);
        if (!tempSubDirectory.exists())
        {
            boolean createdDirectory = tempSubDirectory.mkdirs();
            // NOTE(review): mkdirs() also returns false when another
            // thread/process created the directory between the exists()
            // check and this call — TODO confirm whether concurrent
            // extraction can happen here.
            if (!createdDirectory)
            {
                throw new IOException(
                    "Could not create directory for temporary file: " +
                    tempSubDirectory);
            }
        }
        String tempFileName = tempSubDirectory + File.separator + name;
        File tempFile = new File(tempFileName);
        return tempFile;
    }
/**
* Obtain an input stream to the resource with the given name, and write
* it to the specified file (which may not be <code>null</code>, and
* may not exist yet)
*
* @param resourceName The name of the resource
* @param file The file to write to
* @throws NullPointerException If the given file is <code>null</code>
* @throws IllegalArgumentException If the given file already exists
* @throws IOException If an IO error occurs
*/
private static void writeResourceToFile(
String resourceName, File file) throws IOException
{
if (file == null)
{
throw new NullPointerException("Target file may not be null");
}
if (file.exists())
{
throw new IllegalArgumentException(
"Target file already exists: "+file);
}
InputStream inputStream =
LibUtils.class.getResourceAsStream(resourceName);
if (inputStream == null)
{
throw new IOException(
"No resource found with name '"+resourceName+"'");
}
OutputStream outputStream = null;
try
{
outputStream = new FileOutputStream(file);
byte[] buffer = new byte[32768];
while (true)
{
int read = inputStream.read(buffer);
if (read < 0)
{
break;
}
outputStream.write(buffer, 0, read);
}
outputStream.flush();
}
finally
{
if (outputStream != null)
{
try
{
outputStream.close();
}
catch (IOException e)
{
logger.log(Level.SEVERE, e.getMessage(), e);
}
}
try
{
inputStream.close();
}
catch (IOException e)
{
logger.log(Level.SEVERE, e.getMessage(), e);
}
}
}
/**
* Returns whether the "uniqueLibaryNames" property was set,
* and the temporary files for the native libraries that are
* loaded as resources should receive a different name each
* time that they are loaded.<br>
* <br>
* PRELIMINARY!
*
* @return Whether the temporary files should receive unique names
*/
private static boolean useUniqueLibraryNames()
{
String uniqueLibraryNames =
System.getProperty("uniqueLibraryNames");
return "true".equals(uniqueLibraryNames);
}
    /**
     * Returns whether all temporary files that are created should
     * be tracked with a {@link LibTracker}
     * <br>
     * PRELIMINARY!
     *
     * @return Whether the files should be tracked
     */
    private static boolean trackCreatedTempFiles()
    {
        // Currently, this is only done when unique library names are used
        // (presumably so the never-reused, uniquely-named temp files can
        // be cleaned up — TODO confirm LibTracker's cleanup semantics)
        return useUniqueLibraryNames();
    }
/**
* Create the full library file name, including the extension
* and prefix, for the given library name. For example, the
* name "EXAMPLE" will become <br>
* EXAMPLE.dll on Windows <br>
* libEXAMPLE.so on Linux <br>
* EXAMPLE.dylib on MacOS <br>
*
* @param libraryName The library name
* @return The full library name, with extension
*/
public static String createLibraryFileName(String libraryName)
{
String libPrefix = createLibraryPrefix();
String libExtension = createLibraryExtension();
String fullName = libPrefix + libraryName + "." + libExtension;
return fullName;
}
/**
* Returns the extension for dynamically linked libraries on the
* current OS. That is, returns <code>"dylib"</code> on Apple,
* <code>"so"</code> on Linux and Sun, and <code>"dll"</code>
* on Windows.
*
* @return The library extension
*/
private static String createLibraryExtension()
{
OSType osType = calculateOS();
switch (osType)
{
case APPLE:
return "dylib";
case ANDROID:
case LINUX:
case SUN:
return "so";
case WINDOWS:
return "dll";
default:
break;
}
return "";
}
/**
* Returns the prefix for dynamically linked libraries on the
* current OS. That is, returns <code>"lib"</code> on Apple,
* Linux and Sun, and the empty String on Windows.
*
* @return The library prefix
*/
private static String createLibraryPrefix()
{
OSType osType = calculateOS();
switch (osType)
{
case ANDROID:
case APPLE:
case LINUX:
case SUN:
return "lib";
case WINDOWS:
return "";
default:
break;
}
return "";
}
    /**
     * Creates the name for the native library with the given base name for
     * the current platform, by appending strings that indicate the current
     * operating system and architecture.<br>
     * <br>
     * The resulting name will be of the form<br>
     * <code>baseName-OSType-ArchType</code><br>
     * where OSType and ArchType are the <strong>lower case</strong> Strings
     * of the respective {@link LibUtils.OSType OSType} and
     * {@link LibUtils.ArchType ArchType} enum constants.<br>
     * <br>
     * For example, the library name with the base name "EXAMPLE" may be<br>
     * <code>EXAMPLE-windows-x86</code><br>
     * <br>
     * Note that the resulting name will not include any platform specific
     * prefixes or extensions for the actual name.
     *
     * @param baseName The base name of the library
     * @return The library name
     */
    public static String createPlatformLibraryName(String baseName)
    {
        return baseName + "-" + osString() + "-" + archString();
    }
/**
* Returns a the <strong>lower case</strong> String representation of
* the {@link #calculateOS() OSType} of this platform. E.g.
* <code>"windows"</code>.
*
* @return The string describing the operating system
*/
private static String osString()
{
OSType osType = calculateOS();
return osType.toString().toLowerCase(Locale.ENGLISH);
}
/**
* Returns a the <strong>lower case</strong> String representation of
* the {@link #calculateArch() ArchType} of this platform. E.g.
* <code>"x86_64"</code>.
*
* @return The string describing the architecture
*/
private static String archString()
{
ArchType archType = calculateArch();
return archType.toString().toLowerCase(Locale.ENGLISH);
}
/**
* Calculates the current OSType
*
* @return The current OSType
*/
static OSType calculateOS()
{
String vendor = System.getProperty("java.vendor");
if ("The Android Project".equals(vendor))
{
return OSType.ANDROID;
}
String osName = System.getProperty("os.name");
osName = osName.toLowerCase(Locale.ENGLISH);
if (osName.startsWith("mac os"))
{
return OSType.APPLE;
}
if (osName.startsWith("windows"))
{
return OSType.WINDOWS;
}
if (osName.startsWith("linux"))
{
return OSType.LINUX;
}
if (osName.startsWith("sun"))
{
return OSType.SUN;
}
return OSType.UNKNOWN;
}
/**
* Calculates the current ARCHType
*
* @return The current ARCHType
*/
private static ArchType calculateArch()
{
String osArch = System.getProperty("os.arch");
osArch = osArch.toLowerCase(Locale.ENGLISH);
if ("i386".equals(osArch) ||
"x86".equals(osArch) ||
"i686".equals(osArch))
{
return ArchType.X86;
}
if (osArch.startsWith("amd64") || osArch.startsWith("x86_64"))
{
return ArchType.X86_64;
}
if (osArch.startsWith("arm64"))
{
return ArchType.ARM64;
}
if (osArch.startsWith("arm"))
{
return ArchType.ARM;
}
if ("ppc".equals(osArch) || "powerpc".equals(osArch))
{
return ArchType.PPC;
}
if (osArch.startsWith("ppc"))
{
return ArchType.PPC_64;
}
if (osArch.startsWith("sparc"))
{
return ArchType.SPARC;
}
if (osArch.startsWith("mips64"))
{
return ArchType.MIPS64;
}
if (osArch.startsWith("mips"))
{
return ArchType.MIPS;
}
if (osArch.contains("risc"))
{
return ArchType.RISC;
}
return ArchType.UNKNOWN;
}
}
| {
"pile_set_name": "Github"
} |
#pragma once
#include <string>
#include <vector>
#include "test/test_common/environment.h"
#include "gtest/gtest.h"
#include "openssl/ssl.h"
#include "openssl/x509v3.h"
namespace Envoy {
namespace Extensions {
namespace TransportSockets {
namespace Tls {
// Reads and parses a PEM-encoded X.509 certificate from the file at `path`.
// On parse failure the returned pointer is null and a gtest expectation
// failure is recorded (the test continues, so callers should not deref
// without checking).
inline bssl::UniquePtr<X509> readCertFromFile(const std::string& path) {
  const std::string& file_content = TestEnvironment::readFileToStringForTest(path);
  // BIO_new_mem_buf creates a read-only BIO over file_content's buffer (no copy).
  bssl::UniquePtr<BIO> bio(BIO_new_mem_buf(file_content.c_str(), file_content.size()));
  bssl::UniquePtr<X509> cert(PEM_read_bio_X509(bio.get(), nullptr, nullptr, nullptr));
  EXPECT_NE(cert, nullptr);
  return cert;
}
} // namespace Tls
} // namespace TransportSockets
} // namespace Extensions
} // namespace Envoy
| {
"pile_set_name": "Github"
} |
"""
Helper function for returning the field information that is associated
with a model class. This includes returning all the forward and reverse
relationships and their associated metadata.
Usage: `get_field_info(model)` returns a `FieldInfo` instance.
"""
from collections import OrderedDict, namedtuple
# Immutable summary of a model's field metadata, returned by `get_field_info`.
FieldInfo = namedtuple('FieldResult', [
    'pk',  # Model field instance
    'fields',  # Dict of field name -> model field instance
    'forward_relations',  # Dict of field name -> RelationInfo
    'reverse_relations',  # Dict of field name -> RelationInfo
    'fields_and_pk',  # Shortcut for 'pk' + 'fields'
    'relations'  # Shortcut for 'forward_relations' + 'reverse_relations'
])
# Metadata describing a single relational field (forward or reverse).
RelationInfo = namedtuple('RelationInfo', [
    'model_field',  # Model field instance; None for reverse relations
    'related_model',  # Model class on the other side of the relation
    'to_many',  # True for *-to-many relations
    'to_field',  # Name of the targeted field, when available
    'has_through_model',  # True for m2m with a custom (non-auto) through model
    'reverse'  # True when this describes the reverse side of the relation
])
def get_field_info(model):
    """
    Given a model class, returns a `FieldInfo` instance, which is a
    `namedtuple`, containing metadata about the various field types on
    the model including information about their relationships.
    """
    # Always inspect the concrete model's options, even for proxy models.
    opts = model._meta.concrete_model._meta
    pk = _get_pk(opts)
    fields = _get_fields(opts)
    forward = _get_forward_relationships(opts)
    reverse = _get_reverse_relationships(opts)
    return FieldInfo(
        pk,
        fields,
        forward,
        reverse,
        _merge_fields_and_pk(pk, fields),
        _merge_relationships(forward, reverse),
    )
def _get_pk(opts):
pk = opts.pk
rel = pk.remote_field
while rel and rel.parent_link:
# If model is a child via multi-table inheritance, use parent's pk.
pk = pk.remote_field.model._meta.pk
rel = pk.remote_field
return pk
def _get_fields(opts):
fields = OrderedDict()
for field in [field for field in opts.fields if field.serialize and not field.remote_field]:
fields[field.name] = field
return fields
def _get_to_field(field):
return getattr(field, 'to_fields', None) and field.to_fields[0]
def _get_forward_relationships(opts):
    """
    Returns an `OrderedDict` of field names to `RelationInfo`.
    """
    relations = OrderedDict()
    # Forward one-to-one / many-to-one relations.
    for field in opts.fields:
        if not (field.serialize and field.remote_field):
            continue
        relations[field.name] = RelationInfo(
            model_field=field,
            related_model=field.remote_field.model,
            to_many=False,
            to_field=_get_to_field(field),
            has_through_model=False,
            reverse=False
        )
    # Forward many-to-many relations.
    for field in opts.many_to_many:
        if not field.serialize:
            continue
        relations[field.name] = RelationInfo(
            model_field=field,
            related_model=field.remote_field.model,
            to_many=True,
            # Many-to-many fields do not have `to_fields`.
            to_field=None,
            has_through_model=(
                not field.remote_field.through._meta.auto_created
            ),
            reverse=False
        )
    return relations
def _get_reverse_relationships(opts):
    """
    Returns an `OrderedDict` of field names to `RelationInfo`.
    """
    relations = OrderedDict()
    # Reverse ends of foreign-key / one-to-one relations first.
    for relation in opts.related_objects:
        if relation.field.many_to_many:
            continue
        relations[relation.get_accessor_name()] = RelationInfo(
            model_field=None,
            related_model=relation.related_model,
            to_many=relation.field.remote_field.multiple,
            to_field=_get_to_field(relation.field),
            has_through_model=False,
            reverse=True
        )
    # Then the reverse ends of many-to-many relations.
    for relation in opts.related_objects:
        if not relation.field.many_to_many:
            continue
        relations[relation.get_accessor_name()] = RelationInfo(
            model_field=None,
            related_model=relation.related_model,
            to_many=True,
            # Many-to-many fields do not have `to_fields`.
            to_field=None,
            has_through_model=(
                (getattr(relation.field.remote_field, 'through', None) is not None) and
                not relation.field.remote_field.through._meta.auto_created
            ),
            reverse=True
        )
    return relations
def _merge_fields_and_pk(pk, fields):
fields_and_pk = OrderedDict()
fields_and_pk['pk'] = pk
fields_and_pk[pk.name] = pk
fields_and_pk.update(fields)
return fields_and_pk
def _merge_relationships(forward_relations, reverse_relations):
return OrderedDict(
list(forward_relations.items()) +
list(reverse_relations.items())
)
def is_abstract_model(model):
    """
    Given a model class, returns True if it is abstract and False if it
    is not (including the case where it has no usable `_meta` at all).
    """
    meta = getattr(model, '_meta', None)
    if meta is None or not hasattr(meta, 'abstract'):
        return False
    return meta.abstract
| {
"pile_set_name": "Github"
} |
/*
Copyright 2014 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package meta
import (
"fmt"
"strings"
"k8s.io/apimachinery/pkg/runtime/schema"
utilerrors "k8s.io/apimachinery/pkg/util/errors"
)
// MultiRESTMapper is a wrapper for multiple RESTMappers, which are
// consulted in slice order by the methods below.
type MultiRESTMapper []RESTMapper
// String renders the wrapped mappers one per line, indented one tab level
// inside a "MultiRESTMapper{...}" wrapper (nested multi-line mappers are
// re-indented accordingly).
func (m MultiRESTMapper) String() string {
	parts := make([]string, 0, len(m))
	for _, mapper := range m {
		indented := strings.Join(strings.Split(fmt.Sprintf("%v", mapper), "\n"), "\n\t")
		parts = append(parts, indented)
	}
	return fmt.Sprintf("MultiRESTMapper{\n\t%s\n}", strings.Join(parts, "\n\t"))
}
// ResourceSingularizer converts a REST resource name from plural to singular
// (e.g., from pods to pod). The wrapped mappers are tried in order and the
// first successful result is returned. If every mapper fails, the named
// return values carry the singular form and error produced by the LAST
// mapper; with an empty mapper list the zero values ("", nil) are returned.
func (m MultiRESTMapper) ResourceSingularizer(resource string) (singular string, err error) {
	for _, t := range m {
		singular, err = t.ResourceSingularizer(resource)
		if err == nil {
			return
		}
	}
	return
}
// ResourcesFor collects the fully-qualified resources matching the given
// partial resource from every wrapped mapper, de-duplicated while preserving
// first-seen order. "No match" errors from individual mappers are skipped;
// any other error aborts the aggregation. If no mapper produced a match, a
// NoResourceMatchError is returned.
func (m MultiRESTMapper) ResourcesFor(resource schema.GroupVersionResource) ([]schema.GroupVersionResource, error) {
	allGVRs := []schema.GroupVersionResource{}
	for _, t := range m {
		gvrs, err := t.ResourcesFor(resource)
		// ignore "no match" errors, but any other error percolates back up
		if IsNoMatchError(err) {
			continue
		}
		if err != nil {
			return nil, err
		}
		// walk the existing values to de-dup (linear scan; fine for the
		// small result sets mappers typically return)
		for _, curr := range gvrs {
			found := false
			for _, existing := range allGVRs {
				if curr == existing {
					found = true
					break
				}
			}
			if !found {
				allGVRs = append(allGVRs, curr)
			}
		}
	}
	if len(allGVRs) == 0 {
		return nil, &NoResourceMatchError{PartialResource: resource}
	}
	return allGVRs, nil
}
// KindsFor collects the kinds matching the given partial resource from every
// wrapped mapper, de-duplicated while preserving first-seen order. "No match"
// errors from individual mappers are skipped; any other error aborts the
// aggregation. If no mapper produced a match, a NoResourceMatchError is
// returned.
func (m MultiRESTMapper) KindsFor(resource schema.GroupVersionResource) (gvk []schema.GroupVersionKind, err error) {
	allGVKs := []schema.GroupVersionKind{}
	for _, t := range m {
		gvks, err := t.KindsFor(resource)
		// ignore "no match" errors, but any other error percolates back up
		if IsNoMatchError(err) {
			continue
		}
		if err != nil {
			return nil, err
		}
		// walk the existing values to de-dup (linear scan; fine for the
		// small result sets mappers typically return)
		for _, curr := range gvks {
			found := false
			for _, existing := range allGVKs {
				if curr == existing {
					found = true
					break
				}
			}
			if !found {
				allGVKs = append(allGVKs, curr)
			}
		}
	}
	if len(allGVKs) == 0 {
		return nil, &NoResourceMatchError{PartialResource: resource}
	}
	return allGVKs, nil
}
// ResourceFor resolves the given partial resource to exactly one fully
// qualified resource. Errors from ResourcesFor are passed through; more
// than one match yields an AmbiguousResourceError.
func (m MultiRESTMapper) ResourceFor(resource schema.GroupVersionResource) (schema.GroupVersionResource, error) {
	all, err := m.ResourcesFor(resource)
	switch {
	case err != nil:
		return schema.GroupVersionResource{}, err
	case len(all) == 1:
		return all[0], nil
	default:
		return schema.GroupVersionResource{}, &AmbiguousResourceError{PartialResource: resource, MatchingResources: all}
	}
}
// KindFor resolves the given partial resource to exactly one kind. Errors
// from KindsFor are passed through; more than one match yields an
// AmbiguousResourceError.
func (m MultiRESTMapper) KindFor(resource schema.GroupVersionResource) (schema.GroupVersionKind, error) {
	all, err := m.KindsFor(resource)
	switch {
	case err != nil:
		return schema.GroupVersionKind{}, err
	case len(all) == 1:
		return all[0], nil
	default:
		return schema.GroupVersionKind{}, &AmbiguousResourceError{PartialResource: resource, MatchingKinds: all}
	}
}
// RESTMapping provides the REST mapping for the resource based on the
// kind and version. All wrapped mappers are consulted: if exactly one
// produced a mapping, it is returned even if other mappers errored.
// Multiple mappings yield an AmbiguousKindError; otherwise any collected
// non-"no match" errors are aggregated, and a NoKindMatchError is the
// final fallback.
func (m MultiRESTMapper) RESTMapping(gk schema.GroupKind, versions ...string) (*RESTMapping, error) {
	allMappings := []*RESTMapping{}
	errors := []error{}
	for _, t := range m {
		currMapping, err := t.RESTMapping(gk, versions...)
		// ignore "no match" errors, but any other error percolates back up
		if IsNoMatchError(err) {
			continue
		}
		if err != nil {
			errors = append(errors, err)
			continue
		}
		allMappings = append(allMappings, currMapping)
	}
	// if we got exactly one mapping, then use it even if other requested failed
	if len(allMappings) == 1 {
		return allMappings[0], nil
	}
	if len(allMappings) > 1 {
		var kinds []schema.GroupVersionKind
		for _, m := range allMappings {
			kinds = append(kinds, m.GroupVersionKind)
		}
		return nil, &AmbiguousKindError{PartialKind: gk.WithVersion(""), MatchingKinds: kinds}
	}
	if len(errors) > 0 {
		return nil, utilerrors.NewAggregate(errors)
	}
	return nil, &NoKindMatchError{GroupKind: gk, SearchedVersions: versions}
}
// RESTMappings returns all possible RESTMappings for the provided group kind,
// or an error if the type is not recognized. The mappings from all wrapped
// mappers are concatenated (no de-duplication); "no match" errors are
// skipped, other errors are aggregated, and an empty result yields a
// NoKindMatchError.
func (m MultiRESTMapper) RESTMappings(gk schema.GroupKind, versions ...string) ([]*RESTMapping, error) {
	var allMappings []*RESTMapping
	var errors []error
	for _, t := range m {
		currMappings, err := t.RESTMappings(gk, versions...)
		// ignore "no match" errors, but any other error percolates back up
		if IsNoMatchError(err) {
			continue
		}
		if err != nil {
			errors = append(errors, err)
			continue
		}
		allMappings = append(allMappings, currMappings...)
	}
	if len(errors) > 0 {
		return nil, utilerrors.NewAggregate(errors)
	}
	if len(allMappings) == 0 {
		return nil, &NoKindMatchError{GroupKind: gk, SearchedVersions: versions}
	}
	return allMappings, nil
}
| {
"pile_set_name": "Github"
} |
/*---------------------------------------------------------------------------------------------
* Copyright (c) Bentley Systems, Incorporated. All rights reserved.
* See LICENSE.md in the project root for license terms and full copyright notice.
*--------------------------------------------------------------------------------------------*/
/** @packageDocumentation
* @module Hooks
*/
import { useEffect, useState } from "react";
import { IModelConnection } from "@bentley/imodeljs-frontend";
import { SessionStateActionId } from "../redux/SessionState";
import { SyncUiEventArgs, SyncUiEventDispatcher } from "../syncui/SyncUiEventDispatcher";
import { UiFramework } from "../UiFramework";
/** React hook that maintains the active IModelConnection. For this hook to work properly the
 * IModelConnection must be set using UiFramework.setIModelConnection method. This also requires
 * that the host app includes the UiFramework reducer into its Redux store.
 * @beta
 */
export function useActiveIModelConnection(): IModelConnection | undefined {
  const [activeConnection, setActiveConnection] = useState(UiFramework.getIModelConnection());
  useEffect(() => {
    // Refresh local state whenever the session's IModelConnection is replaced.
    const handleSyncUiEvent = (args: SyncUiEventArgs): void => {
      // istanbul ignore else
      if (args.eventIds.has(SessionStateActionId.SetIModelConnection)) {
        setActiveConnection(UiFramework.getIModelConnection());
      }
    };
    SyncUiEventDispatcher.onSyncUiEvent.addListener(handleSyncUiEvent);
    return () => {
      SyncUiEventDispatcher.onSyncUiEvent.removeListener(handleSyncUiEvent);
    };
  }, [activeConnection]);
  return activeConnection;
}
| {
"pile_set_name": "Github"
} |
/**
 * Core admin script (utilities, form helpers and datagrid column parsing)
 * @author: OF
 * @version 1.0.0
 */
// Configure layui: base directory for extended modules, plus a version tag.
layui.config({
    base : webroot + "/static/plugins/of/admin/js/",// directory holding externally extended layui modules
    version : '1.0.0'
});
// Namespaces for utility helpers and button helpers used by the admin pages.
layui.utile={};
layui.buttion={};
/**
 * Dictionary translation helper: maps a raw value (or a comma-separated
 * list of values) to display labels, using the dictionary registered
 * under layui.sykDict[dict]. Labels with a css/style entry are wrapped
 * in a styled <span>.
 */
layui.utile.toDict = function(dict,value){
    var data = layui.sykDict[dict];
    var _value = "";
    if(!$.isEmpty(data) && !$.isEmpty(dict) && !$.isEmpty(value) && !$.isEmpty(data["labelField"]) && !$.isEmpty(data["valueField"])){
        var labelField = data["labelField"];
        var valueField = data["valueField"];
        var list=data["data"];
        // Separator between multiple labels; defaults to ","
        var spaceMode = data["spaceMode"];
        if($.isEmpty(spaceMode)){
            spaceMode=",";
        }
        if($.isNumeric(value)){
            analysis(value);
        }else if($.type(value) == "string"){
            // value may hold several entries separated by ","; translate each one
            $.each(value.split(','),function(i,e){
                analysis(e);
            });
        }
        // Look up a single value in the dictionary list and append its label
        // (optionally wrapped in a styled <span>) to the accumulated _value.
        function analysis(value){
            $.each(list,function(index,elem){
                if(elem[valueField] == value){
                    if(!$.isEmpty(_value)){
                        _value += spaceMode;
                    }
                    if(!$.isEmpty(elem[labelField])){
                        var css = elem["css"];// optional CSS class / inline style for the label
                        var style = elem["style"];
                        if(!$.isEmpty(css) || !$.isEmpty(style)){
                            _value += "<span class=\""+css+"\" style=\""+style+"\">"+elem[labelField]+"</span>";
                        }else{
                            _value += elem[labelField];
                        }
                    }
                    return false;
                }
            });
        }
        return _value;
    }
    return _value;
};
//jquery 插件
(function($){
/**
* 获取token信息
*/
var getToken = function ()
{
var _csrf_code=$('meta[name="_csrf_code"]').attr("content");
var _csrf_name=$('meta[name="_csrf_name"]').attr("content");
var token = {};
token[_csrf_name] = _csrf_code;
return token;
};
$.ajaxSetup({
headers : getToken(),
type: 'POST',
async: true,
dataType : "json",
timeout : 30000
});
/**
* 获取form表单数据
*/
$.fn.getFormData = function (isValid) {
var fieldElem = $(this).find('input,select,textarea'); //获取所有表单域
var data ={};
layui.each(fieldElem, function(index, item){
if(!item.name) return;
if(/^checkbox|radio$/.test(item.type) && !item.checked) return;
var value = item.value;
if(item.type == "checkbox"){//如果多选
if(data[item.name]){
value = data[item.name] + "," + value;
}
}
if(isValid)
{
//如果为true,只需要处理有数据的值
if(!$.isEmpty(value))
{
data[item.name] = value;
}
}
else
{
data[item.name] = value;
}
});
return data;
};
/**
* 设置form表单值
*/
$.fn.setFormData = function (data) {
if(!$.isEmpty(data))
{
$(this)[0].reset();
$(this).autofill(data);
}
};
/**
* 获取datagrid 列集合
*/
$.fn.getDatagridCols = function () {
var colArr = new Array();
var colsArr = new Array();
var formatArr = new Array();//需要格式化的集合
var datagrid_cols = $(this).next(".adminDatagridCols");
if(!$.isEmpty(datagrid_cols))
{
var data = {};
$.each(datagrid_cols.children(),function(i, n){
var _this = $(this);
var type = _this.attr("type");
if(!$.isEmpty(type) && type == "br"){//换行
colArr.push(colsArr);
colsArr = new Array();
data = {};
return true;
}
var toolbar = _this.attr("toolbar");
var col = {};
if(!$.isEmpty(_this.attr("align"))){
col["align"] = _this.attr("align");
}
if(!$.isEmpty(_this.attr("fixed"))){
col["fixed"] = _this.attr("fixed");
}
if(!$.isEmpty(_this.attr("style"))){
col["style"] = _this.attr("style");
}
if(!$.isEmpty(_this.attr("colspan"))){
col["colspan"] = _this.attr("colspan");
}
if(!$.isEmpty(_this.attr("rowspan"))){
col["rowspan"] = _this.attr("rowspan");
}
if($.isEmpty(toolbar)){//普通列
var field = _this.attr("field");
var title = _this.attr("title");
var width = _this.attr("width");
var sort = _this.attr("sort");
var templet = _this.attr("templet");
var checkbox = _this.attr("checkbox");
if(!$.isEmpty(type)){
col["type"] = type;
}
if(!$.isEmpty(field)){
col["field"] = field;
}
if(!$.isEmpty(title)){
col["title"] = title;
}
if(!$.isEmpty(width)){
col["width"] = width;
}
if(!$.isEmpty(sort)){
col["sort"] = sort;
}
if(!$.isEmpty(templet)){
col["templet"] = templet;
}
if(!$.isEmpty(checkbox)){
col["checkbox"] = checkbox;
}
if(!$.isEmpty(_this.attr("LAY_CHECKED"))){
col["LAY_CHECKED"] = _this.attr("LAY_CHECKED");
}
if(!$.isEmpty(_this.attr("edit"))){
col["edit"] = _this.attr("edit");
}
if(!$.isEmpty(_this.attr("event"))){
col["event"] = _this.attr("event");
}
//数据表格字典转换
var dict = _this.attr("dict");
if(!$.isEmpty(dict)){
var dict_type = _this.attr("dict_type");
formatArr.push({'dict':dict,'dict_type':dict_type});
//自定义模板
// col["templet"] = "<div>{{ ${sysUtile.getDictToData('"+dict+"',d."+field+")} }}</div>";
col["templet"] = "<div>{{ layui.utile.toDict('"+dict+"',d."+field+") }}</div>";
}
colsArr.push(col);
}else {//工具条
col["toolbar"] = toolbar;
var width = _this.attr("width");
if(!$.isEmpty(width)){
col["width"] = width;
}
var title = _this.attr("title");
if(!$.isEmpty(title)){
col["title"] = title;
}
colsArr.push(col);
}
});
colArr.push(colsArr);
}
data["colsArr"] = colArr;
data["formatArr"] = formatArr;
return data;
};
$.fn.autofill = function(data, options) {
var settings = {
findbyname: true,
restrict: true
},
self = this;
if ( options ) {
$.extend( settings, options );
}
return this.each(function() {
$.each( data, function(k, v) {
var selector, elt;
if ( settings.findbyname ) { // by name
selector = '[name="'+k+'"]';
elt = ( settings.restrict ) ? self.find( selector ) : $( selector );
if ( elt.length == 1 ) {
elt.val( ( elt.attr("type") == "checkbox" ) ? [v] : v );
} else if ( elt.length > 1 ) {
if(elt.attr("type") == "checkbox"){
if(v){
elt.val(v.split(','));
}
}else{
elt.val([v]);
}
} else {
selector = '[name="'+k+'[]"]';
elt = ( settings.restrict ) ? self.find( selector ) : $( selector );
elt.each(function(){
$(this).val(v);
});
}
} else { // by id
selector = '#'+k;
elt = ( settings.restrict ) ? self.find( selector ) : $( selector );
if ( elt.length == 1 ) {
elt.val( ( elt.attr("type") == "checkbox" ) ? [v] : v );
} else {
var radiofound = false;
// radio
elt = ( settings.restrict ) ? self.find( 'input:radio[name="'+k+'"]' ) : $( 'input:radio[name="'+k+'"]' );
elt.each(function(){
radiofound = true;
if ( this.value == v ) { this.checked = true; }
});
// multi checkbox
if ( !radiofound ) {
elt = ( settings.restrict ) ? self.find( 'input:checkbox[name="'+k+'[]"]' ) : $( 'input:checkbox[name="'+k+'[]"]' );
elt.each(function(){
$(this).val(v);
});
}
}
}
});
});
};
$.extend({
    // Returns true when the value is null, undefined, or the empty string.
    // Note: 0, false and NaN are NOT considered empty, matching the callers below.
    isEmpty: function(value) {
        if (value === null || value === undefined || value === '') {
            return true;
        }
        return false;
    },
    // Resolves a dot-separated path (e.g. "a.b.c") inside a plain object.
    // Returns the value found at the leaf, or defaultValue (when given and
    // non-empty) if the path is missing or the resolved value is empty.
    result: function(object, path, defaultValue) {
        var value = "";
        if (!$.isEmpty(object) && $.isObject(object) && !$.isEmpty(path)) {
            var paths = path.split('.');
            for (var i = 0; i < paths.length; i++) {
                object = object[paths[i]];
                if (i === paths.length - 1) {
                    value = object;
                }
                // Stop descending once we hit a non-object leaf;
                // any remaining path segments cannot be resolved.
                if (!$.isObject(object)) {
                    break;
                }
            }
        }
        if ($.isEmpty(value) && !$.isEmpty(defaultValue)) {
            value = defaultValue;
        }
        return value;
    },
    // Returns true for objects and functions (anything indexable by key).
    isObject: function(value) {
        var type = typeof value;
        return value != null && (type == 'object' || type == 'function');
    },
    // Returns true when `value` starts with `target`.
    startsWith: function(value, target) {
        return value.indexOf(target) == 0;
    },
    // Stores a value in sessionStorage under the given key.
    setSessionStorage: function(key, data) {
        sessionStorage.setItem(key, data);
    },
    // Reads a value from sessionStorage; returns "" when the key is absent.
    getSessionStorage: function(key) {
        // Single getItem call instead of two; getItem returns null for a missing key.
        var data = sessionStorage.getItem(key);
        return data === null ? "" : data;
    },
    // Removes a single key from sessionStorage.
    removeSessionStorage: function(key) {
        sessionStorage.removeItem(key);
    },
    // Clears all of sessionStorage.
    clearSessionStorage: function() {
        sessionStorage.clear();
    },
    // Generates an RFC 4122 version-4 style UUID using Math.random
    // (not cryptographically secure; fine for client-side identifiers).
    uuid: function() {
        return 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'.replace(/[xy]/g, function(c) {
            var r = Math.random() * 16 | 0, v = c == 'x' ? r : (r & 0x3 | 0x8);
            return v.toString(16);
        });
    }
});
}(jQuery)); | {
"pile_set_name": "Github"
} |
---
title: The Data Science Process
author: clone95
description: In this guide, you will understand the big picture of the Data Science project lifecycle, why and how to perform each step, from collecting the data to putting predictive models in production. You can then use this guide as a "checklist" of to-do steps to perform during your projects.
---
# Index
- [Motivation](#Motivation)
- [Frame the problem](#Frame-the-problem)
- [Collect and prepare the data](#Collect-and-prepare-the-data)
- [Select and train an ML model](#Select-and-train-an-ML-model)
- [Evaluate and fine tune](#Evaluate-and-fine-tune)
- [Launch and mantain the system](#Launch-and-mantain-the-system)
- [Conclusions](#Conclusions)
Let's dive right in!
## Motivation
You can find hundreds of online articles describing the process of developing a Data Science project.
In this article, Virgilio does not aim to give you the details of each phase, which will be dealt with in more technical guides,
but rather aims to give you an overview of the various steps, as well as a sort of checklist to keep in mind when starting a project.
::: tip
As you may have noticed, **the structure of Purgatorio reflects the entire Data Science process lifecycle**, with each _section_ corresponding to a macro phase, and each _guide_ within it corresponding to a sub-stage of the process. This content organization is designed to provide clarity to the structure of Purgatorio, clarity to the Data Science process, and to provide a useful checklist to consult whenever you start a new project.
:::
The design of Purgatorio is inspired by the checklist in the magnificent book
[Hands-on Machine Learning with Scikit-Learn, Keras, and TensorFlow](https://www.amazon.it/Hands-Machine-Learning-Scikit-learn-Tensorflow/dp/1492032646), which I would urge you to buy.
**It's worth its weight in bits!**
---
Let's recap what we mean by "the life cycle of the data science process" (from here on, for brevity, we will only call it "process").
A Data Science project is any project that aims to extract knowledge from data
(for example, predict the optimal interest rate for a loan, or how many customers will enter the store tomorrow),
and in doing so, use Machine Learning techniques.
These techniques can be of two types:
- Traditional Machine Learning techniques (statistical models like SVM, decision trees, or clustering)
- Deep Learning Techniques (neural networks)
For now don't worry about the difference between the two types, just know for now that the former has been around for decades,
while the latter is the subject of active experimental research and have only started to be used in the last 6-7 years.
**You are now ready for a short tour of the Data Science process, where I'll give you an overview of what your trip to Purgatorio will be like.**
## Frame the problem
Each Data Science project starts from a phase called Problem Statement or Problem Shaping, i.e. **the process of identifying the problem to be solved and the real-world advantages to be obtained.**
How do you do it? We must be ready to ask the right questions.
Usually, the first questions you ask yourself are:
- What is the scope of the project?
- What is the desired result?
- What data do we have available?
- Do we have evidence that they contain relevant information?
During this phase, close contact between you (the technician) and the domain expert is vital.
If you are developing a project for a company, presumably it will be the source of domain knowledge: learn as much as you can!
If you are developing a project for yourself, look for domain experts to ask questions!
Studying and practicing you will develop a "data mindset", which will help you a lot in this phase. An entire guide is dedicated to this issue.
When you have framed the type of problem you want to solve, it is time to choose objective metrics to evaluate the result of the project: depending on the type of problem addressed, we will see how different metrics provide indications on the performance of the Machine Learning models that we will develop.
Last but not least, it is necessary to make sure that the assumptions we make are correct, the data we have collected is not corrupted or biased, and how the system will be integrated with existing systems and then used.
## Collect and prepare the data
The indispensable raw material for Data Science projects is data (who would have guessed :D ).
::: warning
Collecting, organizing, and then cleaning them is often [the most _onerous phase_](https://fundersclub.com/blog/2017/06/29/hardest-part-ai-cleaning-your-data/) of the whole process.
:::
You need to understand:
- what data is sensitive and what is not
- what the data sources are
- how data have been collected
- how the data from various sources relate to each other
Once the data has been obtained, it is necessary to organize it effectively and **to keep raw version** of it so that you always have at your disposal the "ground truth" of our project.
In fact, a large part of the work of cleaning and preparing the data consists of steps (called pre-processing) that transform them: from raw data just collected, to clean data and ready to be analyzed by a model in the next macro-phase.
Maintaining the various versions of the data, to which the pre-processing steps have been applied, is vital to achieving _reproducible results_ and _maintainable systems_.
The information represented by the data is called "features" (in the simple case of tabular data, each attribute is a feature).
Once the raw data has been cleaned, the feature engineering phase often takes place in which existing data is combined to "suggest" to the models the relationships between them.
For example, if my data contains the date of purchase in a store, I can add the feature "day of the week", indicating with a value from 1 to 7 on which day we are. This could reveal very interesting relationships!
Often _the feature engineering phase is vital for traditional models_, while we will see that for Deep Learning models it is less important, as they have among their advantages to automatically extract relevant features from the data.
Finally, we will see in this section how to automate these "pipelines" of collection, cleaning, and preprocessing.
## Select and train an ML model
Once the data has been cleaned and prepared, it is time to choose an algorithm that must be "trained" on it, and whose output will be a predictive model.
This model can be a statistical model (even if all the models we will see apply statistical concepts) or a Deep Learning model.
Statistical models have been used for decades to predict the future, such as sales in the next quarter, or the climate in a given region.
In general, these models:
- They require statistical knowledge
- Require feature engineering
- They are "simple" in the sense that they are easy to interpret and debug
Deep Learning models (software programs called neural networks) are techniques that have been exploding in recent years, providing excellent performance on problems previously impossible to solve, in general:
- They require empirical and practical knowledge about training neural networks
- Do not necessarily require feature engineering (but may benefit from it)
- They are "complex" in the sense that they are difficult to interpret and debug.
- They are the only solutions in case of equally "complex" problems, such as the vision or understanding of natural language.
We will see in detail various types of statistical models and Deep Learning, with a particular focus on the latter, which over the years are proving a revolution and promise to overturn entire industries, as well as drastically improve the processes that characterize them.
Once the model that best solves our problem has been selected, the training phase takes place, where the model is shown many examples (our data), and its parameters are adjusted in order to obtain satisfactory performance.
## Evaluate and fine-tune
Once we set up the training process and trained the model, we are ready to evaluate it and understand what actual performance it would have in the face of data that it has never seen.
Therefore, before training, the data is divided between training data (those that the model will see) and test data that it has never seen (those on which it will be tested).
This phase is of vital importance to understand **how the model will perform in the real world**, and to understand _if it will actually be useful._
In this section, we will also understand how to choose the best parameters to train the models, so as to maximize their performance in the test phase.
Moreover, we will see how to compose together several predictive models to form a more effective one, and you will learn to use one of the most useful techniques in the real application of Deep Learning: _Transfer Learning_.
This concept consists of applying the knowledge already learned from a problem to a new problem, reducing the amount of data needed, and the computational costs for training.
We'll see how in practice you almost never train a model from scratch, but you take advantage of pre-trained models made available by large companies and researchers.
## Launch and mantain the system
Once the model is sufficiently tested and it's performing well, it can be used to make predictions.
In this section you will learn how to build models that can scale to hundreds or thousands of users, leveraging the power of the Cloud and the flexibility of the API Rest concept.
You will learn how to distribute your models across multiple machines, and monitor them carefully through metrics and alarms.
In fact, one of the biggest costs of Data Science projects (but we could say software in general) is maintenance.
Here are some questions you need to ask yourself when you put a model into production in the real world:
- **Do users use it**?
- Is the number of users using it controllable, or is there a slowdown?
- How certain is the model of predictions it makes?
- How do I make sure the new input data is appropriate?
- How do I ensure that they are properly preprocessed (yes, automatic pipelines, I'm looking at you)?
- Do the real-world data reflect the training data?
- How do real-world data change over time?
- How do I integrate the new knowledge I collect over time (i.e., the new data from the real world)?
- How do I ensure that the performance of the model does not degrade over time?
These and other questions will be covered in this section.
## Conclusions
You've just had a taste of everything we'll see in the next sections of Purgatorio, but remember there will be much more to learn!
Alongside these guides, you'll be given others to learn how to use libraries and frameworks used by organizations and governments, how to take advantage of the power and flexibility of Cloud vendors, how to read research papers, and much more!
Warm-up your fingers and brain and prepare a coffee (or some thousands...), the journey is about to begin!
| {
"pile_set_name": "Github"
} |
{
"author": "Dmitry Prokashev <[email protected]>",
"name": "natural-scroll",
"description": "Smoothly scroll to the desired position",
"version": "0.2.2",
"keywords": [
"scroll",
"scrolling",
"programmatically",
"menu",
"navigation"
],
"repository": {
"type": "git",
"url": "git://github.com/asvd/naturalScroll.git"
},
"browser":
{
"fs": false,
"child_process": false
},
"main": "naturalScroll.js",
"dependencies": {},
"devDependencies": {},
"optionalDependencies": {},
"engines": {
"node": "*"
}
}
| {
"pile_set_name": "Github"
} |
// Protocol Buffers for Go with Gadgets
//
// Copyright (c) 2013, The GoGo Authors. All rights reserved.
// http://github.com/gogo/protobuf
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
/*
Package gogoproto provides extensions for protocol buffers to achieve:
- fast marshalling and unmarshalling.
- peace of mind by optionally generating test and benchmark code.
- more canonical Go structures.
- less typing by optionally generating extra helper code.
- goprotobuf compatibility
More Canonical Go Structures
A lot of time working with a goprotobuf struct will lead you to a place where you create another struct that is easier to work with and then have a function to copy the values between the two structs.
You might also find that basic structs that started their life as part of an API need to be sent over the wire. With gob, you could just send it. With goprotobuf, you need to make a parallel struct.
Gogoprotobuf tries to fix these problems with the nullable, embed, customtype and customname field extensions.
- nullable, if false, a field is generated without a pointer (see warning below).
- embed, if true, the field is generated as an embedded field.
- customtype, It works with the Marshal and Unmarshal methods, to allow you to have your own types in your struct, but marshal to bytes. For example, custom.Uuid or custom.Fixed128
- customname (beta), Changes the generated fieldname. This is especially useful when generated methods conflict with fieldnames.
- casttype (beta), Changes the generated fieldtype. All generated code assumes that this type is castable to the protocol buffer field type. It does not work for structs or enums.
- castkey (beta), Changes the generated fieldtype for a map key. All generated code assumes that this type is castable to the protocol buffer field type. Only supported on maps.
- castvalue (beta), Changes the generated fieldtype for a map value. All generated code assumes that this type is castable to the protocol buffer field type. Only supported on maps.
Warning about nullable: According to the Protocol Buffer specification, you should be able to tell whether a field is set or unset. With the option nullable=false this feature is lost, since your non-nullable fields will always be set. It can be seen as a layer on top of Protocol Buffers, where before and after marshalling all non-nullable fields are set and they cannot be unset.
Let us look at:
github.com/gogo/protobuf/test/example/example.proto
for a quicker overview.
The following message:
package test;
import "github.com/gogo/protobuf/gogoproto/gogo.proto";
message A {
optional string Description = 1 [(gogoproto.nullable) = false];
optional int64 Number = 2 [(gogoproto.nullable) = false];
optional bytes Id = 3 [(gogoproto.customtype) = "github.com/gogo/protobuf/test/custom.Uuid", (gogoproto.nullable) = false];
}
Will generate a go struct which looks a lot like this:
type A struct {
Description string
Number int64
Id github_com_gogo_protobuf_test_custom.Uuid
}
You will see there are no pointers, since all fields are non-nullable.
You will also see a custom type which marshals to a string.
Be warned it is your responsibility to test your custom types thoroughly.
You should think of every possible empty and nil case for your marshaling, unmarshaling and size methods.
Next we will embed the message A in message B.
message B {
optional A A = 1 [(gogoproto.nullable) = false, (gogoproto.embed) = true];
repeated bytes G = 2 [(gogoproto.customtype) = "github.com/gogo/protobuf/test/custom.Uint128", (gogoproto.nullable) = false];
}
See below that A is embedded in B.
type B struct {
A
G []github_com_gogo_protobuf_test_custom.Uint128
}
Also see the repeated custom type.
type Uint128 [2]uint64
Next we will create a custom name for one of our fields.
message C {
optional int64 size = 1 [(gogoproto.customname) = "MySize"];
}
See below that the field's name is MySize and not Size.
type C struct {
MySize *int64
}
This is useful when having a protocol buffer message with a field name which conflicts with a generated method.
As an example, having a field name size and using the sizer plugin to generate a Size method will cause a go compiler error.
Using customname you can fix this error without changing the field name.
This is typically useful when working with a protocol buffer that was designed before these methods and/or the go language were available.
Gogoprotobuf also has some more subtle changes, these could be changed back:
- the generated package name for imports do not have the extra /filename.pb,
but are actually the imports specified in the .proto file.
Gogoprotobuf also has lost some features which should be brought back with time:
- Marshalling and unmarshalling with reflect and without the unsafe package,
this requires work in pointer_reflect.go
Why does nullable break protocol buffer specifications:
The protocol buffer specification states, somewhere, that you should be able to tell whether a
field is set or unset. With the option nullable=false this feature is lost,
since your non-nullable fields will always be set. It can be seen as a layer on top of
protocol buffers, where before and after marshalling all non-nullable fields are set
and they cannot be unset.
Goprotobuf Compatibility:
Gogoprotobuf is compatible with Goprotobuf, because it is compatible with protocol buffers.
Gogoprotobuf generates the same code as goprotobuf if no extensions are used.
The enumprefix, getters and stringer extensions can be used to remove some of the unnecessary code generated by goprotobuf:
- gogoproto_import, if false, the generated code imports github.com/golang/protobuf/proto instead of github.com/gogo/protobuf/proto.
- goproto_enum_prefix, if false, generates the enum constant names without the messagetype prefix
- goproto_enum_stringer (experimental), if false, the enum is generated without the default string method, this is useful for rather using enum_stringer, or allowing you to write your own string method.
- goproto_getters, if false, the message is generated without get methods, this is useful when you would rather want to use face
- goproto_stringer, if false, the message is generated without the default string method, this is useful for rather using stringer, or allowing you to write your own string method.
- goproto_extensions_map (beta), if false, the extensions field is generated as type []byte instead of type map[int32]proto.Extension
- goproto_unrecognized (beta), if false, XXX_unrecognized field is not generated. This is useful in conjunction with gogoproto.nullable=false, to generate structures completely devoid of pointers and reduce GC pressure at the cost of losing information about unrecognized fields.
- goproto_registration (beta), if true, the generated files will register all messages and types against both gogo/protobuf and golang/protobuf. This is necessary when using third-party packages which read registrations from golang/protobuf (such as the grpc-gateway).
Less Typing and Peace of Mind is explained in their specific plugin folders godoc:
- github.com/gogo/protobuf/plugin/<extension_name>
If you do not use any of these extension the code that is generated
will be the same as if goprotobuf has generated it.
The most complete way to see examples is to look at
github.com/gogo/protobuf/test/thetest.proto
Gogoprototest is a separate project,
because we want to keep gogoprotobuf independent of goprotobuf,
but we still want to test it thoroughly.
*/
package gogoproto
| {
"pile_set_name": "Github"
} |
/*
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS HEADER.
*
* Copyright 1997-2007 Sun Microsystems, Inc. All rights reserved.
*
* The contents of this file are subject to the terms of either the GNU
* General Public License Version 2 only ("GPL") or the Common
* Development and Distribution License("CDDL") (collectively, the
* "License"). You may not use this file except in compliance with the
* License. You can obtain a copy of the License at
* http://www.netbeans.org/cddl-gplv2.html
* or nbbuild/licenses/CDDL-GPL-2-CP. See the License for the
* specific language governing permissions and limitations under the
* License. When distributing the software, include this License Header
* Notice in each file and include the License file at
* nbbuild/licenses/CDDL-GPL-2-CP. Sun designates this
* particular file as subject to the "Classpath" exception as provided
* by Sun in the GPL Version 2 section of the License file that
* accompanied this code. If applicable, add the following below the
* License Header, with the fields enclosed by brackets [] replaced by
* your own identifying information:
* "Portions Copyrighted [year] [name of copyright owner]"
*
* Contributor(s):
*
* The Original Software is NetBeans. The Initial Developer of the Original
* Software is Sun Microsystems, Inc. Portions Copyright 1997-2006 Sun
* Microsystems, Inc. All Rights Reserved.
* Portions Copyright 2008 Alexander Coles (Ikonoklastik Productions).
*
* If you wish your version of this file to be governed by only the CDDL
* or only the GPL Version 2, indicate your decision by adding
* "[Contributor] elects to include this software in this distribution
* under the [CDDL or GPL Version 2] license." If you do not indicate a
* single choice of license, a recipient has the option to distribute
* your version of this file under either the CDDL, the GPL Version 2 or
* to extend the choice of license to its licensees as provided above.
* However, if you add GPL Version 2 code and therefore, elected the GPL
* Version 2 license, then the option applies only if the new code is
* made subject to such option by the copyright holder.
*/
package org.nbgit.ui.update;
import java.io.File;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
import java.util.Vector;
import javax.swing.ComboBoxModel;
import javax.swing.DefaultComboBoxModel;
import javax.swing.SwingUtilities;
import org.netbeans.api.progress.ProgressHandle;
import org.netbeans.api.progress.ProgressHandleFactory;
import org.nbgit.GitModuleConfig;
import org.nbgit.util.GitCommand;
import org.openide.util.NbBundle;
import org.openide.util.RequestProcessor;
/**
*
* @author Padraig O'Briain
*/
public class RevertModificationsPanel extends javax.swing.JPanel {

    private File repository;
    private File[] revertFiles;
    private RequestProcessor.Task refreshViewTask;
    // Background thread currently refreshing the view; used to clear its interrupt status.
    private Thread refreshViewThread;
    private static final RequestProcessor rp = new RequestProcessor("GitRevert", 1); // NOI18N
    // Upper bound on the number of revisions fetched for the revisions combo box.
    private static final int GIT_REVERT_TARGET_LIMIT = 100;
    // Pairs of {display label, revision id}, filled in by refreshRevisions().
    // NOTE(review): written from the background thread and read from the EDT in
    // getSelectedRevision(); presumably the dialog is only confirmed after the
    // refresh completes — confirm against the caller.
    private List<String[]> revisionMap;

    /**
     * Creates a new RevertModificationsPanel and schedules an asynchronous
     * refresh of the revision list on a background RequestProcessor.
     *
     * @param repo  root of the Git repository the files belong to
     * @param files files whose modifications may be reverted
     */
    public RevertModificationsPanel(File repo, File[] files) {
        repository = repo;
        revertFiles = files;
        refreshViewTask = rp.create(new RefreshViewTask());
        initComponents();
        refreshViewTask.schedule(0);
    }

    /** @return the files whose modifications are to be reverted */
    public File[] getRevertFiles() {
        return revertFiles;
    }

    /** @return true when the user requested a backup before reverting */
    public boolean isBackupRequested() {
        return doBackupChxBox.isSelected();
    }

    /**
     * Returns the revision id selected in the combo box, translating the
     * displayed label back to the revision id via {@code revisionMap}, or
     * {@code null} when only a placeholder entry is selected.
     *
     * @return the selected revision id, or null if none is available
     */
    public String getSelectedRevision() {
        String revStr = (String) revisionsComboBox.getSelectedItem();
        if (revStr != null) {
            if (revStr.equals(NbBundle.getMessage(RevertModificationsPanel.class, "MSG_Revision_Default")) || // NOI18N
                    revStr.equals(NbBundle.getMessage(RevertModificationsPanel.class, "MSG_Fetching_Revisions"))) // NOI18N
            {
                revStr = null;
            } else if (revisionMap != null) {
                for (String[] entry : revisionMap) {
                    if (entry[0].equals(revStr)) {
                        revStr = entry[1];
                        break;
                    }
                }
            }
        }
        return revStr;
    }

    /** This method is called from within the constructor to
     * initialize the form.
     * WARNING: Do NOT modify this code. The content of this method is
     * always regenerated by the Form Editor.
     */
    // <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
    private void initComponents() {
        revisionsLabel = new javax.swing.JLabel();
        revisionsComboBox = new javax.swing.JComboBox();
        jLabel1 = new javax.swing.JLabel();
        jLabel2 = new javax.swing.JLabel();
        jPanel1 = new javax.swing.JPanel();
        doBackupChxBox = new javax.swing.JCheckBox();
        revisionsLabel.setLabelFor(revisionsComboBox);
        org.openide.awt.Mnemonics.setLocalizedText(revisionsLabel, org.openide.util.NbBundle.getMessage(RevertModificationsPanel.class, "RevertModificationsPanel.revisionsLabel.text")); // NOI18N
        jLabel1.setFont(new java.awt.Font("Dialog", 1, 11));
        org.openide.awt.Mnemonics.setLocalizedText(jLabel1, org.openide.util.NbBundle.getMessage(RevertModificationsPanel.class, "RevertModificationsPanel.infoLabel.text")); // NOI18N
        jLabel2.setForeground(new java.awt.Color(153, 153, 153));
        org.openide.awt.Mnemonics.setLocalizedText(jLabel2, org.openide.util.NbBundle.getMessage(RevertModificationsPanel.class, "RevertModificationsPanel.infoLabel2.text")); // NOI18N
        jPanel1.setBorder(javax.swing.BorderFactory.createTitledBorder("Options"));
        org.openide.awt.Mnemonics.setLocalizedText(doBackupChxBox, org.openide.util.NbBundle.getMessage(RevertModificationsPanel.class, "RevertModificationsPanel.doBackupChxBox.text")); // NOI18N
        org.jdesktop.layout.GroupLayout jPanel1Layout = new org.jdesktop.layout.GroupLayout(jPanel1);
        jPanel1.setLayout(jPanel1Layout);
        jPanel1Layout.setHorizontalGroup(
            jPanel1Layout.createParallelGroup(org.jdesktop.layout.GroupLayout.LEADING)
            .add(jPanel1Layout.createSequentialGroup()
                .addContainerGap()
                .add(doBackupChxBox)
                .addContainerGap(159, Short.MAX_VALUE))
        );
        jPanel1Layout.setVerticalGroup(
            jPanel1Layout.createParallelGroup(org.jdesktop.layout.GroupLayout.LEADING)
            .add(jPanel1Layout.createSequentialGroup()
                .add(doBackupChxBox)
                .addContainerGap(org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
        );
        doBackupChxBox.getAccessibleContext().setAccessibleDescription(org.openide.util.NbBundle.getMessage(RevertModificationsPanel.class, "ACSD_doBackupChxBox")); // NOI18N
        org.jdesktop.layout.GroupLayout layout = new org.jdesktop.layout.GroupLayout(this);
        this.setLayout(layout);
        layout.setHorizontalGroup(
            layout.createParallelGroup(org.jdesktop.layout.GroupLayout.LEADING)
            .add(layout.createSequentialGroup()
                .add(layout.createParallelGroup(org.jdesktop.layout.GroupLayout.TRAILING)
                    .add(org.jdesktop.layout.GroupLayout.LEADING, layout.createSequentialGroup()
                        .add(47, 47, 47)
                        .add(revisionsLabel)
                        .addPreferredGap(org.jdesktop.layout.LayoutStyle.RELATED)
                        .add(revisionsComboBox, 0, 334, Short.MAX_VALUE))
                    .add(org.jdesktop.layout.GroupLayout.LEADING, layout.createSequentialGroup()
                        .addContainerGap()
                        .add(layout.createParallelGroup(org.jdesktop.layout.GroupLayout.LEADING)
                            .add(jLabel2)
                            .add(jLabel1, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, 515, Short.MAX_VALUE)
                            .add(jPanel1, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))))
                .addContainerGap())
        );
        layout.setVerticalGroup(
            layout.createParallelGroup(org.jdesktop.layout.GroupLayout.LEADING)
            .add(layout.createSequentialGroup()
                .add(jLabel1, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE, 25, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE)
                .add(4, 4, 4)
                .add(jLabel2)
                .add(29, 29, 29)
                .add(layout.createParallelGroup(org.jdesktop.layout.GroupLayout.BASELINE)
                    .add(revisionsLabel)
                    .add(revisionsComboBox, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE))
                .addPreferredGap(org.jdesktop.layout.LayoutStyle.UNRELATED)
                .add(jPanel1, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE, org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, org.jdesktop.layout.GroupLayout.PREFERRED_SIZE)
                .addContainerGap(org.jdesktop.layout.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
        );
        revisionsComboBox.getAccessibleContext().setAccessibleDescription(org.openide.util.NbBundle.getMessage(RevertModificationsPanel.class, "ACSD_revisionsComboBox")); // NOI18N
    }// </editor-fold>//GEN-END:initComponents

    /**
     * Fetches the data needed by the panel's models.
     * Must NOT be run from AWT: it does blocking work (config lookup and, via
     * refreshRevisions(), a Git query). All Swing component mutations are
     * dispatched back to the event dispatch thread with
     * SwingUtilities.invokeLater, since Swing components must only be touched
     * on the EDT.
     */
    private void setupModels()
    {
        // XXX attach Cancelable hook
        final ProgressHandle ph = ProgressHandleFactory.createHandle(NbBundle.getMessage(RevertModificationsPanel.class, "MSG_Refreshing_Revert_Versions")); // NOI18N
        try {
            final boolean doBackup = GitModuleConfig.getDefault().getBackupOnRevertModifications();
            // Apply the initial (placeholder) UI state on the EDT.
            SwingUtilities.invokeLater(new Runnable() {
                public void run()
                {
                    doBackupChxBox.setSelected(doBackup);
                    Set<String> initialRevsSet = new LinkedHashSet<String>();
                    initialRevsSet.add(NbBundle.getMessage(RevertModificationsPanel.class, "MSG_Fetching_Revisions")); // NOI18N
                    ComboBoxModel targetsModel = new DefaultComboBoxModel(new Vector<String>(initialRevsSet));
                    revisionsComboBox.setModel(targetsModel);
                }
            });
            refreshViewThread = Thread.currentThread();
            Thread.interrupted(); // clear interupted status
            ph.start();
            refreshRevisions();
        } finally {
            SwingUtilities.invokeLater(new Runnable() {
                public void run()
                {
                    ph.finish();
                    refreshViewThread = null;
                }
            });
        }
    }

    /**
     * Queries Git for the revisions of the files being reverted (blocking;
     * runs on the background thread) and installs them into the combo box
     * on the EDT.
     */
    private void refreshRevisions()
    {
        revisionMap = GitCommand.getRevisionsForFile(repository, revertFiles, GIT_REVERT_TARGET_LIMIT);

        final Set<String> targetRevsSet = new LinkedHashSet<String>();
        if (revisionMap == null) {
            // No revisions found; show the default placeholder entry.
            targetRevsSet.add(NbBundle.getMessage(RevertModificationsPanel.class, "MSG_Revision_Default"));
        } else {
            for (String[] entry : revisionMap) {
                targetRevsSet.add(entry[0]);
            }
        }
        // Combo box mutations must happen on the EDT.
        SwingUtilities.invokeLater(new Runnable() {
            public void run()
            {
                revisionsComboBox.setModel(new DefaultComboBoxModel(new Vector<String>(targetRevsSet)));
                if (targetRevsSet.size() > 0) {
                    revisionsComboBox.setSelectedIndex(0);
                }
            }
        });
    }

    /** Background task that populates the panel's models off the EDT. */
    private class RefreshViewTask implements Runnable {
        public void run()
        {
            setupModels();
        }
    }

    // Variables declaration - do not modify//GEN-BEGIN:variables
    private javax.swing.JCheckBox doBackupChxBox;
    private javax.swing.JLabel jLabel1;
    private javax.swing.JLabel jLabel2;
    private javax.swing.JPanel jPanel1;
    private javax.swing.JComboBox revisionsComboBox;
    private javax.swing.JLabel revisionsLabel;
    // End of variables declaration//GEN-END:variables
}
| {
"pile_set_name": "Github"
} |
$NetBSD: distinfo,v 1.2 2015/11/03 03:28:42 agc Exp $
SHA1 (Proc-WaitStat-1.00.tar.gz) = 2b3c56f1748fbcbf6e164268834adba5865527b4
RMD160 (Proc-WaitStat-1.00.tar.gz) = 9a6ce40c56123e0a5397b4e05e7ce0eddf63344b
SHA512 (Proc-WaitStat-1.00.tar.gz) = 7abc1c3771ec560e6138cc7709ca4527a34c897067e13afd57d096d664d6d11a1d80898a955404d8b583972eae8c80689fd8421adaee9bed2601e47afe9ef235
Size (Proc-WaitStat-1.00.tar.gz) = 3223 bytes
| {
"pile_set_name": "Github"
} |
// Microsoft Visual C++ generated resource script.
//
#include "resource.h"
#define APSTUDIO_READONLY_SYMBOLS
/////////////////////////////////////////////////////////////////////////////
//
// Generated from the TEXTINCLUDE 2 resource.
//
#include "winres.h"
/////////////////////////////////////////////////////////////////////////////
#undef APSTUDIO_READONLY_SYMBOLS
/////////////////////////////////////////////////////////////////////////////
// 中文(简体,中国) resources
#if !defined(AFX_RESOURCE_DLL) || defined(AFX_TARG_CHS)
LANGUAGE LANG_CHINESE, SUBLANG_CHINESE_SIMPLIFIED
#ifdef APSTUDIO_INVOKED
/////////////////////////////////////////////////////////////////////////////
//
// TEXTINCLUDE
//
1 TEXTINCLUDE
BEGIN
"resource.h\0"
END
2 TEXTINCLUDE
BEGIN
"#include ""winres.h""\r\n"
"\0"
END
3 TEXTINCLUDE
BEGIN
"\r\n"
"\0"
END
#endif // APSTUDIO_INVOKED
/////////////////////////////////////////////////////////////////////////////
//
// Icon
//
// Icon with lowest ID value placed first to ensure application icon
// remains consistent on all systems.
IDR_MAINFRAME ICON "Res\\transwnd.ico"
/////////////////////////////////////////////////////////////////////////////
//
// ZIPRES
//
//IDR_ZIPRES ZIPRES "Res\\transwnd.zip"
#endif // 中文(简体,中国) resources
/////////////////////////////////////////////////////////////////////////////
#ifndef APSTUDIO_INVOKED
/////////////////////////////////////////////////////////////////////////////
//
// Generated from the TEXTINCLUDE 3 resource.
//
/////////////////////////////////////////////////////////////////////////////
#endif // not APSTUDIO_INVOKED
| {
"pile_set_name": "Github"
} |
向井杏最新番号
【MOBSP-022】夜のおかずシリーズ真正中出しベストセレクション12
【OKAX-008】100%真正ガチ中出し100連発!大輪姦イカセ4時間SPECIAL
【SSR-039】憧れの競泳水着美人インストラクターは生徒のモッコリ股間に敏感に欲情して密かに誘う!!
【GAR-382】最強肉食ギャル伝説 瀬名あゆむプレミアムBEST
【GAR-363】超ギャル Wブチギレ手コキ!!
【GAR-356】3姉妹のギャルの姉達が両親の留守中僕に性的イタズラをしてきてさらに童貞まで奪われてしまった!!
【GAR-357】トイレメーカーに就職したらドS上司たちの放尿便器奴隷にされた僕
【GAR-354】ギャルブチギレ窒息顔騎制裁!!
【MOBAO-023】絶叫アクメ真正中出し輪姦パーティー 向井杏
【GAR-349】チ◯ポを支配する女生徒たち 射精管理学園 2
【TJT-013】東京家出女子 3 向井杏
【YRZ-074】人間観察ドキュメント Q&A 11
【KDG-018】素人隙まん娘 vol.7
【SCH-007】赤坂高級回春マッサージ 2
【YRZ-070】連れコンお泊り大作戦!! Vol.8 同じバイト仲良し3人
【TJT-011】「はじめまして◆向井杏です!!」</a>2013-02-01プレステージ$$$密着120分钟 | {
"pile_set_name": "Github"
} |
/*
* Copyright 2016 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.gwt.dev.util.arg;
/**
* Controls what local address to bind to.
*/
public interface OptionBindAddress {

  /** Returns the local address the server should bind to. */
  String getBindAddress();

  /**
   * Returns the address used to reach the bound server.
   * NOTE(review): presumably what clients/browsers connect to when the bind
   * address is a wildcard — confirm against the option implementation.
   */
  String getConnectAddress();

  /** Sets the local address the server should bind to. */
  void setBindAddress(String bindAddress);

  /** Sets the address used to reach the bound server. */
  void setConnectAddress(String connectAddress);
}
| {
"pile_set_name": "Github"
} |
<?php declare(strict_types=1);
/**
* Part of Windwalker project.
*
* @copyright Copyright (C) 2019 .
* @license LGPL-2.0-or-later
*/
namespace Windwalker\Structure\Format;
use HJSON\HJSONParser;
use HJSON\HJSONStringifier;
/**
* The HjsonFormat class.
*
* @since 3.5.4
*/
/**
 * HJSON structure formatter.
 *
 * Bridges the Windwalker Structure package to the hjson/hjson-php parser and
 * stringifier so structures can be dumped to, and loaded from, HJSON text.
 *
 * @since 3.5.4
 */
class HjsonFormat implements FormatInterface
{
    /**
     * Converts an object into a formatted HJSON string.
     *
     * @param object $struct Data source object.
     * @param array $options Options forwarded to the HJSON stringifier.
     *
     * @return string Formatted string.
     *
     * @since 2.0
     */
    public static function structToString($struct, array $options = [])
    {
        $stringifier = new HJSONStringifier();

        return $stringifier->stringify($struct, $options);
    }

    /**
     * Converts a formatted HJSON string into an object.
     *
     * @param string $data Formatted string.
     * @param array $options Options forwarded to the HJSON parser.
     *
     * @return object Data object.
     *
     * @since 2.0
     */
    public static function stringToStruct($data, array $options = [])
    {
        $parser = new HJSONParser();

        return $parser->parse($data, $options);
    }
}
| {
"pile_set_name": "Github"
} |
/*
* Copyright (c) 2014 Eran Pe'er.
*
* This program is made available under the terms of the MIT License.
*
* Created on Mar 10, 2014
*/
#include <string>
#include "tpunit++.hpp"
#include "fakeit.hpp"
using namespace fakeit;
// Test fixture exercising FakeIt's destructor-mocking support: faking,
// stubbing (When / assignment), spying and verifying virtual destructor calls.
struct DtorMocking : tpunit::TestFixture
{
    DtorMocking() :
        TestFixture(
            TEST(DtorMocking::mock_virtual_dtor_with_fake), //
            TEST(DtorMocking::mock_virtual_dtor_with_when),
            TEST(DtorMocking::mock_virtual_dtor_by_assignment),
            TEST(DtorMocking::call_dtor_without_delete),
            TEST(DtorMocking::spy_dtor),
            TEST(DtorMocking::production_takes_ownwership_with_uniqe_ptr)//
        )
    {
    }

    struct SomeInterface
    {
        virtual ~SomeInterface() = default;
    };

    // Fake(Dtor(mock)) turns the destructor into a recorded no-op, so the
    // same mock instance can be deleted repeatedly without crashing.
    void mock_virtual_dtor_with_fake() {
        Mock<SomeInterface> mock;
        Fake(Dtor(mock));
        SomeInterface * i = &(mock.get());
        delete i;
        delete i; // second delete should not throw exception
        Verify(Dtor(mock)).Twice();
        ASSERT_THROW(Verify(Dtor(mock)).Once(), fakeit::VerificationException);
    }

    // When(Dtor(mock)) stubs successive destructor calls: first Return()
    // (no-op), then the Do lambda which increments the counter.
    void mock_virtual_dtor_with_when() {
        int a = 0;
        Mock<SomeInterface> mock;
        When(Dtor(mock)).Return().Do([&](){a++; });
        SomeInterface * i = &(mock.get());
        delete i; // first call: Return() -> do nothing
        delete i; // second call: Do lambda -> a++
        ASSERT_EQUAL(1, a);
    }

    // Assigning a lambda to Dtor(mock) installs it for EVERY destructor call,
    // so both deletes below run the lambda.
    void mock_virtual_dtor_by_assignment() {
        int a = 0;
        Mock<SomeInterface> mock;
        Dtor(mock) = [&](){a++; };
        SomeInterface * i = &(mock.get());
        delete i; // a++ (assigned lambda runs on every call)
        delete i; // a++
        ASSERT_EQUAL(2, a);
    }

    // A faked destructor lets production code own the mock through a smart
    // pointer; the unique_ptr's delete must not blow up at scope exit.
    void production_takes_ownwership_with_uniqe_ptr() {
        Mock<SomeInterface> mock;
        Fake(Dtor(mock));
        SomeInterface * i = &mock.get();
        std::unique_ptr<SomeInterface> ptr(i);
    }

    // Explicitly invoking the destructor (placement-style, no delete) is
    // still recorded exactly once.
    void call_dtor_without_delete() {
        Mock<SomeInterface> mock;
        Fake(Dtor(mock));
        SomeInterface * i = &mock.get();
        i->~SomeInterface();
        Verify(Dtor(mock)).Once();
        ASSERT_THROW(Verify(Dtor(mock)).Twice(), fakeit::VerificationException);
    }

    struct A {
        virtual ~A(){}
    };

    // Spy(Dtor(mock)) records the destructor call while still running the
    // real destructor of the spied instance.
    void spy_dtor() {
        A a;
        Mock<A> mock(a);
        Spy(Dtor(mock));
        A * i = &mock.get();
        delete i;
        Verify(Dtor(mock)).Once();
        ASSERT_THROW(Verify(Dtor(mock)).Twice(), fakeit::VerificationException);
    }

} __DtorMocking;
| {
"pile_set_name": "Github"
} |
<testcase>
<info>
<keywords>
FTP
RETR
LIST
wildcardmatch
ftplistparser
flaky
</keywords>
</info>
#
# Server-side
<reply>
<data>
</data>
</reply>
# Client-side
<client>
<server>
ftp
</server>
<tool>
lib574
</tool>
<name>
FTP wildcard download - changed fnmatch, 2x perform (DOS LIST response)
</name>
<command>
"ftp://%HOSTIP:%FTPPORT/fully_simulated/DOS/*.txt"
</command>
</client>
############################################
# Verify data after the test has been "shot"
<verify>
<errorcode>
0
</errorcode>
# THERE SHOULD NOT BE "SIZE"! and one "USER/PASS"
<protocol>
USER anonymous
PASS [email protected]
PWD
CWD fully_simulated
CWD DOS
EPSV
TYPE A
LIST
EPSV
TYPE I
RETR chmod1
EPSV
RETR chmod2
EPSV
RETR chmod3
EPSV
RETR empty_file.dat
EPSV
RETR file.txt
EPSV
RETR someothertext.txt
CWD /
CWD fully_simulated
CWD DOS
EPSV
TYPE A
LIST
EPSV
TYPE I
RETR chmod1
EPSV
RETR chmod2
EPSV
RETR chmod3
EPSV
RETR empty_file.dat
EPSV
RETR file.txt
EPSV
RETR someothertext.txt
QUIT
</protocol>
<stdout>
This file should have permissions 444
This file should have permissions 666
This file should have permissions 777
This is content of file "file.txt"
Some junk ;-) This file does not really exist.
This file should have permissions 444
This file should have permissions 666
This file should have permissions 777
This is content of file "file.txt"
Some junk ;-) This file does not really exist.
</stdout>
</verify>
</testcase>
| {
"pile_set_name": "Github"
} |
#pragma once

#ifdef __cplusplus
extern "C"
{
#endif

/*
 * Resolves the base (load) address of the module named module_filename,
 * storing it in *base. NOTE(review): the role of `handle` (dlopen handle?)
 * and the exact return-code convention (0 == success?) are not visible
 * here — confirm against the implementation.
 */
int get_module_base_address(char const *module_filename, void *handle, void **base);

/*
 * Redirects calls to function_name inside the loaded ELF library identified
 * by library_filename / library_address to substitution_address.
 * NOTE(review): presumably patches the library's relocation (GOT/PLT)
 * entries and returns the address of the original function so the hook can
 * be undone — confirm against the implementation.
 */
void *elf_hook(char const *library_filename, void const *library_address, char const *function_name, void const *substitution_address);

#ifdef __cplusplus
}
#endif
| {
"pile_set_name": "Github"
} |
{
"extends": "../../../tsconfig.json",
"compilerOptions": {
"outDir": "../build",
"module": "commonjs",
"isolatedModules": false
},
"include": ["./**/*.ts", "../src/**/*.ts"]
}
| {
"pile_set_name": "Github"
} |
package com.vaadin.v7.tests.data.converter;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import org.junit.Test;
import com.vaadin.v7.data.util.converter.Converter.ConversionException;
import com.vaadin.v7.data.util.converter.StringToIntegerConverter;
/**
 * Unit tests for {@link StringToIntegerConverter}: null/empty handling,
 * out-of-range rejection, and plain value conversion.
 */
public class StringToIntegerConverterTest {

    StringToIntegerConverter converter = new StringToIntegerConverter();

    /** A null presentation value must convert to a null model value. */
    @Test
    public void testNullConversion() {
        assertEquals(null, converter.convertToModel(null, Integer.class, null));
    }

    /** An empty string must convert to null rather than zero. */
    @Test
    public void testEmptyStringConversion() {
        assertEquals(null, converter.convertToModel("", Integer.class, null));
    }

    /**
     * Values outside the int range (doubled int and long extremes) must all
     * be rejected with a ConversionException; accepting any one fails.
     */
    @Test
    public void testValueOutOfRange() {
        Double[] values = { Integer.MAX_VALUE * 2.0, Integer.MIN_VALUE * 2.0,
                Long.MAX_VALUE * 2.0, Long.MIN_VALUE * 2.0 };
        boolean accepted = false;
        for (Number value : values) {
            try {
                // %.0f renders the double as a plain integer literal
                converter.convertToModel(String.format("%.0f", value),
                        Integer.class, null);
                accepted = true;
            } catch (ConversionException expected) {
                // expected for every out-of-range value
            }
        }
        assertFalse("Accepted value outside range of int", accepted);
    }

    /** An in-range numeric string converts to the matching Integer. */
    @Test
    public void testValueConversion() {
        assertEquals(Integer.valueOf(10),
                converter.convertToModel("10", Integer.class, null));
    }
}
| {
"pile_set_name": "Github"
} |
/*=============================================================================
Copyright (c) 2001-2007 Joel de Guzman
Distributed under the Boost Software License, Version 1.0. (See accompanying
file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
==============================================================================*/
#if !defined(FUSION_INCLUDE_FILTER_IF)
#define FUSION_INCLUDE_FILTER_IF
#include <boost/fusion/support/config.hpp>
#include <boost/fusion/algorithm/transformation/filter_if.hpp>
#endif
| {
"pile_set_name": "Github"
} |
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`datasource/terraform-module getReleases processes real data 1`] = `
Object {
"homepage": "https://registry.terraform.io/modules/hashicorp/consul/aws",
"name": "hashicorp/consul/aws",
"releases": Array [
Object {
"version": "0.0.1",
},
Object {
"version": "0.0.2",
},
Object {
"version": "0.0.3",
},
Object {
"version": "0.0.4",
},
Object {
"version": "0.0.5",
},
Object {
"version": "0.1.0",
},
Object {
"version": "0.1.1",
},
Object {
"version": "0.1.2",
},
Object {
"version": "0.2.0",
},
Object {
"version": "0.2.1",
},
Object {
"version": "0.2.2",
},
Object {
"version": "0.3.0",
},
Object {
"version": "0.3.1",
},
Object {
"version": "0.3.2",
},
Object {
"version": "0.3.3",
},
Object {
"version": "0.3.4",
},
Object {
"version": "0.3.5",
},
Object {
"version": "0.3.6",
},
Object {
"version": "0.3.7",
},
Object {
"version": "0.3.8",
},
Object {
"version": "0.3.9",
},
Object {
"version": "0.3.10",
},
Object {
"version": "0.4.0",
},
],
"sourceUrl": "https://github.com/hashicorp/terraform-aws-consul",
"versions": Object {},
}
`;
exports[`datasource/terraform-module getReleases processes real data 2`] = `
Array [
Object {
"headers": Object {
"accept": "application/json",
"accept-encoding": "gzip, deflate",
"host": "registry.terraform.io",
"user-agent": "https://github.com/renovatebot/renovate",
},
"method": "GET",
"url": "https://registry.terraform.io/.well-known/terraform.json",
},
Object {
"headers": Object {
"accept": "application/json",
"accept-encoding": "gzip, deflate",
"host": "registry.terraform.io",
"user-agent": "https://github.com/renovatebot/renovate",
},
"method": "GET",
"url": "https://registry.terraform.io/v1/modules/hashicorp/consul/aws",
},
]
`;
exports[`datasource/terraform-module getReleases processes real data on changed subpath 1`] = `
Array [
Object {
"headers": Object {
"accept": "application/json",
"accept-encoding": "gzip, deflate",
"host": "terraform.foo.bar",
"user-agent": "https://github.com/renovatebot/renovate",
},
"method": "GET",
"url": "https://terraform.foo.bar/.well-known/terraform.json",
},
Object {
"headers": Object {
"accept": "application/json",
"accept-encoding": "gzip, deflate",
"host": "terraform.foo.bar",
"user-agent": "https://github.com/renovatebot/renovate",
},
"method": "GET",
"url": "https://terraform.foo.bar/api/registry/v1/modules/hashicorp/consul/aws",
},
]
`;
exports[`datasource/terraform-module getReleases processes real data on changed subpath 2`] = `
Object {
"name": "hashicorp/consul/aws",
"releases": Array [
Object {
"version": "0.0.1",
},
Object {
"version": "0.0.2",
},
Object {
"version": "0.0.3",
},
Object {
"version": "0.0.4",
},
Object {
"version": "0.0.5",
},
Object {
"version": "0.1.0",
},
Object {
"version": "0.1.1",
},
Object {
"version": "0.1.2",
},
Object {
"version": "0.2.0",
},
Object {
"version": "0.2.1",
},
Object {
"version": "0.2.2",
},
Object {
"version": "0.3.0",
},
Object {
"version": "0.3.1",
},
Object {
"version": "0.3.2",
},
Object {
"version": "0.3.3",
},
Object {
"version": "0.3.4",
},
Object {
"version": "0.3.5",
},
Object {
"version": "0.3.6",
},
Object {
"version": "0.3.7",
},
Object {
"version": "0.3.8",
},
Object {
"version": "0.3.9",
},
Object {
"version": "0.3.10",
},
Object {
"version": "0.4.0",
},
],
"sourceUrl": "https://github.com/hashicorp/terraform-aws-consul",
"versions": Object {},
}
`;
exports[`datasource/terraform-module getReleases processes with registry in name 1`] = `
Object {
"homepage": "https://registry.terraform.io/modules/hashicorp/consul/aws",
"name": "hashicorp/consul/aws",
"releases": Array [
Object {
"version": "0.0.1",
},
Object {
"version": "0.0.2",
},
Object {
"version": "0.0.3",
},
Object {
"version": "0.0.4",
},
Object {
"version": "0.0.5",
},
Object {
"version": "0.1.0",
},
Object {
"version": "0.1.1",
},
Object {
"version": "0.1.2",
},
Object {
"version": "0.2.0",
},
Object {
"version": "0.2.1",
},
Object {
"version": "0.2.2",
},
Object {
"version": "0.3.0",
},
Object {
"version": "0.3.1",
},
Object {
"version": "0.3.2",
},
Object {
"version": "0.3.3",
},
Object {
"version": "0.3.4",
},
Object {
"version": "0.3.5",
},
Object {
"version": "0.3.6",
},
Object {
"version": "0.3.7",
},
Object {
"version": "0.3.8",
},
Object {
"version": "0.3.9",
},
Object {
"version": "0.3.10",
},
Object {
"version": "0.4.0",
},
],
"sourceUrl": "https://github.com/hashicorp/terraform-aws-consul",
"versions": Object {},
}
`;
exports[`datasource/terraform-module getReleases processes with registry in name 2`] = `
Array [
Object {
"headers": Object {
"accept": "application/json",
"accept-encoding": "gzip, deflate",
"host": "registry.terraform.io",
"user-agent": "https://github.com/renovatebot/renovate",
},
"method": "GET",
"url": "https://registry.terraform.io/.well-known/terraform.json",
},
Object {
"headers": Object {
"accept": "application/json",
"accept-encoding": "gzip, deflate",
"host": "registry.terraform.io",
"user-agent": "https://github.com/renovatebot/renovate",
},
"method": "GET",
"url": "https://registry.terraform.io/v1/modules/hashicorp/consul/aws",
},
]
`;
exports[`datasource/terraform-module getReleases rejects mismatch 1`] = `
Array [
Object {
"headers": Object {
"accept": "application/json",
"accept-encoding": "gzip, deflate",
"host": "terraform.company.com",
"user-agent": "https://github.com/renovatebot/renovate",
},
"method": "GET",
"url": "https://terraform.company.com/.well-known/terraform.json",
},
Object {
"headers": Object {
"accept": "application/json",
"accept-encoding": "gzip, deflate",
"host": "terraform.company.com",
"user-agent": "https://github.com/renovatebot/renovate",
},
"method": "GET",
"url": "https://terraform.company.com/v1/modules/consul/foo",
},
]
`;
exports[`datasource/terraform-module getReleases rejects servicediscovery 1`] = `
Array [
Object {
"headers": Object {
"accept": "application/json",
"accept-encoding": "gzip, deflate",
"host": "terraform.company.com",
"user-agent": "https://github.com/renovatebot/renovate",
},
"method": "GET",
"url": "https://terraform.company.com/.well-known/terraform.json",
},
]
`;
exports[`datasource/terraform-module getReleases returns null for 404 1`] = `
Array [
Object {
"headers": Object {
"accept": "application/json",
"accept-encoding": "gzip, deflate",
"host": "registry.terraform.io",
"user-agent": "https://github.com/renovatebot/renovate",
},
"method": "GET",
"url": "https://registry.terraform.io/.well-known/terraform.json",
},
Object {
"headers": Object {
"accept": "application/json",
"accept-encoding": "gzip, deflate",
"host": "registry.terraform.io",
"user-agent": "https://github.com/renovatebot/renovate",
},
"method": "GET",
"url": "https://registry.terraform.io/v1/modules/hashicorp/consul/aws",
},
]
`;
exports[`datasource/terraform-module getReleases returns null for empty result 1`] = `
Array [
Object {
"headers": Object {
"accept": "application/json",
"accept-encoding": "gzip, deflate",
"host": "registry.terraform.io",
"user-agent": "https://github.com/renovatebot/renovate",
},
"method": "GET",
"url": "https://registry.terraform.io/.well-known/terraform.json",
},
Object {
"headers": Object {
"accept": "application/json",
"accept-encoding": "gzip, deflate",
"host": "registry.terraform.io",
"user-agent": "https://github.com/renovatebot/renovate",
},
"method": "GET",
"url": "https://registry.terraform.io/v1/modules/hashicorp/consul/aws",
},
]
`;
exports[`datasource/terraform-module getReleases returns null for unknown error 1`] = `
Array [
Object {
"headers": Object {
"accept": "application/json",
"accept-encoding": "gzip, deflate",
"host": "registry.terraform.io",
"user-agent": "https://github.com/renovatebot/renovate",
},
"method": "GET",
"url": "https://registry.terraform.io/.well-known/terraform.json",
},
Object {
"headers": Object {
"accept": "application/json",
"accept-encoding": "gzip, deflate",
"host": "registry.terraform.io",
"user-agent": "https://github.com/renovatebot/renovate",
},
"method": "GET",
"url": "https://registry.terraform.io/v1/modules/hashicorp/consul/aws",
},
]
`;
| {
"pile_set_name": "Github"
} |
/* Copyright (c) 2012 The Chromium Authors. All rights reserved.
* Use of this source code is governed by a BSD-style license that can be
* found in the LICENSE file. */
/* These additional styles constrain the size of the proxy page to the height of
the dialog containing them. */
html {
height: 100%;
}
body {
-webkit-box-sizing: border-box;
height: 100%;
margin: 0;
padding: 8px;
}
#proxyPage {
height: 100%;
}
#proxy-page-title {
/* We have a title on the window, so the title in domui should be hidden. */
display: none;
}
#internet-details-content-area > .subpages-tab-contents {
height: 100% !important;
}
#advanced-config {
padding-top: 10px;
}
html[highlight=strong] input[type='button']:focus,
html[highlight=strong] input[type='checkbox']:focus,
html[highlight=strong] input[type='radio']:focus,
html[highlight=strong] input[type='text']:focus,
html[highlight=strong] input[type='url']:focus,
html[highlight=strong] button:focus,
html[highlight=strong] list:focus {
box-shadow: 0 0 23px rgb(77, 144, 254) !important;
}
html[highlight=strong] #network-proxy-tab {
padding: 10px;
}
| {
"pile_set_name": "Github"
} |
..
This file is part of Logtalk <https://logtalk.org/>
Copyright 1998-2020 Paulo Moura <[email protected]>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
.. index:: pair: abolish_protocol/1; Built-in predicate
.. _predicates_abolish_protocol_1:
abolish_protocol/1
==================
Description
-----------
::
abolish_protocol(Protocol)
Abolishes a dynamic protocol. The protocol identifier can then be reused when creating a new protocol.
Modes and number of proofs
--------------------------
::
abolish_protocol(@protocol_identifier) - one
Errors
------
| ``Protocol`` is a variable:
| ``instantiation_error``
| ``Protocol`` is neither a variable nor a valid protocol identifier:
| ``type_error(protocol_identifier, Protocol)``
| ``Protocol`` is an identifier of a static protocol:
| ``permission_error(modify, static_protocol, Protocol)``
| ``Protocol`` does not exist:
| ``existence_error(protocol, Protocol)``
Examples
--------
::
| ?- abolish_protocol(listp).
.. seealso::
:ref:`predicates_create_protocol_3`,
:ref:`predicates_current_protocol_1`,
:ref:`predicates_protocol_property_2`,
:ref:`predicates_conforms_to_protocol_2_3`,
:ref:`predicates_extends_protocol_2_3`,
:ref:`predicates_implements_protocol_2_3`
| {
"pile_set_name": "Github"
} |
#!/usr/bin/env bash
##############################################################################
##
## Gradle start up script for UN*X
##
##############################################################################
# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
DEFAULT_JVM_OPTS=""
APP_NAME="Gradle"
APP_BASE_NAME=`basename "$0"`
# Use the maximum available, or set MAX_FD != -1 to use that value.
MAX_FD="maximum"
# Print a diagnostic message to stdout without aborting the script.
warn ( ) {
    echo "$*"
}
# Print an error message surrounded by blank lines and abort with status 1.
die ( ) {
    echo
    echo "$*"
    echo
    exit 1
}
# OS specific support (must be 'true' or 'false').
cygwin=false
msys=false
darwin=false
case "`uname`" in
CYGWIN* )
cygwin=true
;;
Darwin* )
darwin=true
;;
MINGW* )
msys=true
;;
esac
# Attempt to set APP_HOME
# Resolve links: $0 may be a link
PRG="$0"
# Need this for relative symlinks.
while [ -h "$PRG" ] ; do
ls=`ls -ld "$PRG"`
link=`expr "$ls" : '.*-> \(.*\)$'`
if expr "$link" : '/.*' > /dev/null; then
PRG="$link"
else
PRG=`dirname "$PRG"`"/$link"
fi
done
SAVED="`pwd`"
cd "`dirname \"$PRG\"`/" >/dev/null
APP_HOME="`pwd -P`"
cd "$SAVED" >/dev/null
CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
# Determine the Java command to use to start the JVM.
if [ -n "$JAVA_HOME" ] ; then
if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
# IBM's JDK on AIX uses strange locations for the executables
JAVACMD="$JAVA_HOME/jre/sh/java"
else
JAVACMD="$JAVA_HOME/bin/java"
fi
if [ ! -x "$JAVACMD" ] ; then
die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
else
JAVACMD="java"
which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
# Increase the maximum file descriptors if we can.
if [ "$cygwin" = "false" -a "$darwin" = "false" ] ; then
MAX_FD_LIMIT=`ulimit -H -n`
if [ $? -eq 0 ] ; then
if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
MAX_FD="$MAX_FD_LIMIT"
fi
ulimit -n $MAX_FD
if [ $? -ne 0 ] ; then
warn "Could not set maximum file descriptor limit: $MAX_FD"
fi
else
warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
fi
fi
# For Darwin, add options to specify how the application appears in the dock
if $darwin; then
GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
fi
# For Cygwin, switch paths to Windows format before running java
if $cygwin ; then
APP_HOME=`cygpath --path --mixed "$APP_HOME"`
CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
JAVACMD=`cygpath --unix "$JAVACMD"`
# We build the pattern for arguments to be converted via cygpath
ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
SEP=""
for dir in $ROOTDIRSRAW ; do
ROOTDIRS="$ROOTDIRS$SEP$dir"
SEP="|"
done
OURCYGPATTERN="(^($ROOTDIRS))"
# Add a user-defined pattern to the cygpath arguments
if [ "$GRADLE_CYGPATTERN" != "" ] ; then
OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
fi
# Now convert the arguments - kludge to limit ourselves to /bin/sh
i=0
for arg in "$@" ; do
CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
else
eval `echo args$i`="\"$arg\""
fi
i=$((i+1))
done
case $i in
(0) set -- ;;
(1) set -- "$args0" ;;
(2) set -- "$args0" "$args1" ;;
(3) set -- "$args0" "$args1" "$args2" ;;
(4) set -- "$args0" "$args1" "$args2" "$args3" ;;
(5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
(6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
(7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
(8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
(9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
esac
fi
# Split up the JVM_OPTS And GRADLE_OPTS values into an array, following the shell quoting and substitution rules
function splitJvmOpts() {
JVM_OPTS=("$@")
}
eval splitJvmOpts $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS
JVM_OPTS[${#JVM_OPTS[*]}]="-Dorg.gradle.appname=$APP_BASE_NAME"
exec "$JAVACMD" "${JVM_OPTS[@]}" -classpath "$CLASSPATH" org.gradle.wrapper.GradleWrapperMain "$@"
| {
"pile_set_name": "Github"
} |
// Protractor end-to-end test for the ngTransclude directive example page.
describe("module:ng.directive:ngTransclude", function() {
  // Load the example page fresh before each spec.
  beforeEach(function() {
    browser.get("./examples/example-example46/index.html");
  });

  it('should have transcluded', function() {
    // Type new values into the two ng-model bound inputs...
    var titleElement = element(by.model('title'));
    titleElement.clear();
    titleElement.sendKeys('TITLE');
    var textElement = element(by.model('text'));
    textElement.clear();
    textElement.sendKeys('TEXT');
    // ...and expect the transcluded bindings to reflect them.
    expect(element(by.binding('title')).getText()).toEqual('TITLE');
    expect(element(by.binding('text')).getText()).toEqual('TEXT');
  });
});
| {
"pile_set_name": "Github"
} |
setwd(normalizePath(dirname(R.utils::commandArgs(asValues=TRUE)$"f")))
source('../../findNSourceUtils.R')
# Regression test comparing H2O's GLM offset support against R's stats::glm.
# Both fit a Poisson model with log(pop) as offset on lung.csv; deviance,
# AIC and coefficients must agree within tolerance.
test.GLM.offset <- function(conn) {
  Log.info("Importing lung.csv data...\n")
  lung.hex = h2o.uploadFile(conn, locate("smalldata/glm_test/lung.csv"))
  # precompute the offset column referenced by name in h2o.glm below
  lung.hex$log_pop <- log(lung.hex$pop)
  lung.sum = summary(lung.hex)
  print(lung.sum)
  # reference fit in plain R; drop NA rows to match H2O's row handling
  lung.r = read.csv(locate("smalldata/glm_test/lung.csv"), header = TRUE)
  lung.r = na.omit(lung.r)
  Log.info(cat("H2O GLM (poisson)"))
  lung.glm.h2o = h2o.glm(y = 4, x = 1:2, data = lung.hex, family = "poisson", link = "log", offset = "log_pop")
  print(lung.glm.h2o)
  Log.info(cat("{stats} glm (poisson)"))
  lung.glm.r = glm(cases ~ city + age + offset(log(pop)), family = "poisson", data = lung.r)
  # fit statistics and coefficients must match R within 1%
  checkEqualsNumeric(lung.glm.h2o@model$deviance, lung.glm.r$deviance, tolerance = 0.01)
  checkEqualsNumeric(lung.glm.h2o@model$aic, lung.glm.r$aic, tolerance = 0.01)
  checkEqualsNumeric(lung.glm.h2o@model$null.deviance, lung.glm.r$null.deviance, tolerance = 0.01)
  checkEqualsNumeric(lung.glm.h2o@model$coefficients["city.Horsens"], lung.glm.r$coefficients["cityHorsens"], tolerance = 0.01)
  checkEqualsNumeric(lung.glm.h2o@model$coefficients["city.Kolding"], lung.glm.r$coefficients["cityKolding"], tolerance = 0.01)
  checkEqualsNumeric(lung.glm.h2o@model$coefficients["city.Vejle"], lung.glm.r$coefficients["cityVejle"], tolerance = 0.01)
  checkEqualsNumeric(lung.glm.h2o@model$coefficients["age"], lung.glm.r$coefficients["age"], tolerance = 0.01)
  checkEqualsNumeric(lung.glm.h2o@model$coefficients["Intercept"], lung.glm.r$coefficients["(Intercept)"], tolerance = 0.01)
  # an offset enters the model with a fixed coefficient, so it must be exactly 1
  checkEqualsNumeric(lung.glm.h2o@model$coefficients["log_pop"], 1.0, tolerance = 1e-10)
  testEnd()
}
doTest("GLM offset test", test.GLM.offset) | {
"pile_set_name": "Github"
} |
/*
* Copyright (C) 2014 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ro.pippo.core.util;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.regex.PatternSyntaxException;
/**
* String utility functions to keep pippo-core small.
*/
/**
 * String utility functions to keep pippo-core small.
 *
 * <p>All methods are static; most are tolerant of {@code null} input as
 * documented per method. The class is not meant to be instantiated.
 */
public class StringUtils {

    private StringUtils() {
        // utility class: no instances
    }

    /**
     * Returns {@code true} if the string is {@code null}, empty, or
     * consists only of whitespace.
     */
    public static boolean isNullOrEmpty(String s) {
        return s == null || s.trim().isEmpty();
    }

    /**
     * Splits a string on the given separator while honoring double-quoted
     * segments: separators inside double quotes are ignored, surrounding
     * quotes are stripped from each chunk, chunks are trimmed, and empty
     * chunks are dropped.
     *
     * @param s the string to split; must not be {@code null}
     * @param separator the separator; used as a regular-expression prefix
     * @return an unmodifiable list of trimmed chunks
     */
    public static List<String> getList(String s, String separator) {
        List<String> strings = new ArrayList<>();
        try {
            // split on the separator only when followed by an even number of
            // double quotes, i.e. when we are currently outside a quoted segment
            String[] chunks = s.split(separator + "(?=([^\"]*\"[^\"]*\")*[^\"]*$)");
            for (String chunk : chunks) {
                chunk = chunk.trim();
                if (chunk.length() > 0) {
                    if (chunk.charAt(0) == '"' && chunk.charAt(chunk.length() - 1) == '"') {
                        // strip the surrounding double quotes
                        chunk = chunk.substring(1, chunk.length() - 1).trim();
                    }
                    strings.add(chunk);
                }
            }
        } catch (PatternSyntaxException e) {
            // a malformed separator is a programming error; preserve the cause
            throw new RuntimeException(e);
        }
        return Collections.unmodifiableList(strings);
    }

    /**
     * <p>Removes a substring only if it is at the start of a source string,
     * otherwise returns the source string.</p>
     *
     * <p>A {@code null} source string will return {@code null}.
     * An empty ("") source string will return the empty string.
     * A {@code null} search string will return the source string.</p>
     *
     * <pre>
     * StringUtils.removeStart(null, *)                    = null
     * StringUtils.removeStart("", *)                      = ""
     * StringUtils.removeStart(*, null)                    = *
     * StringUtils.removeStart("www.domain.com", "www.")   = "domain.com"
     * StringUtils.removeStart("abc", "")                  = "abc"
     * </pre>
     *
     * @param str the source String to search, may be null
     * @param remove the String to search for and remove, may be null
     * @return the substring with the string removed if found,
     * {@code null} if null String input
     */
    public static String removeStart(String str, String remove) {
        if (isNullOrEmpty(str) || isNullOrEmpty(remove)) {
            return str;
        }
        if (str.startsWith(remove)) {
            return str.substring(remove.length());
        }
        return str;
    }

    /**
     * <p>Removes a substring only if it is at the end of a source string,
     * otherwise returns the source string.</p>
     *
     * <p>A {@code null} source string will return {@code null}.
     * An empty ("") source string will return the empty string.
     * A {@code null} search string will return the source string.</p>
     *
     * <pre>
     * StringUtils.removeEnd(null, *)                      = null
     * StringUtils.removeEnd("", *)                        = ""
     * StringUtils.removeEnd(*, null)                      = *
     * StringUtils.removeEnd("www.domain.com", ".com.")    = "www.domain.com"
     * StringUtils.removeEnd("www.domain.com", ".com")     = "www.domain"
     * StringUtils.removeEnd("www.domain.com", "domain")   = "www.domain.com"
     * StringUtils.removeEnd("abc", "")                    = "abc"
     * </pre>
     *
     * @param str the source String to search, may be null
     * @param remove the String to search for and remove, may be null
     * @return the substring with the string removed if found,
     * {@code null} if null String input
     */
    public static String removeEnd(String str, String remove) {
        if (isNullOrEmpty(str) || isNullOrEmpty(remove)) {
            return str;
        }
        if (str.endsWith(remove)) {
            return str.substring(0, str.length() - remove.length());
        }
        return str;
    }

    /**
     * <p>Adds a substring only if the source string does not already start
     * with the substring, otherwise returns the source string.</p>
     *
     * <pre>
     * StringUtils.addStart(null, *)                = *
     * StringUtils.addStart("", *)                  = *
     * StringUtils.addStart(*, null)                = *
     * StringUtils.addStart("domain.com", "www.")   = "www.domain.com"
     * StringUtils.addStart("abc123", "abc")        = "abc123"
     * </pre>
     *
     * @param str the source String to search, may be null
     * @param add the String to search for and add, may be null
     * @return the substring with the string added if required
     */
    public static String addStart(String str, String add) {
        if (isNullOrEmpty(add)) {
            return str;
        }
        if (isNullOrEmpty(str)) {
            return add;
        }
        if (!str.startsWith(add)) {
            return add + str;
        }
        return str;
    }

    /**
     * <p>Adds a substring only if the source string does not already end
     * with the substring, otherwise returns the source string.</p>
     *
     * <pre>
     * StringUtils.addEnd(null, *)                  = *
     * StringUtils.addEnd("", *)                    = *
     * StringUtils.addEnd(*, null)                  = *
     * StringUtils.addEnd("www.", "domain.com")     = "www.domain.com"
     * StringUtils.addEnd("123abc", "abc")          = "123abc"
     * </pre>
     *
     * @param str the source String to search, may be null
     * @param add the String to search for and add, may be null
     * @return the substring with the string added if required
     */
    public static String addEnd(String str, String add) {
        if (isNullOrEmpty(add)) {
            return str;
        }
        if (isNullOrEmpty(str)) {
            return add;
        }
        if (!str.endsWith(add)) {
            return str + add;
        }
        return str;
    }

    /**
     * Formats the string SLF4J-style: each "{}" placeholder is replaced by
     * the corresponding argument via {@link String#format}.
     *
     * <p>Fix: literal {@code '%'} characters in the template are escaped to
     * {@code "%%"} first, so templates such as {@code "100% of {}"} no longer
     * make {@code String.format} throw an {@code IllegalFormatException}.
     */
    public static String format(String str, Object... args) {
        // escape pre-existing '%' so String.format does not parse it as a
        // conversion, then turn each "{}" placeholder into "%s"
        str = str.replace("%", "%%").replace("{}", "%s");
        return String.format(str, args);
    }

    /**
     * Returns the file extension of the value without the dot, or an empty
     * string when the value contains no dot.
     *
     * @param value the file name to inspect
     * @return the extension without the dot, or an empty string
     */
    public static String getFileExtension(String value) {
        int index = value.lastIndexOf('.');
        if (index > -1) {
            return value.substring(index + 1);
        }
        return "";
    }

    /**
     * Returns the prefix of the input string up to (excluding) the first
     * occurrence of the delimiter, or the entire input string when the
     * delimiter is not present.
     *
     * @param input the string to inspect
     * @param delimiter the delimiter character
     * @return the prefix substring, or the entire input string
     */
    public static String getPrefix(String input, char delimiter) {
        int index = input.indexOf(delimiter);
        if (index > -1) {
            return input.substring(0, index);
        }
        return input;
    }

}
| {
"pile_set_name": "Github"
} |
{
}
| {
"pile_set_name": "Github"
} |
use educe::Educe;
use futures::{pin_mut, stream, AsyncReadExt, FutureExt, Stream, StreamExt};
use serde::{Deserialize, Serialize};
use serde_closure::FnMutNamed;
use serde_json::Error as InternalJsonError;
use std::{
error, fmt::{self, Debug, Display}, io::{self, Cursor}, marker::PhantomData
};
use amadeus_core::{
file::{File, Page, Partition}, into_par_stream::IntoDistributedStream, par_stream::DistributedStream, util::{DistParStream, ResultExpandIter}, Source
};
use super::{SerdeData, SerdeDeserialize};
/// A JSON [`Source`]: streams deserialized rows of type `Row` from the
/// partitions of the underlying `File`.
#[derive(Educe)]
#[educe(Clone, Debug)]
pub struct Json<File, Row>
where
	File: amadeus_core::file::File,
	Row: SerdeData,
{
	// Partitions enumerated from the file at construction time.
	partitions: Vec<File::Partition>,
	// Zero-sized marker tying this source's item type to `Row`.
	marker: PhantomData<fn() -> Row>,
}
impl<F, Row> Json<F, Row>
where
	F: File,
	Row: SerdeData,
{
	/// Creates a `Json` source by enumerating the partitions of `file`.
	///
	/// # Errors
	///
	/// Returns [`JsonError::File`] if partition enumeration fails.
	pub async fn new(file: F) -> Result<Self, <Self as Source>::Error> {
		Ok(Self {
			partitions: file.partitions().await.map_err(JsonError::File)?,
			marker: PhantomData,
		})
	}
}
// Error produced while streaming rows out of a partition `P`, combining the
// file error `E` with the partition's and page's own error types.
type Error<P, E> = JsonError<E, <P as Partition>::Error, <<P as Partition>::Page as Page>::Error>;
// On stable the per-partition row stream must be boxed; on nightly it can be
// an unboxed `impl Trait` type alias.
#[cfg(not(nightly))]
type Output<P, Row, E> = std::pin::Pin<Box<dyn Stream<Item = Result<Row, Error<P, E>>>>>;
#[cfg(nightly)]
type Output<P: Partition, Row, E> = impl Stream<Item = Result<Row, Error<P, E>>>;
// `Closure` is a named, serializable `FnMut` (see `serde_closure::FnMutNamed`)
// mapping one partition to a stream of decoded `Row`s: every page of the
// partition is read fully into memory, then whitespace-separated JSON values
// are deserialized incrementally from that buffer. Read errors are deferred
// into the `io::Read` impl via `PassError` so they surface as `Json` errors.
// NOTE(review): comments are kept outside the macro invocation so they cannot
// interfere with its token parsing.
FnMutNamed! {
	pub type Closure<P, Row, E> = |self|partition=> P| -> Output<P, Row, E>
	where
		P: Partition,
		Row: SerdeData,
		E: 'static
	{
		#[allow(clippy::let_and_return)]
		let ret = async move {
			Ok(stream::iter(
				partition
					.pages()
					.await
					.map_err(JsonError::Partition)?
					.into_iter(),
			)
			.flat_map(|page| {
				async move {
					let mut buf = Vec::with_capacity(10 * 1024 * 1024);
					let reader = Page::reader(page);
					pin_mut!(reader);
					let buf = PassError::new(
						reader.read_to_end(&mut buf).await.map(|_| Cursor::new(buf)),
					);
					Ok(stream::iter(
						serde_json::Deserializer::from_reader(buf).into_iter().map(
							|x: Result<SerdeDeserialize<Row>, InternalJsonError>| Ok(x?.0),
						),
					))
				}
				.map(ResultExpandIter::new)
				.flatten_stream()
			})
			.map(|row: Result<Result<Row, InternalJsonError>, Error<P, E>>| Ok(row??)))
		}
		.map(ResultExpandIter::new)
		.flatten_stream()
		.map(|row: Result<Result<Row, Error<P, E>>, Error<P, E>>| Ok(row??));
		#[cfg(not(nightly))]
		let ret = ret.boxed_local();
		ret
	}
}
// Wires `Json` into amadeus as a `Source`: the distributed stream flat-maps
// each partition through `Closure`, and the local parallel stream reuses it.
impl<F, Row> Source for Json<F, Row>
where
	F: File,
	Row: SerdeData,
{
	type Item = Row;
	#[allow(clippy::type_complexity)]
	type Error = JsonError<
		F::Error,
		<F::Partition as Partition>::Error,
		<<F::Partition as Partition>::Page as Page>::Error,
	>;
	type ParStream = DistParStream<Self::DistStream>;
	// On stable the stream type must be written out in full; on nightly it is
	// inferred via `impl Trait`.
	#[cfg(not(nightly))]
	#[allow(clippy::type_complexity)]
	type DistStream = amadeus_core::par_stream::FlatMap<
		amadeus_core::into_par_stream::IterDistStream<std::vec::IntoIter<F::Partition>>,
		Closure<F::Partition, Row, F::Error>,
	>;
	#[cfg(nightly)]
	type DistStream = impl DistributedStream<Item = Result<Self::Item, Self::Error>>;
	fn par_stream(self) -> Self::ParStream {
		// Local parallel execution is the distributed stream run in-process.
		DistParStream::new(self.dist_stream())
	}
	#[allow(clippy::let_and_return)]
	fn dist_stream(self) -> Self::DistStream {
		self.partitions.into_dist_stream().flat_map(Closure::new())
	}
}
// Stub `with`-module satisfying serde's derive for the wrapped
// `serde_json::Error` in `JsonError::Json`. Both directions are
// `unimplemented!()` — presumably the variant is never actually
// (de)serialized; confirm before relying on serialization of this error.
mod jsonerror {
	use serde::{Deserializer, Serializer};
	// Never expected to be called; panics if it is.
	pub(crate) fn serialize<T, S>(_t: &T, _serializer: S) -> Result<S::Ok, S::Error>
	where
		S: Serializer,
	{
		unimplemented!()
	}
	// Never expected to be called; panics if it is.
	pub(crate) fn deserialize<'de, T, D>(_deserializer: D) -> Result<T, D::Error>
	where
		D: Deserializer<'de>,
	{
		unimplemented!()
	}
}
/// Errors that can occur while sourcing JSON rows, tagged by where in the
/// pipeline they originated.
#[derive(Serialize, Deserialize, Debug)]
pub enum JsonError<A, B, C> {
	/// Failure enumerating the partitions of the underlying file.
	File(A),
	/// Failure listing the pages of a partition.
	Partition(B),
	/// Failure reading a page.
	Page(C),
	/// Failure parsing the JSON payload itself.
	Json(#[serde(with = "jsonerror")] InternalJsonError),
}
// Manual impl: `serde_json::Error` is not `Clone`, so the `Json` variant is
// duplicated by rebuilding an error from its `Display` text via
// `serde::de::Error::custom` (the clone carries the message, not the
// original error's category/position fields).
impl<A, B, C> Clone for JsonError<A, B, C>
where
	A: Clone,
	B: Clone,
	C: Clone,
{
	fn clone(&self) -> Self {
		match self {
			Self::File(err) => Self::File(err.clone()),
			Self::Partition(err) => Self::Partition(err.clone()),
			Self::Page(err) => Self::Page(err.clone()),
			Self::Json(err) => Self::Json(serde::de::Error::custom(err)),
		}
	}
}
// Manual impl: `serde_json::Error` is not `PartialEq`, so `Json` variants are
// compared by their rendered message text.
impl<A, B, C> PartialEq for JsonError<A, B, C>
where
	A: PartialEq,
	B: PartialEq,
	C: PartialEq,
{
	fn eq(&self, other: &Self) -> bool {
		match (self, other) {
			(Self::File(a), Self::File(b)) => a.eq(b),
			(Self::Partition(a), Self::Partition(b)) => a.eq(b),
			(Self::Page(a), Self::Page(b)) => a.eq(b),
			(Self::Json(a), Self::Json(b)) => a.to_string() == b.to_string(),
			// Differently-tagged errors are never equal.
			_ => false,
		}
	}
}
// Marker impl: all behavior comes from `Debug`/`Display`.
impl<A, B, C> error::Error for JsonError<A, B, C>
where
	A: error::Error,
	B: error::Error,
	C: error::Error,
{
}
// Display simply forwards to whichever wrapped error is present.
impl<A, B, C> Display for JsonError<A, B, C>
where
	A: Display,
	B: Display,
	C: Display,
{
	fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
		match self {
			Self::File(err) => Display::fmt(err, f),
			Self::Partition(err) => Display::fmt(err, f),
			Self::Page(err) => Display::fmt(err, f),
			Self::Json(err) => Display::fmt(err, f),
		}
	}
}
// Lets `?` convert raw serde_json errors into the `Json` variant.
impl<A, B, C> From<InternalJsonError> for JsonError<A, B, C> {
	fn from(err: InternalJsonError) -> Self {
		Self::Json(err)
	}
}
// Wraps the result of preparing a reader so that a preparation error is
// surfaced lazily through the `io::Read` impl (taken out of the `Option`
// when first read) instead of having to be handled eagerly.
struct PassError<R>(Result<R, Option<io::Error>>);
impl<R> PassError<R> {
	// Stores either the ready reader or the error to replay on first read.
	fn new(r: Result<R, io::Error>) -> Self {
		Self(r.map_err(Some))
	}
}
impl<R> io::Read for PassError<R>
where
	R: io::Read,
{
	/// Delegates to the inner reader, or yields the stored error.
	///
	/// The original implementation called `take().unwrap()`, which panicked
	/// if `read` was invoked again after the stored error had already been
	/// returned once; subsequent calls now produce a fresh `io::Error`
	/// instead of panicking.
	fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
		match &mut self.0 {
			Ok(r) => r.read(buf),
			Err(e) => Err(e.take().unwrap_or_else(|| {
				io::Error::new(io::ErrorKind::Other, "PassError: error already consumed")
			})),
		}
	}
}
| {
"pile_set_name": "Github"
} |
/**
*
* WARNING! This file was autogenerated by:
* _ _ _ _ __ __
* | | | | | | |\ \ / /
* | | | | |_| | \ V /
* | | | | _ | / \
* | |_| | | | |/ /^\ \
* \___/\_| |_/\/ \/
*
* This file was autogenerated by UnrealHxGenerator using UHT definitions.
* It only includes UPROPERTYs and UFUNCTIONs. Do not modify it!
* In order to add more definitions, create or edit a type with the same name/package, but with an `_Extra` suffix
**/
package unreal;
/**
  Properties of representation of an 'agent' (or Pawn) used by AI navigation/pathfinding.
**/
@:glueCppIncludes("Classes/AI/Navigation/NavigationTypes.h")
// NOTE(review): this extern is autogenerated from UHT definitions (see the
// file header); add hand-written members in an `_Extra` type rather than here.
@:uextern @:ustruct extern class FNavAgentProperties extends unreal.FMovementProperties {
  /**
    UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = MovementProperties)
    TSubclassOf<ANavigationData> PreferredNavData;
  **/
  @:uproperty public var PreferredNavData : unreal.FSoftClassPath;
  /**
    Scale factor to apply to height of bounds when searching for navmesh to project to when nav walking
  **/
  @:uproperty public var NavWalkingSearchHeightScale : unreal.Float32;
  /**
    Step height to use, or -1 for default value from navdata's config.
  **/
  @:uproperty public var AgentStepHeight : unreal.Float32;
  /**
    Total height of the capsule used for navigation/pathfinding.
  **/
  @:uproperty public var AgentHeight : unreal.Float32;
  /**
    Radius of the capsule used for navigation/pathfinding.
  **/
  @:uproperty public var AgentRadius : unreal.Float32;
}
| {
"pile_set_name": "Github"
} |
################################################################
# Qwt Widget Library
# Copyright (C) 1997 Josef Wilgen
# Copyright (C) 2002 Uwe Rathmann
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the Qwt License, Version 1.0
################################################################
include( $${PWD}/../playground.pri )
!contains(QWT_CONFIG, QwtSvg) {
message(Are you trying to build Qwt with the Qt Creator as Shadow Build ?)
error(Qwt is configured without SVG support !)
}
TARGET = svgmap
QT += svg
HEADERS = \
plot.h
SOURCES = \
plot.cpp \
main.cpp
| {
"pile_set_name": "Github"
} |
{
"name": "Wingstop Restaurants, Inc.",
"displayName": "Wingstop Restaurants",
"properties": [
"wingstop.com"
]
} | {
"pile_set_name": "Github"
} |
# V1beta1VolumeNodeResources
VolumeNodeResources is a set of resource limits for scheduling of volumes.
## Properties
Name | Type | Description | Notes
------------ | ------------- | ------------- | -------------
**count** | **Integer** | Maximum number of unique volumes managed by the CSI driver that can be used on a node. A volume that is both attached and mounted on a node is considered to be used once, not twice. The same rule applies for a unique volume that is shared among multiple pods on the same node. If this field is nil, then the supported number of volumes on this node is unbounded. | [optional]
| {
"pile_set_name": "Github"
} |
module ProcessSpecs
class Daemon
def initialize(argv)
args, @input, @data, @behavior = argv
@args = Marshal.load [args].pack("H*")
@no_at_exit = false
end
def run
send @behavior
# Exit without running any at_exit handlers
exit!(0) if @no_at_exit
end
def write(data)
File.open(@data, "wb") { |f| f.puts data }
end
def daemonizing_at_exit
at_exit do
write "running at_exit"
end
@no_at_exit = true
Process.daemon
write "not running at_exit"
end
def return_value
write Process.daemon.to_s
end
def pid
parent = Process.pid
Process.daemon
daemon = Process.pid
write "#{parent}:#{daemon}"
end
def process_group
parent = Process.getpgrp
Process.daemon
daemon = Process.getpgrp
write "#{parent}:#{daemon}"
end
def daemon_at_exit
at_exit do
write "running at_exit"
end
Process.daemon
end
def stay_in_dir
Process.daemon(*@args)
write Dir.pwd
end
def keep_stdio_open_false_stdout
Process.daemon(*@args)
$stdout.write "writing to stdout"
write ""
end
def keep_stdio_open_false_stderr
Process.daemon(*@args)
$stderr.write "writing to stderr"
write ""
end
def keep_stdio_open_false_stdin
Process.daemon(*@args)
# Reading from /dev/null will return right away. If STDIN were not
# /dev/null, reading would block and the spec would hang. This is not a
# perfect way to spec the behavior but it works.
write $stdin.read
end
def keep_stdio_open_true_stdout
$stdout.reopen @data
Process.daemon(*@args)
$stdout.write "writing to stdout"
end
def keep_stdio_open_true_stderr
$stderr.reopen @data
Process.daemon(*@args)
$stderr.write "writing to stderr"
end
def keep_stdio_open_true_stdin
File.open(@input, "w") { |f| f.puts "reading from stdin" }
$stdin.reopen @input, "r"
Process.daemon(*@args)
write $stdin.read
end
def keep_stdio_open_files
file = File.open @input, "w"
Process.daemon(*@args)
write file.closed?
end
end
end
ProcessSpecs::Daemon.new(ARGV).run
| {
"pile_set_name": "Github"
} |
module.exports = {
id: 'common-textual-elements',
title: 'Common textual elements',
tag: 'p',
keywords: ['p', 'paragraph', 'text', 'textual elements', 'strong', 'bold', 'b', 'em', 'i', 'emphasis', 'italics', 'small', 'a', 'link', 'hr', 'horizontal rule', 'sub', 'subscript', 'sup', 'exponent', 'superscript', 'normalize', 'reset'],
description: `<p><strong>mini.css</strong> utilizes the ruleset of <a href="http://necolas.github.io/normalize.css/" target="_blank">Normalize.css</a> v7.0.0 to reliably deal with inconsistencies between browsers, while applying some tasteful defaults on top, such as using <a href="https://www.smashingmagazine.com/2015/11/using-system-ui-fonts-practical-guide/" target="_blank">native font stack</a> to figure out the best font for each device, setting the background and foreground colors, as well as the size of the text to <code>16px</code> and its line height to <code>1.5</code>.</p>
<p>All of the most common HTML5 elements, such as paragraphs, links, bold, small and slanted text, have been styled by default using clean, modern typography to make your pages look cool and stand out from the rest of the internet.</p>`,
example: `<p class="doc">This is a paragraph with some sample text. Did you know that the latest version of <strong class="doc">mini.css</strong> is codenamed <em class="doc">Gluon</em>? Well, now you do!</p><hr class="doc"/><p><small class="doc">Remember that <strong class="doc">mini.css</strong> is totally free, no fine print involved!</small></p>`,
samples: [
`<pre><span class="code-line"><span class="highlight-a"><p></span>This is a paragraph. with some <span class="highlight-a"><strong></span>bold text<span class="highlight-a"></strong></span> and some <span class="highlight-a"><em></span>italics text<span class="highlight-a"></em></span>.<span class="highlight-a"></p></span></span>
<span class="code-line"><span class="highlight-a"><a</span> <span class="highlight-b">href</span>=<span class="highlight-c">"#"</span><span class="highlight-a">></span>This is a link.<span class="highlight-a"></a></span></span>
<span class="code-line"><span class="highlight-a"><small></span>This is some small text.<span class="highlight-a"></small></span></span>
<span class="code-line"><span class="highlight-a"><sub></span>Subscript<span class="highlight-a"></sub></span></span>
<span class="code-line"><span class="highlight-a"><sup></span>Superscript<span class="highlight-a"></sup></span></span>
<span class="code-line"><span class="highlight-a"><hr/></span></span></pre>`
],
notes: [
],
customization: [
`Text color can be changed globally by changing the value of the <code>--fore-color</code> variable.`,
`Background color can be changed globally by changing the value of the <code>--back-color</code> variable.`,
`Border color can be changed globally by changing the value of the <code>--border-color</code> variable. This affects the color of <code><hr></code> elements.`,
`Universal margin for elements can be changed globally by changing the value of the <code>--universal-margin</code> variable. As a rule of thumb, consider the universal margin to be the distance you want your paragraphs to have from the sides of the screen.`,
`Universal padding for elements can be changed globally by changing the value of the <code>--universal-padding</code> variable.`,
`You can change the color of links by changing the values of the <code>--a-link-color</code> and <code>--a-visited-color</code> variables.`
],
modifiers: [],
dos: [],
donts: [
{description: `Avoid altering the base font size of <code>16px</code> directly in your CSS code, as it can cause problems with the display of certain elements.`,
sample:
`<pre><span class="code-line"><span class="highlight-c">/* Do not do this (use Sass instead) */</span></span>
<span class="code-line"><span class="highlight-a">html</span> {</span>
<span class="code-line"> font-size: <span class="highlight-b">14px</span>;</span>
<span class="code-line">}</span></pre>`
}
]
}
| {
"pile_set_name": "Github"
} |
package com.sourcemaking.state.first_example.after;
import java.io.*;
/**
 * A state of the ceiling fan's pull chain. Each implementation handles one
 * pull: it transitions the wrapper to the next state and announces the
 * resulting speed.
 */
interface State {
    void pull(CeilingFanPullChain wrapper);
}
/**
 * Context object of the State pattern: delegates each pull to its current
 * state, which in turn installs the next state via {@link #set_state}.
 */
class CeilingFanPullChain {
    // The state object that will handle the next pull on the chain.
    private State currentState;

    public CeilingFanPullChain() {
        // Every fan starts out switched off.
        this.currentState = new Off();
    }

    public void set_state(State s) {
        this.currentState = s;
    }

    public void pull() {
        this.currentState.pull(this);
    }
}
// Concrete states for the pull chain. Each pull advances to the next speed
// and reports it: Off -> Low -> Medium -> High -> Off.
class Off implements State {
    public void pull(CeilingFanPullChain wrapper) {
        wrapper.set_state(new Low());
        System.out.println("low speed");
    }
}
class Low implements State {
    public void pull(CeilingFanPullChain wrapper) {
        wrapper.set_state(new Medium());
        System.out.println("medium speed");
    }
}
class Medium implements State {
    public void pull(CeilingFanPullChain wrapper) {
        wrapper.set_state(new High());
        System.out.println("high speed");
    }
}
class High implements State {
    public void pull(CeilingFanPullChain wrapper) {
        // Wraps back around to the off position.
        wrapper.set_state(new Off());
        System.out.println("turning off");
    }
}
/**
 * Demo driver: each ENTER press pulls the fan's chain, cycling through its
 * speeds. Terminates cleanly when stdin reaches EOF — the original looped
 * forever once the stream closed, pulling the chain with no input and
 * spamming output.
 */
public class StateDemo {
    // One shared reader for the whole run; the original built a new
    // BufferedReader per call, which can lose buffered input between reads.
    private static final BufferedReader STDIN =
            new BufferedReader(new InputStreamReader(System.in));

    public static void main(String[] args) {
        CeilingFanPullChain chain = new CeilingFanPullChain();
        while (true) {
            System.out.print("Press ENTER");
            if (getLine() == null) {
                break; // EOF or read error: stop the demo
            }
            chain.pull();
        }
    }

    /** Reads one line from stdin; null signals end of input or a read error. */
    static String getLine() {
        try {
            return STDIN.readLine();
        } catch (IOException ex) {
            ex.printStackTrace();
            return null; // treated as end of input by main
        }
    }
}
| {
"pile_set_name": "Github"
} |
# DESCRIPTION
# ENDDESCRIPTION
##\{ textbook_ref_exact("Holt Linear Algebra", "2.1","66") \}
## DBsubject(Linear algebra)
## DBchapter(Euclidean spaces)
## DBsection(Vectors)
## Institution(Saint Louis University)
## Author(Mike May)
## Level(2)
## TitleText1('Linear Algebra')
## AuthorText1('Holt')
## EditionText1('')
## Section1('2.1')
## Problem1('6')
## KEYWORDS('subspaces')
# Standard PG problem bootstrap.
DOCUMENT();
loadMacros(
"PGstandard.pl",
"PGchoicemacros.pl",
"parserPopUp.pl",
"MathObjects.pl",
"freemanMacros.pl",
"PGcourse.pl"
);
# make sure we're in the context we want
Context("Numeric");
# Random nonzero vector components; generated for problem variation
# (not referenced in the displayed text of this conceptual question).
$v11=random(1,9,1)*random(-1,1,2);
$v12=random(1,9,1)*random(-1,1,2);
$v21=random(1,9,1)*random(-1,1,2);
$v22=random(1,9,1)*random(-1,1,2);
# the arguments of PopUp are [list of answers],
# correct answer
# Multiple-choice question: the question text is empty because the prompt
# ("Select the best statement.") is rendered in BEGIN_TEXT below.
$mc = new_multiple_choice();
$mc->qa(
"",
"A vector can have positive or negative components. A scalar can be positive or negative."
);
# Distractors.
$mc->extra(
"A vector can have positive or negative components. A scalar must be positive.",
"A vector can have positive or negative components. A scalar must be negative.",
"A vector can only have nonnegative components. A scalar can be positive or negative.",
"A vector can only have nonnegative components. A scalar must be nonnegative.",
);
$mc->makeLast("none of the above");
TEXT(beginproblem());
$showPartialCorrectAnswers = 0;
Context()->texStrings;
BEGIN_TEXT
\{ textbook_ref_exact("Holt Linear Algebra", "2.1","66") \}
$PAR
$PAR
Select the best statement.
$BR
\{ $mc->print_a() \}
END_TEXT
Context()->normalStrings;
$showPartialCorrectAnswers = 0;
ANS( radio_cmp( $mc->correct_ans() ) );
Context()->texStrings;
SOLUTION(EV3(<<'END_SOLUTION'));
$PAR SOLUTION $PAR
A vector can have positive or negative components. A scalar can be positive or negative.
END_SOLUTION
Context()->normalStrings;
ENDDOCUMENT();
| {
"pile_set_name": "Github"
} |
<section class="sc new">
<h4>Character Key Shortcuts</h4>
<p class="conformance-level">A</p>
<p class="change">New</p>
<p>If a <a>keyboard shortcut</a> is implemented in content using only letter (including upper- and lower-case letters), punctuation, number, or symbol characters, then at least one of the following is true:</p>
<dl>
<dt>Turn off</dt>
<dd>A <a>mechanism</a> is available to turn the shortcut off;</dd>
<dt>Remap</dt>
<dd>A mechanism is available to remap the shortcut to use one or more non-printable keyboard characters (e.g. Ctrl, Alt, etc);</dd>
<dt>Active only on focus</dt>
<dd>The keyboard shortcut for a <a>user interface component</a> is only active when that component has focus.</dd>
</dl>
</section>
| {
"pile_set_name": "Github"
} |
package data
import (
"fmt"
"github.com/docker/go/canonical/json"
)
// SignedRoot is a fully unpacked root.json
type SignedRoot struct {
	// Signatures over the canonical form of Signed.
	Signatures []Signature
	// Signed is the parsed payload that the signatures cover.
	Signed Root
	// Dirty marks in-memory modifications that have not been re-signed.
	Dirty bool
}
// Root is the Signed component of a root.json
type Root struct {
	SignedCommon
	// Keys maps key ID -> public key for every key referenced by Roles.
	Keys Keys `json:"keys"`
	// Roles maps each role name to its key IDs and signing threshold.
	Roles map[RoleName]*RootRole `json:"roles"`
	ConsistentSnapshot bool `json:"consistent_snapshot"`
}
// isValidRootStructure returns an error, or nil, depending on whether the content of the struct
// is valid for root metadata. This does not check signatures or expiry, just that
// the metadata content is valid.
func isValidRootStructure(r Root) error {
	// The type field must carry the canonical name registered for root.
	if expected := TUFTypes[CanonicalRootRole]; r.Type != expected {
		return ErrInvalidMetadata{
			role: CanonicalRootRole, msg: fmt.Sprintf("expected type %s, not %s", expected, r.Type)}
	}
	if r.Version < 1 {
		return ErrInvalidMetadata{
			role: CanonicalRootRole, msg: "version cannot be less than 1"}
	}
	// all the base roles MUST appear in the root.json - other roles are allowed,
	// but other than the mirror role (not currently supported) are out of spec
	for _, name := range BaseRoles {
		spec, present := r.Roles[name]
		if !present || spec == nil {
			return ErrInvalidMetadata{
				role: CanonicalRootRole, msg: fmt.Sprintf("missing %s role specification", name)}
		}
		if err := isValidRootRoleStructure(CanonicalRootRole, name, *spec, r.Keys); err != nil {
			return err
		}
	}
	return nil
}
// isValidRootRoleStructure checks one role entry: the signing threshold must
// be at least 1 and every key ID it names must exist in validKeys.
func isValidRootRoleStructure(metaContainingRole, rootRoleName RoleName, r RootRole, validKeys Keys) error {
	if r.Threshold < 1 {
		return ErrInvalidMetadata{
			role: metaContainingRole,
			msg:  fmt.Sprintf("invalid threshold specified for %s: %v ", rootRoleName, r.Threshold),
		}
	}
	for _, id := range r.KeyIDs {
		if _, known := validKeys[id]; known {
			continue
		}
		return ErrInvalidMetadata{
			role: metaContainingRole,
			msg:  fmt.Sprintf("key ID %s specified in %s without corresponding key", id, rootRoleName),
		}
	}
	return nil
}
// NewRoot initializes a new SignedRoot with a set of keys, roles, and the consistent flag
func NewRoot(keys map[string]PublicKey, roles map[RoleName]*RootRole, consistent bool) (*SignedRoot, error) {
	// Build the payload first, then wrap it unsigned and marked dirty so the
	// caller knows it still needs signing.
	root := Root{
		SignedCommon: SignedCommon{
			Type:    TUFTypes[CanonicalRootRole],
			Version: 0,
			Expires: DefaultExpires(CanonicalRootRole),
		},
		Keys:               keys,
		Roles:              roles,
		ConsistentSnapshot: consistent,
	}
	return &SignedRoot{
		Signatures: make([]Signature, 0),
		Signed:     root,
		Dirty:      true,
	}, nil
}
// BuildBaseRole returns a copy of a BaseRole using the information in this SignedRoot for the specified role name.
// Will error for invalid role name or key metadata within this SignedRoot
func (r SignedRoot) BuildBaseRole(roleName RoleName) (BaseRole, error) {
	spec, ok := r.Signed.Roles[roleName]
	if !ok {
		return BaseRole{}, ErrInvalidRole{Role: roleName, Reason: "role not found in root file"}
	}
	// Resolve every key ID the role names against the root's key table.
	pubKeys := make(map[string]PublicKey, len(spec.KeyIDs))
	for _, id := range spec.KeyIDs {
		key, found := r.Signed.Keys[id]
		if !found {
			return BaseRole{}, ErrInvalidRole{
				Role:   roleName,
				Reason: fmt.Sprintf("key with ID %s was not found in root metadata", id),
			}
		}
		pubKeys[id] = key
	}
	return BaseRole{
		Name:      roleName,
		Keys:      pubKeys,
		Threshold: spec.Threshold,
	}, nil
}
// ToSigned partially serializes a SignedRoot for further signing
func (r SignedRoot) ToSigned() (*Signed, error) {
	payload, err := defaultSerializer.MarshalCanonical(r.Signed)
	if err != nil {
		return nil, err
	}
	// Re-wrap the canonical bytes as a raw JSON message.
	raw := json.RawMessage{}
	if err := raw.UnmarshalJSON(payload); err != nil {
		return nil, err
	}
	// Copy the signatures so the caller cannot mutate ours through the result.
	sigs := append(make([]Signature, 0, len(r.Signatures)), r.Signatures...)
	return &Signed{
		Signatures: sigs,
		Signed:     &raw,
	}, nil
}
// MarshalJSON returns the serialized form of SignedRoot as bytes
func (r SignedRoot) MarshalJSON() ([]byte, error) {
	// Delegate to ToSigned so the output always covers the canonical payload.
	sgn, err := r.ToSigned()
	if err != nil {
		return nil, err
	}
	return defaultSerializer.Marshal(sgn)
}
// RootFromSigned fully unpacks a Signed object into a SignedRoot and ensures
// that it is a valid SignedRoot
func RootFromSigned(s *Signed) (*SignedRoot, error) {
	if s.Signed == nil {
		return nil, ErrInvalidMetadata{
			role: CanonicalRootRole,
			msg:  "root file contained an empty payload",
		}
	}
	var root Root
	if err := defaultSerializer.Unmarshal(*s.Signed, &root); err != nil {
		return nil, err
	}
	if err := isValidRootStructure(root); err != nil {
		return nil, err
	}
	// Copy the signatures so the result does not alias the caller's slice.
	sigs := make([]Signature, len(s.Signatures))
	copy(sigs, s.Signatures)
	return &SignedRoot{
		Signatures: sigs,
		Signed:     root,
	}, nil
}
| {
"pile_set_name": "Github"
} |
// Copyright 2019 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
// NOTE(review): this is a V8 regression test — the two-step form (start the
// dynamic import, then suspend on top-level await for its namespace) appears
// deliberate; keep it as written.
let m = import('modules-skip-1.mjs');
let m_namespace = await m;
// Re-export the imported module's life() through this module.
export function life() {
  return m_namespace.life();
}
| {
"pile_set_name": "Github"
} |
package tflite
/*
#ifndef GO_TFLITE_H
#include "tflite.go.h"
#endif
#cgo LDFLAGS: -ltensorflowlite_c
#cgo linux LDFLAGS: -ldl -lrt
*/
import "C"
import (
"reflect"
"unsafe"
"github.com/mattn/go-pointer"
"github.com/mattn/go-tflite/delegates"
)
//go:generate stringer -type TensorType,Status -output type_string.go .
// Model wraps TfLiteModel, an in-memory representation of a .tflite flatbuffer.
type Model struct {
	m *C.TfLiteModel
}
// NewModel creates a new Model from a flatbuffer held in memory.
// Returns nil if the C API rejects the buffer.
// NOTE(review): the C.CBytes copy is never freed here — presumably the C
// model requires the buffer to stay alive for its lifetime; confirm before
// adding a free.
func NewModel(model_data []byte) *Model {
	m := C.TfLiteModelCreate(C.CBytes(model_data), C.size_t(len(model_data)))
	if m == nil {
		return nil
	}
	return &Model{m: m}
}
// NewModelFromFile creates a new Model from a .tflite file on disk.
// Returns nil if the file cannot be loaded.
func NewModelFromFile(model_path string) *Model {
	ptr := C.CString(model_path)
	defer C.free(unsafe.Pointer(ptr))
	m := C.TfLiteModelCreateFromFile(ptr)
	if m == nil {
		return nil
	}
	return &Model{m: m}
}
// Delete releases the underlying TfLiteModel. Safe to call on a nil receiver.
func (m *Model) Delete() {
	if m != nil {
		C.TfLiteModelDelete(m.m)
	}
}
// InterpreterOptions wraps TfLiteInterpreterOptions.
type InterpreterOptions struct {
	o *C.TfLiteInterpreterOptions
}
// NewInterpreterOptions creates a new InterpreterOptions.
// Returns nil if the C allocation fails.
func NewInterpreterOptions() *InterpreterOptions {
	o := C.TfLiteInterpreterOptionsCreate()
	if o == nil {
		return nil
	}
	return &InterpreterOptions{o: o}
}
// SetNumThread sets the number of threads the interpreter may use.
func (o *InterpreterOptions) SetNumThread(num_threads int) {
	C.TfLiteInterpreterOptionsSetNumThreads(o.o, C.int32_t(num_threads))
}
// SetErrorReporter installs f as the error-reporting callback; user_data is
// passed back to f on each report. (Comment previously misspelled
// "SetErrorRepoter".)
func (o *InterpreterOptions) SetErrorReporter(f func(string, interface{}), user_data interface{}) {
	C._TfLiteInterpreterOptionsSetErrorReporter(o.o, pointer.Save(&callbackInfo{
		user_data: user_data,
		f:         f,
	}))
}
// AddDelegate registers a delegate (e.g. GPU/NNAPI) with the options.
func (o *InterpreterOptions) AddDelegate(d delegates.Delegater) {
	C.TfLiteInterpreterOptionsAddDelegate(o.o, (*C.TfLiteDelegate)(d.Ptr()))
}
// Delete releases the underlying options. Safe to call on a nil receiver.
func (o *InterpreterOptions) Delete() {
	if o != nil {
		C.TfLiteInterpreterOptionsDelete(o.o)
	}
}
// Interpreter wraps TfLiteInterpreter, which executes a loaded Model.
type Interpreter struct {
	i *C.TfLiteInterpreter
}
// NewInterpreter creates a new Interpreter for model; options may be nil,
// in which case the C API's defaults apply. Returns nil on failure.
func NewInterpreter(model *Model, options *InterpreterOptions) *Interpreter {
	var o *C.TfLiteInterpreterOptions
	if options != nil {
		o = options.o
	}
	i := C.TfLiteInterpreterCreate(model.m, o)
	if i == nil {
		return nil
	}
	return &Interpreter{i: i}
}
// Delete releases the underlying interpreter. Safe to call on a nil receiver.
func (i *Interpreter) Delete() {
	if i != nil {
		C.TfLiteInterpreterDelete(i.i)
	}
}
// Tensor wraps TfLiteTensor, a view of one of the interpreter's tensors.
// The underlying memory is owned by the interpreter.
type Tensor struct {
	t *C.TfLiteTensor
}
// GetInputTensorCount returns the number of input tensors.
func (i *Interpreter) GetInputTensorCount() int {
	return int(C.TfLiteInterpreterGetInputTensorCount(i.i))
}
// GetInputTensor returns the input tensor at index, or nil if unavailable.
func (i *Interpreter) GetInputTensor(index int) *Tensor {
	t := C.TfLiteInterpreterGetInputTensor(i.i, C.int32_t(index))
	if t == nil {
		return nil
	}
	return &Tensor{t: t}
}
// Status mirrors TfLiteStatus.
type Status int

const (
	// OK corresponds to kTfLiteOk (0).
	OK Status = iota
	// Error corresponds to kTfLiteError (1). The previous declaration
	// omitted a value, and in Go an omitted expression repeats the prior
	// one ("Status = 0"), so Error compared equal to OK and every failure
	// was indistinguishable from success.
	Error
)
// ResizeInputTensor resizes the input tensor at index to the given dims.
// AllocateTensors must be called afterwards before running inference.
func (i *Interpreter) ResizeInputTensor(index int, dims []int32) Status {
	s := C.TfLiteInterpreterResizeInputTensor(i.i, C.int32_t(index), (*C.int32_t)(unsafe.Pointer(&dims[0])), C.int32_t(len(dims)))
	return Status(s)
}
// AllocateTensors allocates buffers for all tensors. Returns Error on a nil
// receiver.
func (i *Interpreter) AllocateTensors() Status {
	if i != nil {
		s := C.TfLiteInterpreterAllocateTensors(i.i)
		return Status(s)
	}
	return Error
}
// Invoke runs inference on the currently bound input tensors.
func (i *Interpreter) Invoke() Status {
	s := C.TfLiteInterpreterInvoke(i.i)
	return Status(s)
}
// GetOutputTensorCount returns the number of output tensors.
func (i *Interpreter) GetOutputTensorCount() int {
	return int(C.TfLiteInterpreterGetOutputTensorCount(i.i))
}
// GetOutputTensor returns the output tensor at index, or nil if unavailable.
func (i *Interpreter) GetOutputTensor(index int) *Tensor {
	t := C.TfLiteInterpreterGetOutputTensor(i.i, C.int32_t(index))
	if t == nil {
		return nil
	}
	return &Tensor{t: t}
}
// TensorType enumerates tensor element types, mirroring TfLiteType.
type TensorType int
const (
	NoType    TensorType = 0
	Float32   TensorType = 1
	Int32     TensorType = 2
	UInt8     TensorType = 3
	Int64     TensorType = 4
	String    TensorType = 5
	Bool      TensorType = 6
	Int16     TensorType = 7
	Complex64 TensorType = 8
	Int8      TensorType = 9
)
// Type returns the element type of the tensor.
func (t *Tensor) Type() TensorType {
	return TensorType(C.TfLiteTensorType(t.t))
}
// NumDims returns the number of dimensions.
func (t *Tensor) NumDims() int {
	return int(C.TfLiteTensorNumDims(t.t))
}
// Dim returns the size of the dimension at index.
func (t *Tensor) Dim(index int) int {
	return int(C.TfLiteTensorDim(t.t, C.int32_t(index)))
}
// Shape returns all dimension sizes as a slice.
func (t *Tensor) Shape() []int {
	shape := make([]int, t.NumDims())
	for i := 0; i < t.NumDims(); i++ {
		shape[i] = t.Dim(i)
	}
	return shape
}
// ByteSize returns the size of the tensor's buffer in bytes.
func (t *Tensor) ByteSize() uint {
	return uint(C.TfLiteTensorByteSize(t.t))
}
// Data returns a raw pointer to the tensor's buffer (owned by the
// interpreter; valid only while the interpreter is alive and allocated).
func (t *Tensor) Data() unsafe.Pointer {
	return C.TfLiteTensorData(t.t)
}
// Name returns the tensor's name from the model.
func (t *Tensor) Name() string {
	return C.GoString(C.TfLiteTensorName(t.t))
}
// QuantizationParams mirrors TfLiteQuantizationParams: real_value =
// (quantized_value - ZeroPoint) * Scale.
type QuantizationParams struct {
	Scale     float64
	ZeroPoint int
}
// QuantizationParams returns the tensor's quantization parameters.
func (t *Tensor) QuantizationParams() QuantizationParams {
	q := C.TfLiteTensorQuantizationParams(t.t)
	return QuantizationParams{
		Scale:     float64(q.scale),
		ZeroPoint: int(q.zero_point),
	}
}
// CopyFromBuffer copies ByteSize() bytes from b into the tensor. b must be a
// pointer-backed value (e.g. slice) at least ByteSize() bytes long.
func (t *Tensor) CopyFromBuffer(b interface{}) Status {
	return Status(C.TfLiteTensorCopyFromBuffer(t.t, unsafe.Pointer(reflect.ValueOf(b).Pointer()), C.size_t(t.ByteSize())))
}
// CopyToBuffer copies ByteSize() bytes from the tensor into b. b must be a
// pointer-backed value (e.g. slice) at least ByteSize() bytes long.
func (t *Tensor) CopyToBuffer(b interface{}) Status {
	return Status(C.TfLiteTensorCopyToBuffer(t.t, unsafe.Pointer(reflect.ValueOf(b).Pointer()), C.size_t(t.ByteSize())))
}
"pile_set_name": "Github"
} |
---
title: 如何:对工作流和工作流服务启用 SQL 持久性
ms.date: 03/30/2017
ms.assetid: ca7bf77f-3e5d-4b23-b17a-d0b60f46411d
ms.openlocfilehash: 5bcd37a654db35ba6e8af1b15d6c132a090b0579
ms.sourcegitcommit: 27a15a55019f6b5f2733961738babe94aec0def3
ms.translationtype: MT
ms.contentlocale: zh-CN
ms.lasthandoff: 09/15/2020
ms.locfileid: "90547748"
---
# <a name="how-to-enable-sql-persistence-for-workflows-and-workflow-services"></a>如何:对工作流和工作流服务启用 SQL 持久性
本主题介绍如何通过编程方式以及使用配置文件来配置 SQL 工作流实例存储功能,以便为工作流和工作流服务启用持久性。
Windows Server App Fabric 大大简化了配置持久性的过程。 有关详细信息,请参阅 [应用构造持久性配置](/previous-versions/appfabric/ee790848(v=azure.10))。
使用 SQL 工作流实例存储功能之前,创建一个数据库以供该功能用来持久保存工作流实例。 [!INCLUDE[netfx_current_short](../../../includes/netfx-current-short-md.md)] 安装程序将与 SQL 工作流实例存储功能相关联的 SQL 脚本文件复制到 %WINDIR%\Microsoft.NET\Framework\v4.xxx\SQL\EN 文件夹。 针对您希望 SQL 工作流实例存储用于持久保存工作流实例的 SQL Server 2005 或 SQL Server 2008 数据库,运行这些脚本文件。 首先运行 SqlWorkflowInstanceStoreSchema.sql 文件,之后运行 SqlWorkflowInstanceStoreLogic.sql 文件。
> [!NOTE]
> 若要清除此持久性数据库以获得一个全新的数据库,请按下列顺序运行 %WINDIR%\Microsoft.NET\Framework\v4.xxx\SQL\EN 中的脚本。
>
> 1. SqlWorkflowInstanceStoreSchema.sql
> 2. SqlWorkflowInstanceStoreLogic.sql
> [!IMPORTANT]
> 如果不创建持久性数据库,则当宿主尝试持久保存工作流时,SQL 工作流实例存储功能将引发与以下异常类似的异常。
>
> System.Data.SqlClient.SqlException: 找不到存储过程“System.Activities.DurableInstancing.CreateLockOwner”
下面介绍如何使用 SQL 工作流实例存储来为工作流与工作流服务启用持久性。 有关 SQL 工作流实例存储的属性的详细信息,请参阅 [Sql 工作流实例存储的属性](properties-of-sql-workflow-instance-store.md)。
## <a name="enabling-persistence-for-self-hosted-workflows-that-use-workflowapplication"></a>为使用 WorkflowApplication 的自承载工作流启用持久性
可通过使用 <xref:System.Activities.WorkflowApplication> 对象模型为以编程方式使用 <xref:System.Activities.DurableInstancing.SqlWorkflowInstanceStore> 的自承载工作流启用持久性。 以下过程包含了执行上述操作的步骤。
#### <a name="to-enable-persistence-for-self-hosted-workflows"></a>为自承载工作流启用持久性
1. 添加对 System.Activities.DurableInstancing.dll 的引用。
2. 将以下语句添加到源文件顶部的现有“using”语句后面。
```csharp
using System.Activities.DurableInstancing;
```
3. 如下面的代码示例中所示,构造一个 <xref:System.Activities.DurableInstancing.SqlWorkflowInstanceStore>,并将其分配给 <xref:System.Activities.WorkflowApplication.InstanceStore%2A> 的 <xref:System.Activities.WorkflowApplication>。
```csharp
SqlWorkflowInstanceStore store =
new SqlWorkflowInstanceStore("Server=.\\SQLEXPRESS;Initial Catalog=Persistence;Integrated Security=SSPI");
WorkflowApplication wfApp =
new WorkflowApplication(new Workflow1());
wfApp.InstanceStore = store;
```
> [!NOTE]
> 根据您的 SQL Server 版本,该连接字符串服务器的名称可能有所不同。
4. 对 <xref:System.Activities.WorkflowApplication.Persist%2A> 对象调用 <xref:System.Activities.WorkflowApplication> 方法以持久保存工作流,或者调用 <xref:System.Activities.WorkflowApplication.Unload%2A> 方法以持久保存并卸载工作流。 还可以处理由 <xref:System.Activities.WorkflowApplication.PersistableIdle%2A> 对象引发的 <xref:System.Activities.WorkflowApplication> 事件,并返回 <xref:System.Activities.PersistableIdleAction.Persist> 的适当的(<xref:System.Activities.PersistableIdleAction.Unload> 或 <xref:System.Activities.PersistableIdleAction>)成员。
```csharp
wfApp.PersistableIdle = delegate(WorkflowApplicationIdleEventArgs e)
{
return PersistableIdleAction.Persist;
};
```
> [!NOTE]
> 有关分步说明,请参阅[入门教程](getting-started-tutorial.md)中的 "[如何:创建和运行长时间运行的工作流](how-to-create-and-run-a-long-running-workflow.md)" 步骤。
## <a name="enabling-persistence-for-self-hosted-workflow-services-that-use-the-workflowservicehost"></a>为使用 WorkflowServiceHost 的自承载工作流服务启用持久性
可通过使用 <xref:System.ServiceModel.WorkflowServiceHost> 类或者 <xref:System.ServiceModel.Activities.Description.SqlWorkflowInstanceStoreBehavior> 类为以编程方式使用 <xref:System.ServiceModel.Activities.WorkflowServiceHost.DurableInstancingOptions%2A> 的自承载工作流服务启用持久性。
### <a name="using-the-sqlworkflowinstancestorebehavior-class"></a>使用 SqlWorkflowInstanceStoreBehavior 类
以下过程包含使用 <xref:System.ServiceModel.Activities.Description.SqlWorkflowInstanceStoreBehavior> 类来为自承载工作流服务启用持久性的步骤。
#### <a name="to-enable-persistence-using-sqlworkflowinstancestorebehavior"></a>使用 SqlWorkflowInstanceStoreBehavior 启用持久性
1. 添加对 System.ServiceModel.dll 的引用。
2. 将以下语句添加到源文件顶部的现有“using”语句后面。
```csharp
using System.ServiceModel.Activities.Description;
```
3. 创建一个 `WorkflowServiceHost` 实例,并为该工作流服务添加终结点。
```csharp
WorkflowServiceHost host = new WorkflowServiceHost(new CountingWorkflow(), new Uri(hostBaseAddress));
host.AddServiceEndpoint("ICountingWorkflow", new BasicHttpBinding(), "");
```
4. 构造一个 `SqlWorkflowInstanceStoreBehavior` 对象,并设置该行为对象的属性。
```csharp
SqlWorkflowInstanceStoreBehavior instanceStoreBehavior = new SqlWorkflowInstanceStoreBehavior(connectionString);
instanceStoreBehavior.HostLockRenewalPeriod = new TimeSpan(0, 0, 5);
instanceStoreBehavior.InstanceCompletionAction = InstanceCompletionAction.DeleteAll;
instanceStoreBehavior.InstanceLockedExceptionAction = InstanceLockedExceptionAction.AggressiveRetry;
instanceStoreBehavior.InstanceEncodingOption = InstanceEncodingOption.GZip;
instanceStoreBehavior.RunnableInstancesDetectionPeriod = TimeSpan.Parse("00:00:02");
host.Description.Behaviors.Add(instanceStoreBehavior);
```
5. 打开工作流服务主机。
```csharp
host.Open();
```
### <a name="using-the-durableinstancingoptions-property"></a>使用 DurableInstancingOptions 属性
应用 `SqlWorkflowInstanceStoreBehavior` 时,`DurableInstancingOptions.InstanceStore` 上的 `WorkflowServiceHost` 设置为使用配置值创建的 `SqlWorkflowInstanceStore` 对象。 如下面的代码示例中所示,你可以通过编程方式设置 <xref:System.ServiceModel.Activities.WorkflowServiceHost.DurableInstancingOptions%2A> 的 `WorkflowServiceHost` 属性(而不使用 `SqlWorkflowInstanceStoreBehavior` 类)来达到相同目的。
```csharp
workflowServiceHost.DurableInstancingOptions.InstanceStore = sqlInstanceStoreObject;
```
## <a name="enabling-persistence-for-was-hosted-workflow-services-that-use-the-workflowservicehost-using-a-configuration-file"></a>使用配置文件为使用 WorkflowServiceHost 的 WAS 承载工作流服务启用持久性
可使用配置文件为自承载或 Windows 进程激活服务 (WAS) 承载的工作流服务启用持久性。 WAS 承载的工作流服务与自承载工作流服务一样,都使用 WorkflowServiceHost。
`SqlWorkflowInstanceStoreBehavior`,一种服务行为,可用于通过 XML 配置方便地更改[SQL 工作流实例存储](sql-workflow-instance-store.md)属性。 对于 WAS 承载的工作流服务,请使用 Web.config 文件。 下面的配置示例演示如何使用配置文件中的 `sqlWorkflowInstanceStore` 行为元素来配置 SQL 工作流实例存储。
```xml
<serviceBehaviors>
<behavior name="">
<sqlWorkflowInstanceStore
connectionString="Data Source=(local);Initial Catalog=DefaultPersistenceProviderDb;Integrated Security=True;Async=true"
instanceEncodingOption="GZip | None"
instanceCompletionAction="DeleteAll | DeleteNothing"
instanceLockedExceptionAction="NoRetry | BasicRetry |AggressiveRetry"
hostLockRenewalPeriod="00:00:30"
runnableInstancesDetectionPeriod="00:00:05" />
</behavior>
</serviceBehaviors>
```
如果没有设置 `connectionString` 或 `connectionStringName` 属性的值,则 SQL 工作流实例存储将使用默认命名连接字符串 `DefaultSqlWorkflowInstanceStoreConnectionString`。
应用 `SqlWorkflowInstanceStoreBehavior` 时,`DurableInstancingOptions.InstanceStore` 上的 `WorkflowServiceHost` 设置为使用配置值创建的 `SqlWorkflowInstanceStore` 对象。 可以通过编程方式将 `SqlWorkflowInstanceStore` 与 `WorkflowServiceHost` 一起使用,而不使用服务行为元素来达到相同目的。
```csharp
workflowServiceHost.DurableInstancingOptions.InstanceStore = sqlInstanceStoreObject;
```
> [!IMPORTANT]
> 建议不要在 Web.config 文件中存储敏感信息,如用户名和密码。 如果在 Web.config 文件中存储了敏感信息,应使用文件系统访问控制列表 (ACL) 来确保安全访问 Web.config 文件。 此外,还可以在 [使用受保护配置加密配置信息](/previous-versions/aspnet/53tyfkaw(v=vs.100))中所述,保护配置文件中的配置值。
### <a name="machineconfig-elements-related-to-the-sql-workflow-instance-store-feature"></a>与 SQL 工作流实例存储功能相关的 Machine.config 元素
[!INCLUDE[netfx_current_short](../../../includes/netfx-current-short-md.md)] 安装将以下与 SQL 工作流实例存储功能相关的元素添加到 Machine.config 文件中:
- 将以下行为扩展元素添加到 Machine.config 文件中,以便可以使用 \<sqlWorkflowInstanceStore> 配置文件中的服务行为元素来为服务配置持久性。
```xml
<configuration>
<system.serviceModel>
<extensions>
<behaviorExtensions>
<add name="sqlWorkflowInstanceStore" type="System.Activities.DurableInstancing.SqlWorkflowInstanceStoreElement, System.Activities.DurableInstancing, Version=4.0.0.0, Culture=neutral, PublicKeyToken=31bf3856ad364e35" />
</behaviorExtensions>
</extensions>
</system.serviceModel>
</configuration>
```
| {
"pile_set_name": "Github"
} |
'use strict'
const path = require('path')
const config = require('../config')
// extract-text-webpack-plugin can pull specific text out of a bundle and
// write it to a separate file; here it is used to extract the css styles
const ExtractTextPlugin = require('extract-text-webpack-plugin')
const packageConfig = require('../package.json')
// 资源文件的存放路径
exports.assetsPath = function(_path) {
const assetsSubDirectory = process.env.NODE_ENV === 'production' ?
config.build.assetsSubDirectory :
config.dev.assetsSubDirectory
return path.posix.join(assetsSubDirectory, _path)
}
// Generate the loader configurations for css, sass, scss and the other
// languages used to author styles.
exports.cssLoaders = function(options) {
  options = options || {}

  const cssLoader = {
    loader: 'css-loader',
    options: {
      sourceMap: options.sourceMap
    }
  }

  const postcssLoader = {
    loader: 'postcss-loader',
    options: {
      sourceMap: options.sourceMap
    }
  }

  // generate loader string to be used with extract text plugin
  // Builds the loader chain for one style language, optionally wrapped by
  // extract-text-webpack-plugin.
  function generateLoaders(loader, loaderOptions) {
    const loaders = options.usePostCSS ? [cssLoader, postcssLoader] : [cssLoader]

    // For non-css languages, append the pre-processor loader with its options.
    // e.g. generateLoaders('less') pushes a less-loader here; less-loader
    // first compiles less to css, which css-loader then processes.
    // sass, scss and the other languages go through the same pipeline.
    if (loader) {
      loaders.push({
        loader: loader + '-loader',
        options: Object.assign({}, loaderOptions, {
          sourceMap: options.sourceMap
        })
      })
    }

    // Extract CSS when that option is specified
    // (which is the case during production build)
    if (options.extract) {
      // configure extract-text-webpack-plugin to pull the styles out into files
      return ExtractTextPlugin.extract({
        use: loaders,
        fallback: 'vue-style-loader'
      })
    } else {
      // no extraction needed: vue-style-loader plus the style loaders handle
      // the styles inside <style> blocks at runtime
      return ['vue-style-loader'].concat(loaders)
    }
  }

  // https://vue-loader.vuejs.org/en/configurations/extract-css.html
  // Map each style-authoring language to its loader chain.
  return {
    css: generateLoaders(),
    postcss: generateLoaders(),
    less: generateLoaders('less'),
    sass: generateLoaders('sass', { indentedSyntax: true }),
    scss: generateLoaders('sass'),
    stylus: generateLoaders('stylus'),
    styl: generateLoaders('stylus')
  }
}
// Generate loaders for standalone style files (outside of .vue)
// 生成处理单独的.css、.sass、.scss等样式文件的规则
exports.styleLoaders = function(options) {
const output = []
const loaders = exports.cssLoaders(options)
for (const extension in loaders) {
const loader = loaders[extension]
output.push({
test: new RegExp('\\.' + extension + '$'),
use: loader
})
}
return output
}
// 报错通知
exports.createNotifierCallback = () => {
const notifier = require('node-notifier')
return (severity, errors) => {
if (severity !== 'error') return
const error = errors[0]
const filename = error.file && error.file.split('!').pop()
notifier.notify({
title: packageConfig.name,
message: severity + ': ' + error.name,
subtitle: filename || '',
icon: path.join(__dirname, 'logo.png')
})
}
}
| {
"pile_set_name": "Github"
} |
using System;
using System.Collections.Generic;
using System.Data;
using System.Drawing;
using System.Linq;
using System.Web;
using System.Web.UI;
using System.Web.UI.HtmlControls;
using Microsoft.AspNet.FriendlyUrls;
using System.Web.UI.WebControls;
using BugNET.BLL;
using BugNET.Entities;
using BugNET.Common;
using BugNET.UserInterfaceLayer;
namespace BugNET.Projects
{
/// <summary>
/// Code-behind for the project change log page: shows each milestone of a
/// project together with the closed issues it contains.
/// </summary>
public partial class ChangeLog : BasePage
{
    /// <summary>
    /// Handles the Load event of the Page control.
    /// </summary>
    /// <param name="sender">The source of the event.</param>
    /// <param name="e">The <see cref="T:System.EventArgs"/> instance containing the event data.</param>
    protected void Page_Load(object sender, EventArgs e)
    {
        if(!Page.IsPostBack)
        {
            try
            {
                // Friendly URLs carry the project id in the first segment.
                IList<string> segments = Request.GetFriendlyUrlSegments();
                ProjectId = Int32.Parse(segments[0]);
            }
            catch
            {
                // Fall back to the legacy ?pid= query string parameter.
                ProjectId = Request.QueryString.Get("pid", 0);
            }

            // If don't know project or issue then redirect to something missing page
            if(ProjectId == 0)
            {
                ErrorRedirector.TransferToSomethingMissingPage(Page);
                return;
            }

            var p = ProjectManager.GetById(ProjectId);

            if (p == null || p.Disabled)
            {
                ErrorRedirector.TransferToSomethingMissingPage(Page);
                return;
            }

            ltProject.Text = p.Name;
            litProjectCode.Text = p.Code;

            // Initial UI state: the active mode's link button is disabled and
            // drawn black, the inactive one is an enabled blue link.
            PreviousMilestones.ForeColor = Color.Black;
            PreviousMilestones.Enabled = false;
            Linkbutton5.ForeColor = ColorTranslator.FromHtml("#00489E");
            Linkbutton5.Enabled = true;

            Linkbutton7.ForeColor = Color.Black;
            Linkbutton7.Enabled = false;
            Linkbutton9.ForeColor = ColorTranslator.FromHtml("#00489E");
            Linkbutton9.Enabled = true;

            // Defaults: latest-milestones view, milestones descending,
            // issues sorted by id descending.
            ViewMode = 1;
            SortMilestonesAscending = false;
            SortHeader = "Id";
            SortAscending = false;
            SortField = "iv.[IssueId]";

            BindChangeLog();
        }

        // The ExpandIssuePaths method is called to handle
        // the SiteMapResolve event.
        // SiteMap.SiteMapResolve += ExpandProjectPaths;
    }

    /// <summary>
    /// Handles the Click event of the SwitchViewMode control.
    /// Toggles between the latest-milestones view (mode 1) and the
    /// all-milestones view (mode 2).
    /// </summary>
    /// <param name="sender">The source of the event.</param>
    /// <param name="e">The <see cref="System.EventArgs"/> instance containing the event data.</param>
    protected void SwitchViewMode_Click(object sender, EventArgs e)
    {
        var button = sender as LinkButton;

        if (button != null && button.CommandArgument == "1")
        {
            PreviousMilestones.ForeColor = Color.Black;
            PreviousMilestones.Enabled = false;
            Linkbutton5.ForeColor = ColorTranslator.FromHtml("#00489E");
            Linkbutton5.Enabled = true;
            ViewMode = 1;
        }
        else
        {
            PreviousMilestones.ForeColor = ColorTranslator.FromHtml("#00489E");
            PreviousMilestones.Enabled = true;
            Linkbutton5.Enabled = false;
            Linkbutton5.ForeColor = Color.Black;
            ViewMode = 2;
        }

        BindChangeLog();
    }

    /// <summary>
    /// Handles the Click event of the SortMilestone control.
    /// The clicked button's CommandArgument ("True"/"False") selects the
    /// milestone sort direction.
    /// </summary>
    /// <param name="sender">The source of the event.</param>
    /// <param name="e">The <see cref="System.EventArgs"/> instance containing the event data.</param>
    protected void SortMilestone_Click(object sender, EventArgs e)
    {
        var button = sender as LinkButton;
        var ascending = true;

        if(button != null)
        {
            ascending = Boolean.Parse(button.CommandArgument);
        }

        if (ascending)
        {
            Linkbutton9.ForeColor = Color.Black;
            Linkbutton9.Enabled = false;
            Linkbutton7.ForeColor = ColorTranslator.FromHtml("#00489E");
            Linkbutton7.Enabled = true;
        }
        else
        {
            Linkbutton9.ForeColor = ColorTranslator.FromHtml("#00489E");
            Linkbutton9.Enabled = true;
            Linkbutton7.Enabled = false;
            Linkbutton7.ForeColor = Color.Black;
        }

        SortMilestonesAscending = ascending;
        BindChangeLog();
    }

    /// <summary>
    /// Gets or sets the issue sort field (a SQL column expression, persisted
    /// in ViewState).  Assigning the current field again toggles the sort
    /// direction.
    /// </summary>
    /// <value>The sort field.</value>
    string SortField
    {
        get { return ViewState.Get("SortField", string.Empty); }
        set
        {
            if(value == SortField)
            {
                // same as current sort field, toggle sort direction
                SortAscending = !SortAscending;
            }
            ViewState.Set("SortField", value);
        }
    }

    /// <summary>
    /// Gets or sets a value indicating whether [sort ascending].
    /// </summary>
    /// <value><c>true</c> if [sort ascending]; otherwise, <c>false</c>.</value>
    bool SortAscending
    {
        get { return ViewState.Get("SortAscending", true); }
        set { ViewState.Set("SortAscending", value); }
    }

    /// <summary>
    /// Gets or sets the view mode (persisted in ViewState):
    /// 1 = latest five milestones only, 2 = all milestones.
    /// </summary>
    /// <value>The view mode.</value>
    int ViewMode
    {
        get { return ViewState.Get("ViewMode", 1); }
        set { ViewState.Set("ViewMode", value); }
    }

    /// <summary>
    /// Gets or sets the name of the column header currently sorted on
    /// (persisted in ViewState; used to locate the header cell "td{name}").
    /// </summary>
    string SortHeader
    {
        get { return ViewState.Get("SortHeader", string.Empty); }
        set { ViewState.Set("SortHeader", value); }
    }

    /// <summary>
    /// Handles the Click event of an issue-column sort header link.
    /// CommandArgument carries the SQL sort field, CommandName the header name.
    /// </summary>
    /// <param name="sender">The source of the event.</param>
    /// <param name="e">The <see cref="System.EventArgs"/> instance containing the event data.</param>
    protected void SortIssueClick(object sender, EventArgs e)
    {
        var button = sender as LinkButton;

        if (button != null)
        {
            SortField = button.CommandArgument;
            SortHeader = button.CommandName;
        }

        BindChangeLog();
    }

    /// <summary>
    /// Gets or sets a value indicating whether [sort milestones ascending].
    /// </summary>
    /// <value>
    /// <c>true</c> if [sort milestones ascending]; otherwise, <c>false</c>.
    /// </value>
    bool SortMilestonesAscending
    {
        get { return ViewState.Get("SortMilestonesAscending", true); }
        set { ViewState.Set("SortMilestonesAscending", value); }
    }

    /// <summary>
    /// Binds the change log: loads the project's milestones (all of them, or
    /// only the first five when ViewMode is 1) into the repeater.
    /// </summary>
    private void BindChangeLog()
    {
        var ascending = SortMilestonesAscending ? "" : " desc";
        var milestones = MilestoneManager.GetByProjectId(ProjectId).Sort("SortOrder" + ascending).ToList();

        ChangeLogRepeater.DataSource = ViewMode == 1 ?
            milestones.Take(5) :
            milestones;

        ChangeLogRepeater.DataBind();
    }

    /// <summary>
    /// Handles the ItemCreated event of the rptChangeLog control.
    /// Adds a sort-direction arrow image to the currently sorted column header.
    /// </summary>
    /// <param name="sender">The source of the event.</param>
    /// <param name="e">The <see cref="System.Web.UI.WebControls.RepeaterItemEventArgs"/> instance containing the event data.</param>
    protected void rptChangeLog_ItemCreated(object sender, RepeaterItemEventArgs e)
    {
        switch (e.Item.ItemType)
        {
            case ListItemType.Header:
                foreach (Control c in e.Item.Controls)
                {
                    // only the header cell named "td{SortHeader}" gets the arrow
                    if (c.GetType() != typeof (HtmlTableCell) || c.ID != string.Format("td{0}", SortHeader)) continue;
                    var img = new System.Web.UI.WebControls.Image
                    {
                        ImageUrl = string.Format("~/images/{0}.png", (SortAscending ? "bullet_arrow_up" : "bullet_arrow_down")),
                        CssClass = "icon"
                    };
                    // append the dynamically chosen arrow image to the cell
                    c.Controls.Add(img);
                }
                break;
        }
    }

    /// <summary>
    /// Expands the project paths.
    /// </summary>
    /// <param name="sender">The sender.</param>
    /// <param name="e">The <see cref="System.Web.SiteMapResolveEventArgs"/> instance containing the event data.</param>
    /// <returns></returns>
    private SiteMapNode ExpandProjectPaths(Object sender, SiteMapResolveEventArgs e)
    {
        var currentNode = SiteMap.CurrentNode.Clone(true);
        var tempNode = currentNode;

        // The current node, and its parents, can be modified to include
        // dynamic query string information relevant to the currently
        // executing request.
        if (ProjectId != 0)
        {
            tempNode.Url = tempNode.Url + "?pid=" + ProjectId.ToString();
        }

        if ((null != (tempNode = tempNode.ParentNode)) &&
            (ProjectId != 0))
        {
            tempNode.Url = tempNode.Url + "?pid=" + ProjectId.ToString();
        }

        return currentNode;
    }

    /// <summary>
    /// Handles the Unload event of the Page control.
    /// </summary>
    /// <param name="sender">The source of the event.</param>
    /// <param name="e">The <see cref="System.EventArgs"/> instance containing the event data.</param>
    protected void Page_Unload(object sender, EventArgs e)
    {
        //remove the event handler
        //SiteMap.SiteMapResolve -= ExpandProjectPaths;
    }

    /// <summary>
    /// Handles the ItemDataBound event of the ChangeLogRepeater control.
    /// Populates one milestone row: notes, release date, the nested list of
    /// closed issues, and the milestone/issue-count links.
    /// </summary>
    /// <param name="sender">The source of the event.</param>
    /// <param name="e">The <see cref="System.Web.UI.WebControls.RepeaterItemEventArgs"/> instance containing the event data.</param>
    protected void ChangeLogRepeater_ItemDataBound(object sender, RepeaterItemEventArgs e)
    {
        if (e.Item.ItemType != ListItemType.Item && e.Item.ItemType != ListItemType.AlternatingItem) return;

        var m = (Milestone)e.Item.DataItem;

        if (!string.IsNullOrWhiteSpace(m.Notes))
        {
            ((Label)e.Item.FindControl("MilestoneNotes")).Text = " - " + m.Notes;
        }

        ((HyperLink)e.Item.FindControl("ReleaseNotes")).NavigateUrl = string.Format(Page.ResolveUrl("~/Projects/ReleaseNotes.aspx") + "?pid={0}&m={1}", ProjectId, m.Id);

        if (m.ReleaseDate.HasValue)
        {
            var date = (DateTime)m.ReleaseDate;
            ((Label)e.Item.FindControl("lblReleaseDate")).Text = string.Format(GetLocalResourceObject("ReleasedOn").ToString(), date.ToShortDateString());
        }
        else
        {
            ((Label)e.Item.FindControl("lblReleaseDate")).Text = GetLocalResourceObject("NoReleaseDate").ToString();
        }

        var list = e.Item.FindControl("IssuesList") as Repeater;

        if(list == null) return;

        // only closed, enabled issues belonging to this milestone
        var queryClauses = new List<QueryClause>
        {
            new QueryClause("AND", "iv.[IssueMilestoneId]", "=", m.Id.ToString(), SqlDbType.Int),
            new QueryClause("AND", "iv.[IsClosed]", "=", "1", SqlDbType.Int),
            new QueryClause("AND", "iv.[Disabled]", "=", "0", SqlDbType.Int)
        };

        var sortString = (SortAscending) ? "ASC" : "DESC";

        var sortList = new List<KeyValuePair<string, string>>
        {
            new KeyValuePair<string, string>(SortField, sortString)
        };

        var issueList = IssueManager.PerformQuery(queryClauses, sortList, ProjectId);

        // private issue check
        issueList = IssueManager.StripPrivateIssuesForRequestor(issueList, Security.GetUserName()).ToList();

        if (issueList.Count > 0)
        {
            list.DataSource = issueList;
            list.DataBind();
        }
        else
            e.Item.Visible = false;

        ((HyperLink)e.Item.FindControl("IssuesCount")).NavigateUrl = string.Format(Page.ResolveUrl("~/Issues/IssueList.aspx") + "?pid={0}&m={1}&s=-1", ProjectId, m.Id);

        // Set language-specific declination
        ((HyperLink)e.Item.FindControl("IssuesCount")).Text = GetIssuesCountText(issueList.Count);

        ((HyperLink)e.Item.FindControl("MilestoneLink")).NavigateUrl = string.Format(Page.ResolveUrl("~/Issues/IssueList.aspx") + "?pid={0}&m={1}&s=-1", ProjectId, m.Id);
        ((HyperLink)e.Item.FindControl("MilestoneLink")).Text = m.Name;
    }

    /// <summary>
    /// Formats an issue count for display, delegating to a culture-specific
    /// declination routine where one exists (currently only Russian).
    /// </summary>
    private string GetIssuesCountText(int issuesCount)
    {
        if (System.Threading.Thread.CurrentThread.CurrentUICulture.Name == "ru-RU")
        {
            return GetIssuesCountText_ru_RU(issuesCount);
        }
        return issuesCount.ToString() + GetLocalResourceObject("Issues").ToString();
    }

    /// <summary>
    /// Russian pluralization of "issues": counts ending in 11-19 use the
    /// third form; otherwise counts ending in 1 use the first form, counts
    /// ending in 2-4 the second, and everything else the third.
    /// </summary>
    private string GetIssuesCountText_ru_RU(int issuesCount)
    {
        int n100 = issuesCount % 100;
        string resName;
        if (n100 >= 11 && n100 <= 19)
            resName = "RU_Issues3";
        else
        {
            int n10 = issuesCount % 10;
            if (n10 == 1)
                resName = "RU_Issues1";
            else if (n10 == 2 || n10 == 3 || n10 == 4)
                resName = "RU_Issues2";
            else
                resName = "RU_Issues3";
        }
        return String.Format(GetLocalResourceObject(resName).ToString(), issuesCount);
    }
}
}
| {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="utf-8"?>
<!-- Android string resources for the StreamLocatorClient sample app. -->
<resources>
    <string name="hello">Hello World, Click Me!</string>
    <string name="app_name">StreamLocatorClient</string>
</resources>
| {
"pile_set_name": "Github"
} |
{
"extends":"http://json-schema.org/hyper-schema",
"description":"A schema for schema interface definitions that describe programmatic class structures using JSON schema syntax",
"properties":{
"methods":{
"type":"object",
"description":"This defines the set of methods available to the class instances",
"additionalProperties":{
"type":"object",
"description":"The definition of the method",
"properties":{
"parameters":{
"type":"array",
"description":"The set of parameters that should be passed to the method when it is called",
"items":{"$ref":"#"},
"required": true
},
"returns":{"$ref":"#"}
}
}
}
}
}
| {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="UTF-8"?>
<Scheme
LastUpgradeVersion = "0830"
version = "1.3">
<BuildAction
parallelizeBuildables = "NO"
buildImplicitDependencies = "YES">
<BuildActionEntries>
<BuildActionEntry
buildForTesting = "YES"
buildForRunning = "YES"
buildForProfiling = "YES"
buildForArchiving = "YES"
buildForAnalyzing = "YES">
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "2D2A28121D9B038B00D4039D"
BuildableName = "libReact.a"
BlueprintName = "React-tvOS"
ReferencedContainer = "container:../node_modules/react-native/React/React.xcodeproj">
</BuildableReference>
</BuildActionEntry>
<BuildActionEntry
buildForTesting = "YES"
buildForRunning = "YES"
buildForProfiling = "YES"
buildForArchiving = "YES"
buildForAnalyzing = "YES">
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "2D02E47A1E0B4A5D006451C7"
BuildableName = "Example-tvOS.app"
BlueprintName = "Example-tvOS"
ReferencedContainer = "container:Example.xcodeproj">
</BuildableReference>
</BuildActionEntry>
<BuildActionEntry
buildForTesting = "YES"
buildForRunning = "YES"
buildForProfiling = "NO"
buildForArchiving = "NO"
buildForAnalyzing = "YES">
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "2D02E48F1E0B4A5D006451C7"
BuildableName = "Example-tvOSTests.xctest"
BlueprintName = "Example-tvOSTests"
ReferencedContainer = "container:Example.xcodeproj">
</BuildableReference>
</BuildActionEntry>
</BuildActionEntries>
</BuildAction>
<TestAction
buildConfiguration = "Debug"
selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
shouldUseLaunchSchemeArgsEnv = "YES">
<Testables>
<TestableReference
skipped = "NO">
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "2D02E48F1E0B4A5D006451C7"
BuildableName = "Example-tvOSTests.xctest"
BlueprintName = "Example-tvOSTests"
ReferencedContainer = "container:Example.xcodeproj">
</BuildableReference>
</TestableReference>
</Testables>
<MacroExpansion>
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "2D02E47A1E0B4A5D006451C7"
BuildableName = "Example-tvOS.app"
BlueprintName = "Example-tvOS"
ReferencedContainer = "container:Example.xcodeproj">
</BuildableReference>
</MacroExpansion>
<AdditionalOptions>
</AdditionalOptions>
</TestAction>
<LaunchAction
buildConfiguration = "Debug"
selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
launchStyle = "0"
useCustomWorkingDirectory = "NO"
ignoresPersistentStateOnLaunch = "NO"
debugDocumentVersioning = "YES"
debugServiceExtension = "internal"
allowLocationSimulation = "YES">
<BuildableProductRunnable
runnableDebuggingMode = "0">
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "2D02E47A1E0B4A5D006451C7"
BuildableName = "Example-tvOS.app"
BlueprintName = "Example-tvOS"
ReferencedContainer = "container:Example.xcodeproj">
</BuildableReference>
</BuildableProductRunnable>
<AdditionalOptions>
</AdditionalOptions>
</LaunchAction>
<ProfileAction
buildConfiguration = "Release"
shouldUseLaunchSchemeArgsEnv = "YES"
savedToolIdentifier = ""
useCustomWorkingDirectory = "NO"
debugDocumentVersioning = "YES">
<BuildableProductRunnable
runnableDebuggingMode = "0">
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "2D02E47A1E0B4A5D006451C7"
BuildableName = "Example-tvOS.app"
BlueprintName = "Example-tvOS"
ReferencedContainer = "container:Example.xcodeproj">
</BuildableReference>
</BuildableProductRunnable>
</ProfileAction>
<AnalyzeAction
buildConfiguration = "Debug">
</AnalyzeAction>
<ArchiveAction
buildConfiguration = "Release"
revealArchiveInOrganizer = "YES">
</ArchiveAction>
</Scheme>
| {
"pile_set_name": "Github"
} |
;*******************************************************************************
;* *
;* copyright (c) 1988 kent state univ. kent, ohio 44242 *
;* *
;*******************************************************************************
;; Load the translation macros once (wrs etc. are defined by convmac).
(when (null (fboundp 'wrs)) (load "convmac.lisp"))

(declare-top (special *gentran-dir tempvartype* tempvarname* tempvarnum* genstmtno*
                      genstmtincr* *symboltable* *instk* *stdin* *currin* *outstk*
                      *stdout* *currout* *outchanl* *lispdefops* *lisparithexpops*
                      *lisplogexpops* *lispstmtops* *lispstmtgpops*))

;; ----------- ;;
;;  segmnt.l   ;;    segmentation module
;; ----------- ;;
;; Splits LISP forms whose printed representation would exceed
;; maxexpprintlen* into sequences of shorter assignments to generated
;; temporary variables, so the target-language printer never emits an
;; over-long statement.

(declare-top (special *gentranopt *gentranlang maxexpprintlen*))

;;                                       ;;
;;  1. segmentation routines             ;;
;;                                       ;;
(defun seg (forms)
  ;; Top-level segmentation driver: map over a list of translated forms
  ;; and return the list with every too-long form segmented.
  ;; exp    --+--> exp                                               ;
  ;;          +--> (assign    assign    ... assign      exp   )      ;
  ;;                     (1)       (2)        (n-1)       (n)        ;
  ;; stmt   --+--> stmt                                              ;
  ;;          +--> stmtgp                                            ;
  ;; stmtgp -----> stmtgp                                            ;
  ;; def    -----> def                                               ;
  (foreach f in forms collect
           (cond ((lispexpp f)
                  (cond ((toolongexpp f)
                         ;; type of the temporaries is unknown at top level
                         (segexp f 'unknown))
                        (t
                         f)))
                 ((lispstmtp f)
                  (segstmt f))
                 ((lispstmtgpp f)
                  (cond ((toolongstmtgpp f)
                         (seggroup f))
                        (t
                         f)))
                 ((lispdefp f)
                  (cond ((toolongdefp f)
                         (segdef f))
                        (t
                         f)))
                 (t
                  f))))
(defun segexp (exp type)
  ;; Segment exp into temp-variable assignments followed by the residual
  ;; expression, in execution order.
  ;; exp --> (assign    assign    ... assign      exp   )            ;
  ;;               (1)       (2)        (n-1)       (n)              ;
  (reverse (segexp1 exp type)))

(defun segexp1 (exp type)
  ;; Like segexp but returns the residual expression consed onto the
  ;; generated assignments in reverse execution order.
  ;; exp --> (exp    assign      assign      ... assign   )          ;
  ;;             (n)       (n-1)       (n-2)          (1)            ;
  (prog (res tempvarname)
        (setq tempvarname tempvarname*)
        (cond (*gentranopt
               ;; optimizer on: normalize the temp-variable name
               ;; (strip the $ and double the first character)
               (setq tempvarname (stripdollar1 tempvarname))
               (setq tempvarname* (explode2 tempvarname))
               (setq tempvarname* (compress (cons (car tempvarname*)
                                                  tempvarname*)))))
        (setq res (segexp2 exp type))
        (recurunmark res)
        (setq tempvarname* tempvarname)
        ;; if the residual is just the last temporary, drop that final
        ;; assignment and expose its right-hand side directly
        (cond ((equal (car res) (cadadr res))
               (progn
                 (setq res (cdr res))
                 (rplaca res (caddar res)))))
        (return res)))
(defun segexp2 (exp type)
  ;; Workhorse: recursively split exp, emitting an assignment to a fresh
  ;; temporary whenever an accumulated subexpression would print longer
  ;; than maxexpprintlen*.
  ;; exp --> (exp    assign      assign      ... assign   )          ;
  ;;             (n)       (n-1)       (n-2)          (1)            ;
  (prog (expn assigns newassigns unops op termlist var tmp)
        (setq expn exp)
        ;; peel off unary operators so op below is the n-ary operator
        (while (equal (length expn) 2)
               (progn
                 (setq unops (cons (car expn) unops))
                 (setq expn (cadr expn))))
        (setq op (car expn))
        (foreach term in (cdr expn) do
                 (progn
                   (cond ((toolongexpp term)
                          ;; segment the operand first, keeping its assignments
                          (progn
                            (setq tmp (segexp2 term type))
                            (setq term (car tmp))
                            (setq newassigns (cdr tmp))))
                         (t
                          (setq newassigns 'nil)))
                   (cond ((and (toolongexpp (cons op (cons term termlist)))
                               termlist
                               (or (> (length termlist) 1)
                                   (listp (car termlist))))
                          ;; accumulated terms are too long: flush them into
                          ;; a temporary and restart the term list with it
                          (progn
                            (recurunmark termlist)
                            (setq var (or var (tempvar type)))
                            (markvar var)
                            (setq assigns
                                  (cons (mkassign var
                                                  (cond ((onep (length termlist))
                                                         (car termlist))
                                                        (t
                                                         (cons op termlist))))
                                        assigns))
                            (setq termlist (list var term))))
                         (t
                          (setq termlist (aconc termlist term))))
                   (setq assigns (append newassigns assigns))))
        (setq expn (cond ((onep (length termlist))
                          (car termlist))
                         (t
                          (cons op termlist))))
        ;; re-apply the unary operators stripped above
        (while unops
               (progn
                 (setq expn (list (car unops) expn))
                 (setq unops (cdr unops))))
        ;; nothing was split: force one assignment so the caller always
        ;; receives at least (var assign)
        (cond ((equal expn exp)
               (progn
                 (recurunmark expn)
                 (setq var (or var (tempvar type)))
                 (markvar var)
                 (setq assigns (list (mkassign var expn)))
                 (setq expn var))))
        (return (cons expn assigns))))
(defun segstmt (stmt)
  ;; Dispatch on statement type; statements that must be split are
  ;; returned as statement groups.
  ;; assign --+--> assign                                            ;
  ;;          +--> stmtgp                                            ;
  ;; cond   --+--> cond                                              ;
  ;;          +--> stmtgp                                            ;
  ;; do     --+--> do                                                ;
  ;;          +--> stmtgp                                            ;
  ;; return --+--> return                                            ;
  ;;          +--> stmtgp                                            ;
  (cond ((lispassignp stmt)
         (cond ((toolongassignp stmt)
                (segassign stmt))
               (t
                stmt)))
        ((lispcondp stmt)
         (cond ((toolongcondp stmt)
                (segcond stmt))
               (t
                stmt)))
        ((lispdop stmt)
         (cond ((toolongdop stmt)
                (segdo stmt))
               (t
                stmt)))
        ((lispreturnp stmt)
         (cond ((toolongreturnp stmt)
                (segreturn stmt))
               (t
                stmt)))
        (t
         stmt)))

(defun segassign (stmt)
  ;; assign --> stmtgp                                               ;
  ;; Split the right-hand side; the final assignment targets the
  ;; original variable and the sequence is wrapped in a statement group.
  (prog (var exp type)
        (setq var (cadr stmt))
        (setq type (getvartype var))
        (setq exp (caddr stmt))
        (setq stmt (segexp1 exp type))
        (rplaca stmt (mkassign var (car stmt)))
        (return (mkstmtgp 0 (reverse stmt)))))
(defun segcond (cond)
  ;; cond --+--> cond                                                ;
  ;;        +--> stmtgp                                              ;
  ;; Segment each predicate and each consequent; temporaries generated
  ;; for the predicates are hoisted in front of the cond inside a
  ;; statement group.
  (prog (tassigns res markedvars type)
        ;; predicate temporaries are boolean: int in C, logical elsewhere
        (cond ((eq *gentranlang 'c)
               (setq type 'int))
              (t
               (setq type 'logical)))
        (while (setq cond (cdr cond))
               (prog (exp stmt)
                     (cond ((toolongexpp (setq exp (caar cond)))
                            (progn
                              (setq exp (segexp1 exp type))
                              (setq tassigns (append (cdr exp) tassigns))
                              (setq exp (car exp))
                              (markvar exp)
                              (setq markedvars (cons exp markedvars)))))
                     (setq stmt (foreach st in (cdar cond) collect
                                         (segstmt st)))
                     (setq res (cons (cons exp stmt) res))))
        (recurunmark markedvars)
        (return (cond (tassigns
                       (mkstmtgp 0
                                 (reverse (cons (mkcond (reverse res))
                                                tassigns))))
                      (t
                       (mkcond (reverse res)))))))
(defun segdo (stmt)
  ;; do --+--> do                                                    ;
  ;;      +--> stmtgp                                                ;
  ;; Segment the iteration variable's init and next expressions, the
  ;; exit condition, and the loop body.  Init temporaries go in front
  ;; of the loop; next-expression temporaries are appended to the body.
  (prog (tassigns var initexp nextexp exitcond body markedvars type)
        (setq body (cdddr stmt))
        (cond ((setq var (cadr stmt))
               (progn
                 (cond ((toolongexpp (setq initexp (cadar var)))
                        (progn
                          (setq type (getvartype (caar var)))
                          (setq initexp (segexp1 initexp type))
                          (setq tassigns (cdr initexp))
                          (setq initexp (car initexp))
                          (markvar initexp)
                          (setq markedvars (cons initexp markedvars)))))
                 (cond ((toolongexpp (setq nextexp (caddar var)))
                        (progn
                          (setq type (getvartype (caar var)))
                          (setq nextexp (segexp1 nextexp type))
                          ;; next-expression temporaries must be recomputed
                          ;; each iteration, so they go at the end of the body
                          (setq body (append body (reverse (cdr nextexp))))
                          (setq nextexp (car nextexp))
                          (markvar nextexp)
                          (setq markedvars (cons nextexp markedvars)))))
                 (setq var (list (list (caar var) initexp nextexp))))))
        (cond ((toolongexpp (car (setq exitcond (caddr stmt))))
               (prog (texps ltype)
                     (cond ((eq *gentranlang 'c)
                            (setq ltype 'int))
                           (t
                            (setq ltype 'logical)))
                     (setq texps (segexp1 (car exitcond) ltype))
                     (markvar (car texps))
                     (setq markedvars (cons (car texps) markedvars))
                     (rplaca exitcond (car texps))
                     ;; turn each exit-condition assignment into an extra
                     ;; iteration-variable clause so it is re-evaluated
                     ;; every time around the loop
                     (foreach texp in (reverse (cdr texps)) do
                              (progn
                                (setq texp (reverse texp))
                                (setq var
                                      (cons (cdr (reverse (cons (car texp)
                                                                texp)))
                                            var))))
                     (setq var (reverse var)))))
        (setq body (foreach st in body collect (segstmt st)))
        (recurunmark markedvars)
        (return (cond (tassigns
                       (mkstmtgp 0 (reverse (cons (mkdo var exitcond body)
                                                  tassigns))))
                      (t
                       (mkdo var exitcond body))))))
(defun segreturn (ret)
  ;; return --> stmtgp                                               ;
  ;; Split the returned expression; the final return uses the residual.
  (progn
    (setq ret (segexp1 (cadr ret) 'unknown))
    (rplaca ret (mkreturn (car ret)))
    (mkstmtgp 0 (reverse ret))))

(defun seggroup (stmtgp)
  ;; stmtgp --> stmtgp                                               ;
  ;; Segment every statement in the group, preserving prog local
  ;; variable declarations when present.
  (prog (locvars res)
        (cond ((equal (car stmtgp) 'prog)
               (progn
                 (setq locvars (cadr stmtgp))
                 (setq stmtgp (cdr stmtgp))))
              (t
               (setq locvars 0)))
        (while (setq stmtgp (cdr stmtgp))
               (setq res (cons (segstmt (car stmtgp)) res)))
        (return (mkstmtgp locvars (reverse res)))))

(defun segdef (def)
  ;; def --> def                                                     ;
  ;; Rebuild a function definition with each body statement segmented.
  (mkdef (cadr def)
         (caddr def)
         (foreach stmt in (cdddr def) collect (segstmt stmt))))
;;                                                 ;;
;;  2. long statement & expression predicates      ;;
;;                                                 ;;

(defun toolongexpp (exp)
  ;; Does exp print longer than the user-settable limit?
  (> (numprintlen exp) maxexpprintlen*))

(defun toolongstmtp (stmt)
  ;; Is stmt (of any statement type) too long to print as written?
  (cond ((atom stmt) nil) ;; pwang 11/11/86
        ((lispstmtp stmt)
         (cond ((lispcondp stmt)
                (toolongcondp stmt))
               ((lispassignp stmt)
                (toolongassignp stmt))
               ((lispreturnp stmt)
                (toolongreturnp stmt))
               ((lispdop stmt)
                (toolongdop stmt))
               (t
                ;; generic statement: too long if any subexpression is
                (eval (cons 'or
                            (foreach exp in stmt collect (toolongexpp exp)))))))
        (t
         (toolongstmtgpp stmt))))

(defun toolongassignp (assign)
  ;; An assignment is too long if its right-hand side is.
  (toolongexpp (caddr assign)))

(defun toolongcondp (cond)
  ;; A cond is too long if any predicate or any consequent is.
  (prog (toolong)
        (while (setq cond (cdr cond))
               (cond ((or (toolongexpp (caar cond))
                          (toolongstmtp (cadar cond)))
                      (setq toolong t))))
        (return toolong)))

(defun toolongdop (dostmt)
  ;; A do loop is too long if its iteration clauses (taken together),
  ;; its exit condition, or any body statement is too long.
  (cond ((> (eval (cons '+ (foreach exp in (caadr dostmt) collect
                                    (numprintlen exp))))
            maxexpprintlen*) t)
        ((toolongexpp (caaddr dostmt)) t)
        ((lispstmtgpp (cadddr dostmt)) (toolongstmtgpp (cadddr dostmt)))
        (t (eval (cons 'or (foreach stmt in (cdddr dostmt) collect
                                    (toolongstmtp stmt)))))))

(defun toolongreturnp (ret)
  ;; A return is too long if the returned expression is.
  (toolongexpp (cadr ret)))

(defun toolongstmtgpp (stmtgp)
  ;; A statement group is too long if any member statement is.
  (eval (cons 'or
              (foreach stmt in (cdr stmtgp) collect (toolongstmtp stmt)))))

(defun toolongdefp (def)
  ;; A definition is too long if its body (group or statement list) is.
  (cond ((lispstmtgpp (cadddr def))
         (toolongstmtgpp (cadddr def)))
        (t
         (eval (cons 'or
                     (foreach stmt in (cdddr def) collect
                              (toolongstmtp stmt)))))))
;;                                                 ;;
;;  3. print length function                       ;;
;;                                                 ;;

(defun numprintlen (exp)
  ;; Approximate the number of characters exp occupies when printed:
  ;; atoms count their print characters; a one-element list counts as
  ;; its element; longer lists add the element count (for separators
  ;; and parentheses) to the lengths of all their elements.
  (cond ((atom exp)
         (length (explode exp)))
        ((onep (length exp))
         (numprintlen (car exp)))
        (t
         (+ (length exp)
            (eval (cons '+
                        (foreach elt in (cdr exp) collect
                                 (numprintlen elt))))))))
| {
"pile_set_name": "Github"
} |
/*
* Copyright (c) 2012, 2017, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
/*
* COPYRIGHT AND PERMISSION NOTICE
*
* Copyright (C) 1991-2016 Unicode, Inc. All rights reserved.
* Distributed under the Terms of Use in
* http://www.unicode.org/copyright.html.
*
* Permission is hereby granted, free of charge, to any person obtaining
* a copy of the Unicode data files and any associated documentation
* (the "Data Files") or Unicode software and any associated documentation
* (the "Software") to deal in the Data Files or Software
* without restriction, including without limitation the rights to use,
* copy, modify, merge, publish, distribute, and/or sell copies of
* the Data Files or Software, and to permit persons to whom the Data Files
* or Software are furnished to do so, provided that
* (a) this copyright and permission notice appear with all copies
* of the Data Files or Software,
* (b) this copyright and permission notice appear in associated
* documentation, and
* (c) there is clear notice in each modified Data File or in the Software
* as well as in the documentation associated with the Data File(s) or
* Software that the data or software has been modified.
*
* THE DATA FILES AND SOFTWARE ARE PROVIDED "AS IS", WITHOUT WARRANTY OF
* ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
* WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT OF THIRD PARTY RIGHTS.
* IN NO EVENT SHALL THE COPYRIGHT HOLDER OR HOLDERS INCLUDED IN THIS
* NOTICE BE LIABLE FOR ANY CLAIM, OR ANY SPECIAL INDIRECT OR CONSEQUENTIAL
* DAMAGES, OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE,
* DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
* TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
* PERFORMANCE OF THE DATA FILES OR SOFTWARE.
*
* Except as contained in this notice, the name of a copyright holder
* shall not be used in advertising or otherwise to promote the sale,
* use or other dealings in these Data Files or Software without prior
* written authorization of the copyright holder.
*/
package sun.util.resources.cldr.ext;
import sun.util.resources.OpenListResourceBundle;
public class LocaleNames_ur_IN extends OpenListResourceBundle {
@Override
protected final Object[][] getContents() {
final Object[][] data = new Object[][] {
{ "AC", "\u062c\u0632\u06cc\u0631\u06c1 \u0627\u0633\u06cc\u0646\u0634\u0646" },
{ "AX", "\u062c\u0632\u0627\u0626\u0631 \u0622\u0644\u06cc\u0646\u0688" },
{ "BV", "\u062c\u0632\u06cc\u0631\u06c1 \u0628\u0648\u0648\u06cc\u062a" },
{ "CC", "\u062c\u0632\u0627\u0626\u0631 (\u06a9\u06cc\u0644\u0646\u06af) \u06a9\u0648\u06a9\u0648\u0633" },
{ "CI", "\u06a9\u0648\u062a \u062f\u0627\u0648\u0648\u0627\u063a" },
{ "CK", "\u062c\u0632\u0627\u0626\u0631 \u06a9\u06a9" },
{ "CP", "\u062c\u0632\u06cc\u0631\u06c1 \u06a9\u0644\u067e\u0631\u0679\u0646" },
{ "DG", "\u0688\u06cc\u06af\u0648 \u06af\u0627\u0631\u0634\u06cc\u0627" },
{ "FK", "\u062c\u0632\u0627\u0626\u0631 \u0641\u0627\u06a9\u0644\u06cc\u0646\u0688" },
{ "FO", "\u062c\u0632\u0627\u0626\u0631 \u0641\u06cc\u0631\u0648" },
{ "GF", "\u0641\u0631\u0627\u0646\u0633\u06cc\u0633\u06cc \u06af\u06cc\u0627\u0646\u0627" },
{ "HM", "\u062c\u0632\u0627\u0626\u0631 \u06c1\u0631\u0688 \u0648 \u0645\u06a9\u0688\u0648\u0646\u0644\u0688" },
{ "IC", "\u062c\u0632\u0627\u0626\u0631 \u06a9\u0646\u0627\u0631\u06cc" },
{ "IO", "\u0628\u0631\u0637\u0627\u0646\u0648\u06cc \u0628\u062d\u0631\u06c1\u0646\u062f \u062e\u0637\u06c1" },
{ "MH", "\u062c\u0632\u0627\u0626\u0631 \u0645\u0627\u0631\u0634\u0644" },
{ "MP", "\u062c\u0632\u0627\u0626\u0631 \u0634\u0645\u0627\u0644\u06cc \u0645\u0627\u0631\u06cc\u0627\u0646\u0627" },
{ "NF", "\u062c\u0632\u06cc\u0631\u06c1 \u0646\u0627\u0631\u0641\u0648\u06a9" },
{ "PN", "\u062c\u0632\u0627\u0626\u0631 \u067e\u0679\u06a9\u06cc\u0631\u0646" },
{ "SB", "\u062c\u0632\u0627\u0626\u0631 \u0633\u0644\u06cc\u0645\u0627\u0646" },
{ "TA", "\u062a\u0631\u0633\u0679\u0627\u0646 \u062f\u0627 \u06a9\u0648\u0646\u06cc\u0627" },
{ "TC", "\u062c\u0632\u0627\u0626\u0631 \u06a9\u06cc\u06a9\u0633 \u0648 \u062a\u0631\u06a9\u06cc\u06c1" },
{ "UM", "\u0627\u0645\u0631\u06cc\u06a9\u06cc \u0628\u06cc\u0631\u0648\u0646\u06cc \u062c\u0632\u0627\u0626\u0631" },
{ "VG", "\u0628\u0631\u0637\u0627\u0646\u0648\u06cc \u062c\u0632\u0627\u0626\u0631 \u0648\u0631\u062c\u0646" },
{ "VI", "\u0627\u0645\u0631\u06cc\u06a9\u06cc \u062c\u0632\u0627\u0626\u0631 \u0648\u0631\u062c\u0646" },
{ "af", "\u0627\u0641\u0631\u06cc\u0642\u06cc" },
{ "co", "\u06a9\u0627\u0631\u0633\u06cc\u06a9\u0627\u0626\u06cc" },
{ "kn", "\u06a9\u0646\u0691" },
{ "ku", "\u06a9\u0631\u062f" },
{ "lv", "\u0644\u0679\u0648\u064a\u0627\u06cc" },
{ "dje", "\u0632\u0631\u0645\u06c1" },
{ "zgh", "\u0645\u0639\u06cc\u0627\u0631\u06cc \u0645\u0631\u0627\u0642\u0634\u06cc \u062a\u0645\u0627\u0632\u06cc\u0642\u06cc" },
{ "ar_001", "\u062c\u062f\u06cc\u062f \u0645\u0639\u06cc\u0627\u0631\u06cc \u0639\u0631\u0628\u06cc" },
{ "zh_Hans", "\u0622\u0633\u0627\u0646 \u0686\u06cc\u0646\u06cc" },
};
return data;
}
}
| {
"pile_set_name": "Github"
} |
package de.congrace.exp4j;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
 * Splits a mathematical expression string into {@link Token}s: numbers
 * (decimal, exponent "e/E", hex "0x", binary "0b" and octal notation),
 * variables, functions, operators, parentheses/brackets and
 * function-argument separators. Bracket balance is verified before the
 * token list is returned.
 */
class Tokenizer {

    /** Names recognised as variables while tokenizing; may be null. */
    private final Set<String> variableNames;

    /** Known functions, keyed by function name. */
    private final Map<String, CustomFunction> functions;

    /** Known operators, keyed by their symbol. */
    private final Map<String, CustomOperator> operators;

    Tokenizer(Set<String> variableNames, Map<String, CustomFunction> functions, Map<String, CustomOperator> operators) {
        super();
        this.variableNames = variableNames;
        this.functions = functions;
        this.operators = operators;
    }

    /** Returns true if {@code name} is a declared variable name. */
    private boolean isVariable(String name) {
        // Set.contains uses equals(), same semantics as the previous
        // explicit loop over variableNames.
        return variableNames != null && variableNames.contains(name);
    }

    /** Returns true if {@code name} is a known function. */
    private boolean isFunction(String name) {
        return functions.containsKey(name);
    }

    /** Returns true if {@code c} occurs in any known operator symbol. */
    private boolean isOperatorCharacter(char c) {
        for (String symbol : operators.keySet()) {
            if (symbol.indexOf(c) != -1) {
                return true;
            }
        }
        return false;
    }

    /**
     * Tokenizes the given expression.
     *
     * @param expression the expression string to tokenize
     * @return the tokens in source order
     * @throws UnparsableExpressionException on unknown symbols, malformed
     *         numbers or unbalanced brackets
     * @throws UnknownFunctionException if a referenced function cannot be
     *         resolved
     */
    List<Token> getTokens(final String expression) throws UnparsableExpressionException, UnknownFunctionException {
        final List<Token> tokens = new ArrayList<>();
        final char[] chars = expression.toCharArray();
        // Balance counters for (), {} and []; all three must be zero at the end.
        int openBraces = 0;
        int openCurly = 0;
        int openSquare = 0;
        // iterate over the chars and fork on different types of input
        Token lastToken;
        for (int i = 0; i < chars.length; i++) {
            char c = chars[i];
            if (c == ' ') {
                continue;
            }
            if (Character.isDigit(c)) {
                final StringBuilder valueBuilder = new StringBuilder(1);
                // handle the numbers of the expression
                valueBuilder.append(c);
                int numberLen = 1;
                boolean lastCharExpNotationSeparator = false; // needed to determine if a + or - following an e/E is a unary operation
                boolean expNotationSeparatorOccurred = false; // to check if only one E/e notation separator has occurred
                boolean hexNotationPrefixOccurred = false; // to check if only one '0x' notation prefix has occurred
                boolean binNotationPrefixOccurred = false; // to check if only one '0b' notation prefix has occurred
                boolean octNotationPrefixOccurred = false; // to check if only one '0' notation prefix has occurred
                while (i + numberLen < chars.length) {
                    char cc = chars[i + numberLen];
                    if (c == '0' && numberLen == 1) { // possibly binary, hex or octal notations
                        if (cc == 'x' || cc == 'X') { // hex
                            hexNotationPrefixOccurred = true;
                            valueBuilder.append(cc);
                            numberLen++;
                            continue;
                        } else if (cc == 'b' || cc == 'B') { // binary
                            binNotationPrefixOccurred = true;
                            valueBuilder.append(cc);
                            numberLen++;
                            continue;
                        } else if (cc != '.') { // octal
                            octNotationPrefixOccurred = true;
                        }
                    }
                    if (cc == '.') {
                        if (hexNotationPrefixOccurred || binNotationPrefixOccurred || octNotationPrefixOccurred) {
                            throw new UnparsableExpressionException("Unexpected decimal separator");
                        }
                        valueBuilder.append(cc);
                        lastCharExpNotationSeparator = false;
                    } else if (Character.isDigit(cc)) {
                        valueBuilder.append(cc);
                        lastCharExpNotationSeparator = false;
                    } else if ((cc >= 'a' && cc <= 'f') || (cc >= 'A' && cc <= 'F')) {
                        // Letter digits: either the exponent separator of a
                        // decimal literal or a hex digit of a 0x literal.
                        if (!hexNotationPrefixOccurred && (cc == 'e' || cc == 'E')) {
                            if (expNotationSeparatorOccurred) {
                                throw new UnparsableExpressionException("Number can have only one notation separator 'e/E'");
                            }
                            valueBuilder.append(cc);
                            lastCharExpNotationSeparator = true;
                            expNotationSeparatorOccurred = true;
                        } else if (hexNotationPrefixOccurred) {
                            valueBuilder.append(cc);
                        } else {
                            throw new UnparsableExpressionException("Digit expected");
                        }
                    } else if (lastCharExpNotationSeparator && (cc == '-' || cc == '+')) {
                        // Sign directly after e/E belongs to the exponent.
                        valueBuilder.append(chars[i + numberLen]);
                        lastCharExpNotationSeparator = false;
                    } else if (cc != '_') {
                        break; // break out of the while loop here, since the number seem finished
                    }
                    numberLen++;
                }
                i += numberLen - 1;
                lastToken = new NumberToken(valueBuilder.toString());
            } else if (Character.isLetter(c) || c == '_') {
                // can be a variable or function
                final StringBuilder nameBuilder = new StringBuilder();
                nameBuilder.append(c);
                int offset = 1;
                while (i + offset < chars.length && (Character.isLetter(chars[i + offset]) || Character.isDigit(chars[i + offset]) || chars[i + offset] == '_')) {
                    nameBuilder.append(chars[i + offset++]);
                }
                String name = nameBuilder.toString();
                if (isVariable(name)) {
                    // a variable
                    i += offset - 1;
                    lastToken = new VariableToken(name);
                } else if (this.isFunction(name)) {
                    // might be a function
                    i += offset - 1;
                    lastToken = new FunctionToken(name, functions.get(name));
                } else {
                    // an unknown symbol was encountered
                    throw new UnparsableExpressionException(expression, c, i + 1);
                }
            } else if (c == ',') {
                // a function separator, hopefully
                lastToken = new FunctionSeparatorToken();
            } else if (isOperatorCharacter(c)) {
                // might be an operation; greedily consume the longest string
                // that is still a prefix of some known operator symbol
                StringBuilder symbolBuilder = new StringBuilder();
                symbolBuilder.append(c);
                int offset = 1;
                while (chars.length > i + offset && (isOperatorCharacter(chars[i + offset]))
                        && isOperatorStart(symbolBuilder.toString() + chars[i + offset])) {
                    symbolBuilder.append(chars[i + offset]);
                    offset++;
                }
                String symbol = symbolBuilder.toString();
                if (operators.containsKey(symbol)) {
                    i += offset - 1;
                    lastToken = new OperatorToken(symbol, operators.get(symbol));
                } else {
                    throw new UnparsableExpressionException(expression, c, i + 1);
                }
            } else if (c == '(') {
                openBraces++;
                lastToken = new ParenthesesToken(String.valueOf(c));
            } else if (c == '{') {
                openCurly++;
                lastToken = new ParenthesesToken(String.valueOf(c));
            } else if (c == '[') {
                openSquare++;
                lastToken = new ParenthesesToken(String.valueOf(c));
            } else if (c == ')') {
                openBraces--;
                lastToken = new ParenthesesToken(String.valueOf(c));
            } else if (c == '}') {
                openCurly--;
                lastToken = new ParenthesesToken(String.valueOf(c));
            } else if (c == ']') {
                openSquare--;
                lastToken = new ParenthesesToken(String.valueOf(c));
            } else {
                // an unknown symbol was encountered
                throw new UnparsableExpressionException(expression, c, i + 1);
            }
            tokens.add(lastToken);
        }
        // FIX: combine the balance checks with logical || instead of the
        // original mix of || and bitwise | (same truth value, but
        // short-circuiting and consistent with the rest of the class).
        if (openCurly != 0 || openBraces != 0 || openSquare != 0) {
            StringBuilder errorBuilder = new StringBuilder();
            errorBuilder.append("There are ");
            boolean first = true;
            if (openBraces != 0) {
                errorBuilder.append(Math.abs(openBraces)).append(" unmatched parantheses ");
                first = false;
            }
            if (openCurly != 0) {
                if (!first) {
                    errorBuilder.append(" and ");
                }
                errorBuilder.append(Math.abs(openCurly)).append(" unmatched curly brackets ");
                first = false;
            }
            if (openSquare != 0) {
                if (!first) {
                    errorBuilder.append(" and ");
                }
                errorBuilder.append(Math.abs(openSquare)).append(" unmatched square brackets ");
                first = false;
            }
            errorBuilder.append("in expression '").append(expression).append("'");
            throw new UnparsableExpressionException(errorBuilder.toString());
        }
        return tokens;
    }

    /** Returns true if {@code op} is a prefix of any known operator symbol. */
    private boolean isOperatorStart(String op) {
        for (String operatorName : operators.keySet()) {
            if (operatorName.startsWith(op)) {
                return true;
            }
        }
        return false;
    }
}
| {
"pile_set_name": "Github"
} |
#!/bin/sh
# Clean the flange tutorial case: removes generated surfaces, feature-edge
# meshes and solver output so the case can be re-run from scratch.
# FIX: quote ${0%/*} and $WM_PROJECT_DIR so the script also works when the
# case or the OpenFOAM installation lives under a path containing spaces.
cd "${0%/*}" || exit 1    # run from this directory
# Source tutorial clean functions
. "$WM_PROJECT_DIR/bin/tools/CleanFunctions"
# remove surface
\rm -f constant/triSurface/flange.stl.gz
rm -rf 0 > /dev/null 2>&1
rm -f ./flange ./*.obj > /dev/null 2>&1
rm -rf constant/extendedFeatureEdgeMesh > /dev/null 2>&1
rm -f constant/triSurface/flange.eMesh > /dev/null 2>&1
rm -f constant/polyMesh/boundary
cleanCase
# ----------------------------------------------------------------- end-of-file
| {
"pile_set_name": "Github"
} |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
use dataverse tpch;
/* Closed record type matching the TPC-H LINEITEM schema. */
declare type LineItemType as closed {
  l_orderkey: int32,
  l_partkey: int32,
  l_suppkey: int32,
  l_linenumber: int32,
  l_quantity: int32,
  l_extendedprice: double,
  l_discount: double,
  l_tax: double,
  l_returnflag: string,
  l_linestatus: string,
  l_shipdate: string,
  l_commitdate: string,
  l_receiptdate: string,
  l_shipinstruct: string,
  l_shipmode: string,
  l_comment: string
}
/* Two-node group over which the dataset is partitioned. */
declare nodegroup group1 on asterix_nc1, asterix_nc2;
set format "org.apache.asterix.runtime.formats.nontagged.NonTaggedDataFormat";
/* LineItems dataset keyed by (l_orderkey, l_linenumber), stored on group1. */
declare dataset LineItems(LineItemType)
  primary key l_orderkey, l_linenumber with {"node-group":{"name":"group1"}};
write output to asterix_nc1:"/home/yasser/Dropbox/Research/data/results/result_tpch_closed_1.adm";
/* TPC-H query 1 (pricing summary report): aggregates line items shipped up
   to 1998-09-02, grouped and ordered by return flag and line status. */
for $l in dataset('LineItems')
where $l.l_shipdate <= '1998-09-02'
group by $l_returnflag := $l.l_returnflag, $l_linestatus := $l.l_linestatus
  with $l
order by $l_returnflag, $l_linestatus
return {
  "l_returnflag": $l_returnflag,
  "l_linestatus": $l_linestatus,
  "sum_qty": sum(for $i in $l return $i.l_quantity),
  "sum_base_price": sum(for $i in $l return $i.l_extendedprice),
  "sum_disc_price": sum(for $i in $l return $i.l_extendedprice * (1 - $i.l_discount)),
  "sum_charge": sum(for $i in $l return $i.l_extendedprice * (1 - $i.l_discount) * (1 + $i.l_tax)),
  "ave_qty": avg(for $i in $l return $i.l_quantity),
  "ave_price": avg(for $i in $l return $i.l_extendedprice),
  "ave_disc": avg(for $i in $l return $i.l_discount),
  "count_order": count($l)
}
| {
"pile_set_name": "Github"
} |
namespace FlubuCore.Tasks.Process
{
    /// <summary>
    /// Fluent configuration surface for a task that runs an external process.
    /// <typeparamref name="T"/> is the concrete task type returned from every
    /// call so configuration can be chained.
    /// </summary>
    public interface IExternalProcess<out T>
        where T : ITask
    {
        /// <summary>
        /// Set the full file path of the executable file.
        /// </summary>
        /// <param name="executableFullFilePath">Full path to the executable to run.</param>
        /// <returns>This task, for fluent chaining.</returns>
        T Executable(string executableFullFilePath);
        /// <summary>
        /// Add argument for executable.
        /// </summary>
        /// <param name="arg">Command-line argument to append.</param>
        /// <param name="maskArg">If <c>true</c> argument is masked. Otherwise not.</param>
        /// <returns>This task, for fluent chaining.</returns>
        T WithArguments(string arg, bool maskArg);
        /// <summary>
        /// Add arguments for executable.
        /// </summary>
        /// <param name="args">Command-line arguments to append, in order.</param>
        /// <returns>This task, for fluent chaining.</returns>
        T WithArguments(params string[] args);
        /// <summary>
        /// Clear all arguments for the command line.
        /// </summary>
        /// <returns>This task, for fluent chaining.</returns>
        T ClearArguments();
        /// <summary>
        /// Set the working folder for the executable.
        /// </summary>
        /// <param name="folder">Working directory the process is started in.</param>
        /// <returns>This task, for fluent chaining.</returns>
        T WorkingFolder(string folder);
        /// <summary>
        /// Do not log output to the console.
        /// </summary>
        /// <returns>This task, for fluent chaining.</returns>
        T DoNotLogOutput();
    }
}
| {
"pile_set_name": "Github"
} |
/*-------------------------------------------------------------------------
support.c - startup for PIC16 regression tests with gpsim
Copyright (c) 2006-2010 Borut Razem
This program is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 2, or (at your option) any
later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
In other words, you are welcome to use, share and improve this program.
You are forbidden to forbid anyone else to use, share and improve
what you give them. Help stamp out software-hoarding!
-------------------------------------------------------------------------*/
#pragma preproc_asm -
#pragma stack 0x200 255 /* set stack size to 255 bytes */
#include <pic18f452.h>
/* Blocking write of one character to the PIC18F452 USART: busy-waits until
   the transmit buffer is free (TXIF set), then loads TXREG to start the
   transmission.  Requires _initEmu() to have configured the USART first. */
void
_putchar(char c)
{
  while (!PIR1bits.TXIF)
    ;
  TXREG = c;
}
/* Configure the gpsim emulator (attach a USART console module, U1, to the
   PIC's TX pin at 9600 baud) and initialize the PIC18F452 USART for
   asynchronous transmission.  The .direct pseudo-ops are gpsim emulator
   commands embedded in inline assembly, not PIC instructions. */
void
_initEmu(void)
{
  /* load and configure the libgpsim_modules module */
  __asm
  ;; Set frequency to 20MHz
  .direct "e", ".frequency=20e6"
  ;; Load the USART library and module
  .direct "e", "module library libgpsim_modules"
  .direct "e", "module load usart U1"
  ;; Define a node
  .direct "e", "node PIC_tx"
  ;; Tie the USART module to the PIC
  .direct "e", "attach PIC_tx portc6 U1.RXPIN"
  ;; Set the USART module's Baud Rate
  .direct "e", "U1.rxbaud = 9600"
  ;; Display the received character on terminal
  .direct "e", "U1.console = true"
  __endasm;
  /* USART initialization */
  PORTCbits.TX = 1; // Set TX pin to 1
  TRISCbits.TRISC6 = 0; // TX pin is output
  TXSTA = 0; // Reset USART registers to POR state
  RCSTA = 0;
  //1. Initialize the SPBRG register for the appropriate
  // baud rate. If a high speed baud rate is desired,
  // set bit BRGH (Section 16.1).
  // SPBRG=129 with BRGH=1 at Fosc=20MHz gives ~9615 baud (nominal 9600).
  TXSTAbits.BRGH = 1;
  SPBRG = 129;
  //2. Enable the asynchronous serial port by clearing
  // bit SYNC and setting bit SPEN.
  RCSTAbits.SPEN = 1;
  //3. If interrupts are desired, set enable bit TXIE.
  //4. If 9-bit transmission is desired, set transmit bit
  // TX9. Can be used as address/data bit.
  //5. Enable the transmission by setting bit TXEN,
  // which will also set bit TXIF.
  TXSTAbits.TXEN = 1;
  //6. If 9-bit transmission is selected, the ninth bit
  // should be loaded in bit TX9D.
  //7. Load data to the TXREG register (starts
  // transmission).
}
/* Flush pending USART output (wait for the transmit shift register to
   empty, TRMT set) and then stop the gpsim emulator by planting an
   assertion breakpoint via the .direct "a" pseudo-op. */
void
_exitEmu(void)
{
  /* wait until the transmit buffer is empty */
  while (!TXSTAbits.TRMT)
    ;
  /* set the breakpoint */
  __asm
  .direct "a", "\"\""
  __endasm;
}
| {
"pile_set_name": "Github"
} |
Replaced the plain DocBook XSL admonition icons with Jimmac's DocBook
icons (http://jimmac.musichall.cz/ikony.php3). I dropped transparency
from the Jimmac icons to work around MS IE and FOP PNG incompatibilities.
Stuart Rackham
| {
"pile_set_name": "Github"
} |
#' Clip the first and last n metres of SpatialLines
#'
#' Takes lines and removes the start and end point, to a distance determined
#' by the user.
#'
#' Note: [toptailgs()] is around 10 times faster, but only works
#' on data with geographic CRS's due to its reliance on the geosphere
#' package.
#'
#' @param l A SpatialLines object
#' @param toptail_dist The distance (in metres) to top and tail the line by.
#' Can either be a single value or a vector of the same length as the
#' SpatialLines object.
#' @param ... Arguments passed to rgeos::gBuffer()
#' @aliases toptail
#' @family lines
#' @export
#' @examples
#' lib_versions <- sf::sf_extSoftVersion()
#' lib_versions
#' # dont test due to issues with sp classes on some set-ups
#' if(lib_versions[3] >= "6.3.1") {
#' # l <- routes_fast[2:4, ] # to run with sp classes
#' l <- routes_fast_sf[2:4, ]
#' l_top_tail <- geo_toptail(l, 300)
#' l_top_tail
#' plot(sf::st_geometry(l_top_tail))
#' plot(sf::st_geometry(geo_toptail(l, 600)), lwd = 9, add = TRUE)
#' }
geo_toptail <- function(l, toptail_dist, ...) {
  # S3 generic: dispatches to geo_toptail.Spatial (sp objects) or
  # geo_toptail.sf (sf objects) based on the class of l.
  UseMethod("geo_toptail")
}
#' @export
geo_toptail.Spatial <- toptail <- function(l, toptail_dist, ...) {
  # sp method: removes toptail_dist (metres) from the start and end of each
  # line by buffering the line's own endpoints and clipping the line to the
  # area outside those buffers.
  #
  # Args:
  #   l: a SpatialLines/SpatialLinesDataFrame object.
  #   toptail_dist: scalar, or vector with one clip distance per line.
  #   ...: passed on to geo_buffer()/rgeos::gBuffer().
  if (length(toptail_dist) > 1 && length(toptail_dist) != length(l)) {
    stop("toptail_dist is vector but not of equal length to spatial object")
  }
  lpoints <- line2points(l)
  if (length(toptail_dist) == 1) {
    toptail_dist <- rep(toptail_dist, length(l))
  }
  out <- NULL
  for (i in seq_along(l)) {
    # Endpoints belonging to line i only (line2points() tags them with $id).
    sel_points <- lpoints[lpoints$id == i, ]
    # FIX: buffer only this line's endpoints.  The previous code buffered
    # *all* endpoints (lpoints), so every line was also clipped wherever it
    # passed near another line's start/end; the sf method already uses only
    # the line's own endpoints.
    if (!sp::is.projected(l)) {
      sel <- geo_buffer(sel_points, width = toptail_dist[i], ..., silent = TRUE)
    } else {
      sel <- rgeos::gBuffer(sel_points, dist = toptail_dist[i], ...)
    }
    if (rgeos::gContainsProperly(sel, l[i, ])) {
      message(paste0(
        "Line ", i, " is completely removed by the clip and",
        " is omitted from the results"
      ))
      next
    }
    l2 <- rgeos::gDifference(l[i, ], sel)
    # Accumulate results; the is.null() check replaces exists("out"), which
    # could pick up an unrelated `out` from an enclosing environment.
    if (is.null(out)) {
      out <- l2
    } else {
      out <- raster::bind(out, l2)
    }
  }
  out
}
#' @export
geo_toptail.sf <- function(l, toptail_dist, ...) {
  # sf method: clips toptail_dist from both ends of each LINESTRING by
  # buffering the union of its two endpoints and taking the geometric
  # difference.  Lines that fall entirely inside the buffer are dropped
  # with a message.
  # NOTE(review): toptail_dist is passed to geo_buffer() without per-feature
  # indexing, so a vector of distances is not subsetted per line here
  # (unlike the Spatial method) -- confirm intended behaviour for vector
  # input.
  suppressMessages(suppressWarnings({
    line_list <- lapply(
      seq(nrow(l)),
      function(i) {
        li <- l[i, ]
        # Union of the line's first and last vertex.
        sel_points <- sf::st_union(
          lwgeom::st_startpoint(li),
          lwgeom::st_endpoint(li)
        )
        sel <- geo_buffer(shp = sel_points, dist = toptail_dist, nQuadSegs = 5)
        # Line completely inside the clip buffer: omit it from the result.
        if(any(sf::st_contains_properly(sel, li, sparse = FALSE))) {
          message(
            "Line ", i, " is completely removed by the clip and",
            " is omitted from the results"
          )
          return()
        }
        sf::st_difference(x = li, y = sel)
      }
    )
  }))
  # NULL entries from dropped lines vanish in the rbind.
  out <- do.call(rbind, line_list)
  # out <- data.table::rbindlist(line_list)
  # sf::st_sf(out)
  out
}
#' Clip the first and last n metres of SpatialLines
#'
#' Takes lines and removes the start and end point, to a distance determined
#' by the user. Uses the geosphere::distHaversine function and requires
#' coordinates in WGS84 (lng/lat).
#'
#' @param l A SpatialLines object
#' @param toptail_dist The distance (in metres) to top the line by.
#' Can be either a single value or a vector of the same length as the
#' SpatialLines object. If tail_dist is missing, is used as the tail distance.
#' @param tail_dist The distance (in metres) to tail the line by. Can be
#' either a single value or a vector of the same length as the SpatialLines
#' object.
#' @family lines
#' @export
#' @examples
#' data("routes_fast")
#' rf <- routes_fast[2:3, ]
#' r_toptail <- toptailgs(rf, toptail_dist = 300)
#' plot(rf, lwd = 3)
#' plot(r_toptail, col = "red", add = TRUE)
#' plot(cents, add = TRUE)
toptailgs <- function(l, toptail_dist, tail_dist = NULL) {
  # Clips toptail_dist metres from the start and tail_dist metres from the
  # end of every line, using haversine distances (requires lng/lat
  # coordinates).  The clip point is found by linear interpolation between
  # the last vertex inside and the first vertex outside the clip distance.
  # Validate vector arguments: must be scalar or one value per line.
  if (length(toptail_dist) > 1) {
    if (length(toptail_dist) != length(l)) {
      stop("toptail_dist is vector but not of equal length to SpatialLines object")
    }
  }
  if (!missing(tail_dist)) {
    if (length(tail_dist) > 1) {
      if (length(tail_dist) != length(l)) {
        stop("tail_dist is vector but not of equal length to SpatialLines object")
      }
    }
  }
  else {
    # Default: tail the same distance as the top.
    tail_dist <- toptail_dist
  }
  # Keep the originals so per-line values can be re-indexed each iteration.
  toptail_disto <- toptail_dist
  tail_disto <- tail_dist
  i <- 1
  while (i <= length(l)) {
    toptail_dist <- ifelse(length(toptail_disto) == 1, toptail_disto, toptail_disto[i])
    linecoords <- coordinates(l@lines[[i]])[[1]]
    # Distance of every vertex from the line's first vertex.
    topdists <- geosphere::distHaversine(linecoords[1, ], linecoords)
    # Replace the clipped head with a single interpolated point exactly
    # toptail_dist from the original start, then keep the remaining vertices.
    linecoords <- rbind(
      tail(linecoords[which(topdists < toptail_dist), , drop = FALSE], n = 1) + (
        linecoords[which(topdists >= toptail_dist), , drop = FALSE][1, ] -
          tail(linecoords[which(topdists < toptail_dist), , drop = FALSE], n = 1)
      ) * (
        (toptail_dist - tail(topdists[which(topdists < toptail_dist)], n = 1)) / (topdists[which(topdists >= toptail_dist)][1] - tail(topdists[which(topdists < toptail_dist)], n = 1))
      ),
      linecoords[which(topdists >= toptail_dist), , drop = FALSE]
    )
    # Same procedure from the other end of the (already topped) line.
    bottomdists <- geosphere::distHaversine(linecoords[nrow(linecoords), ], linecoords)
    tail_dist <- ifelse(length(tail_disto) == 1, tail_disto, tail_disto[i])
    linecoords <- rbind(
      linecoords[which(bottomdists >= tail_dist), , drop = FALSE],
      tail(linecoords[which(bottomdists >= tail_dist), , drop = FALSE], n = 1) + (
        linecoords[which(bottomdists < tail_dist), , drop = FALSE][1, ] -
          tail(linecoords[which(bottomdists >= tail_dist), , drop = FALSE], n = 1)
      ) *
        ((tail(bottomdists[which(bottomdists >= tail_dist)], n = 1) - tail_dist) / (tail(bottomdists[which(bottomdists >= tail_dist)], n = 1) - bottomdists[which(bottomdists < tail_dist)][1]))
    )
    # Write the clipped coordinates back into the original object.
    l@lines[[i]]@Lines[[1]]@coords <- unname(linecoords)
    i <- i + 1
  }
  return(l)
}
#' Clip the beginning and ends SpatialLines to the edge of SpatialPolygon borders
#'
#' Takes lines and removes the start and end point, to a distance determined
#' by the nearest polygon border.
#'
#' @param l An sf LINESTRING object
#' @param buff An sf POLYGON object to act as the buffer
#' @param ... Arguments passed to rgeos::gBuffer()
#' @family lines
#' @export
#' @examples
#' l <- routes_fast_sf
#' buff <- zones_sf
#' r_toptail <- toptail_buff(l, buff)
#' nrow(l)
#' nrow(r_toptail)
#' plot(zones_sf$geometry)
#' plot(l$geometry, add = TRUE)
#' plot(r_toptail$geometry, lwd = 5, add = TRUE)
toptail_buff <- function(l, buff, ...) {
  # Removes from each line the parts that fall inside the buffer polygons
  # intersecting its endpoints; lines entirely inside the buffer are
  # dropped from the result.
  i_indexed <- out <- NULL
  # FIX: iterate over features with seq_len(nrow(l)).  An sf object is a
  # data frame, so the previous 1:length(l) counted *columns*, not rows.
  for (i in seq_len(nrow(l))) {
    lpoints <- line2points(l[i, ])
    # Select zones per line (spatial subsetting: buffer polygons touching
    # this line's endpoints), merged into a single geometry.
    sel <- sf::st_union(buff[lpoints, ])
    l2 <- sf::st_difference(l$geometry[i], sel)
    # mapview::mapview(sel) +
    # mapview::mapview(l2[1])
    if (length(l2) == 0) {
      # Line i lies entirely within the buffer: drop it.
      next
    }
    i_indexed <- c(i_indexed, i)
    out <- c(out, l2)
  }
  out <- sf::st_sfc(out)
  # Keep only the surviving features and attach the clipped geometries.
  l_between_zones <- l[i_indexed, ]
  l_between_zones$geometry <- out
  l_between_zones
}
| {
"pile_set_name": "Github"
} |
# Build the packet/crc unit test binary from local and shared sources.
TARGET = test
LIBS = -lm
CC = gcc
CFLAGS = -O2 -g -Wall -Wextra -Wundef -std=gnu99 -I../../

SOURCES = main.c ../../packet.c ../../crc.c
HEADERS = ../../packet.h ../../crc.h
# Object files land in the current directory regardless of source location.
OBJECTS = $(notdir $(SOURCES:.c=.o))

# FIX: also mark test2 and run as phony -- they never create files of the
# same name and would otherwise be skipped if such files appeared.
.PHONY: default all clean test2 run

default: $(TARGET)
all: default

# Compile sources from the current directory.
%.o: %.c $(HEADERS)
	$(CC) $(CFLAGS) -c $< -o $@

# Compile shared sources from the project root (../../).
%.o: ../../%.c $(HEADERS)
	$(CC) $(CFLAGS) -c $< -o $@

.PRECIOUS: $(TARGET) $(OBJECTS)

# Link step.
$(TARGET): $(OBJECTS)
	$(CC) $(OBJECTS) -Wall $(LIBS) -o $@

clean:
	rm -f $(OBJECTS) $(TARGET)

# Debug helper: print the computed object list.
test2:
	echo $(OBJECTS)

# Build (if needed) and execute the test binary.
run: $(TARGET)
	./$(TARGET)
| {
"pile_set_name": "Github"
} |
/*
Copyright The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Code generated by informer-gen. DO NOT EDIT.
package informers
import (
"fmt"
v1alpha1 "k8s.io/api/admissionregistration/v1alpha1"
v1beta1 "k8s.io/api/admissionregistration/v1beta1"
v1 "k8s.io/api/apps/v1"
apps_v1beta1 "k8s.io/api/apps/v1beta1"
v1beta2 "k8s.io/api/apps/v1beta2"
autoscaling_v1 "k8s.io/api/autoscaling/v1"
v2beta1 "k8s.io/api/autoscaling/v2beta1"
batch_v1 "k8s.io/api/batch/v1"
batch_v1beta1 "k8s.io/api/batch/v1beta1"
v2alpha1 "k8s.io/api/batch/v2alpha1"
certificates_v1beta1 "k8s.io/api/certificates/v1beta1"
core_v1 "k8s.io/api/core/v1"
events_v1beta1 "k8s.io/api/events/v1beta1"
extensions_v1beta1 "k8s.io/api/extensions/v1beta1"
networking_v1 "k8s.io/api/networking/v1"
policy_v1beta1 "k8s.io/api/policy/v1beta1"
rbac_v1 "k8s.io/api/rbac/v1"
rbac_v1alpha1 "k8s.io/api/rbac/v1alpha1"
rbac_v1beta1 "k8s.io/api/rbac/v1beta1"
scheduling_v1alpha1 "k8s.io/api/scheduling/v1alpha1"
scheduling_v1beta1 "k8s.io/api/scheduling/v1beta1"
settings_v1alpha1 "k8s.io/api/settings/v1alpha1"
storage_v1 "k8s.io/api/storage/v1"
storage_v1alpha1 "k8s.io/api/storage/v1alpha1"
storage_v1beta1 "k8s.io/api/storage/v1beta1"
schema "k8s.io/apimachinery/pkg/runtime/schema"
cache "k8s.io/client-go/tools/cache"
)
// GenericInformer is type of SharedIndexInformer which will locate and delegate to other
// sharedInformers based on type
type GenericInformer interface {
	Informer() cache.SharedIndexInformer
	Lister() cache.GenericLister
}

// genericInformer adapts a typed shared informer to the GenericInformer
// interface by pairing it with the GroupResource it serves.
type genericInformer struct {
	informer cache.SharedIndexInformer
	resource schema.GroupResource
}
// Informer returns the SharedIndexInformer backing this generic informer.
func (f *genericInformer) Informer() cache.SharedIndexInformer {
	return f.informer
}
// Lister returns a GenericLister over the informer's indexer for the
// informer's GroupResource.
func (f *genericInformer) Lister() cache.GenericLister {
	return cache.NewGenericLister(f.Informer().GetIndexer(), f.resource)
}
// ForResource gives generic access to a shared informer of the matching type
// TODO extend this to unknown resources with a client pool
func (f *sharedInformerFactory) ForResource(resource schema.GroupVersionResource) (GenericInformer, error) {
switch resource {
// Group=admissionregistration.k8s.io, Version=v1alpha1
case v1alpha1.SchemeGroupVersion.WithResource("initializerconfigurations"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Admissionregistration().V1alpha1().InitializerConfigurations().Informer()}, nil
// Group=admissionregistration.k8s.io, Version=v1beta1
case v1beta1.SchemeGroupVersion.WithResource("mutatingwebhookconfigurations"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Admissionregistration().V1beta1().MutatingWebhookConfigurations().Informer()}, nil
case v1beta1.SchemeGroupVersion.WithResource("validatingwebhookconfigurations"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Admissionregistration().V1beta1().ValidatingWebhookConfigurations().Informer()}, nil
// Group=apps, Version=v1
case v1.SchemeGroupVersion.WithResource("controllerrevisions"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Apps().V1().ControllerRevisions().Informer()}, nil
case v1.SchemeGroupVersion.WithResource("daemonsets"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Apps().V1().DaemonSets().Informer()}, nil
case v1.SchemeGroupVersion.WithResource("deployments"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Apps().V1().Deployments().Informer()}, nil
case v1.SchemeGroupVersion.WithResource("replicasets"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Apps().V1().ReplicaSets().Informer()}, nil
case v1.SchemeGroupVersion.WithResource("statefulsets"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Apps().V1().StatefulSets().Informer()}, nil
// Group=apps, Version=v1beta1
case apps_v1beta1.SchemeGroupVersion.WithResource("controllerrevisions"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Apps().V1beta1().ControllerRevisions().Informer()}, nil
case apps_v1beta1.SchemeGroupVersion.WithResource("deployments"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Apps().V1beta1().Deployments().Informer()}, nil
case apps_v1beta1.SchemeGroupVersion.WithResource("statefulsets"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Apps().V1beta1().StatefulSets().Informer()}, nil
// Group=apps, Version=v1beta2
case v1beta2.SchemeGroupVersion.WithResource("controllerrevisions"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Apps().V1beta2().ControllerRevisions().Informer()}, nil
case v1beta2.SchemeGroupVersion.WithResource("daemonsets"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Apps().V1beta2().DaemonSets().Informer()}, nil
case v1beta2.SchemeGroupVersion.WithResource("deployments"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Apps().V1beta2().Deployments().Informer()}, nil
case v1beta2.SchemeGroupVersion.WithResource("replicasets"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Apps().V1beta2().ReplicaSets().Informer()}, nil
case v1beta2.SchemeGroupVersion.WithResource("statefulsets"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Apps().V1beta2().StatefulSets().Informer()}, nil
// Group=autoscaling, Version=v1
case autoscaling_v1.SchemeGroupVersion.WithResource("horizontalpodautoscalers"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Autoscaling().V1().HorizontalPodAutoscalers().Informer()}, nil
// Group=autoscaling, Version=v2beta1
case v2beta1.SchemeGroupVersion.WithResource("horizontalpodautoscalers"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Autoscaling().V2beta1().HorizontalPodAutoscalers().Informer()}, nil
// Group=batch, Version=v1
case batch_v1.SchemeGroupVersion.WithResource("jobs"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Batch().V1().Jobs().Informer()}, nil
// Group=batch, Version=v1beta1
case batch_v1beta1.SchemeGroupVersion.WithResource("cronjobs"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Batch().V1beta1().CronJobs().Informer()}, nil
// Group=batch, Version=v2alpha1
case v2alpha1.SchemeGroupVersion.WithResource("cronjobs"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Batch().V2alpha1().CronJobs().Informer()}, nil
// Group=certificates.k8s.io, Version=v1beta1
case certificates_v1beta1.SchemeGroupVersion.WithResource("certificatesigningrequests"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Certificates().V1beta1().CertificateSigningRequests().Informer()}, nil
// Group=core, Version=v1
case core_v1.SchemeGroupVersion.WithResource("componentstatuses"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Core().V1().ComponentStatuses().Informer()}, nil
case core_v1.SchemeGroupVersion.WithResource("configmaps"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Core().V1().ConfigMaps().Informer()}, nil
case core_v1.SchemeGroupVersion.WithResource("endpoints"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Core().V1().Endpoints().Informer()}, nil
case core_v1.SchemeGroupVersion.WithResource("events"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Core().V1().Events().Informer()}, nil
case core_v1.SchemeGroupVersion.WithResource("limitranges"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Core().V1().LimitRanges().Informer()}, nil
case core_v1.SchemeGroupVersion.WithResource("namespaces"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Core().V1().Namespaces().Informer()}, nil
case core_v1.SchemeGroupVersion.WithResource("nodes"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Core().V1().Nodes().Informer()}, nil
case core_v1.SchemeGroupVersion.WithResource("persistentvolumes"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Core().V1().PersistentVolumes().Informer()}, nil
case core_v1.SchemeGroupVersion.WithResource("persistentvolumeclaims"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Core().V1().PersistentVolumeClaims().Informer()}, nil
case core_v1.SchemeGroupVersion.WithResource("pods"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Core().V1().Pods().Informer()}, nil
case core_v1.SchemeGroupVersion.WithResource("podtemplates"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Core().V1().PodTemplates().Informer()}, nil
case core_v1.SchemeGroupVersion.WithResource("replicationcontrollers"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Core().V1().ReplicationControllers().Informer()}, nil
case core_v1.SchemeGroupVersion.WithResource("resourcequotas"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Core().V1().ResourceQuotas().Informer()}, nil
case core_v1.SchemeGroupVersion.WithResource("secrets"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Core().V1().Secrets().Informer()}, nil
case core_v1.SchemeGroupVersion.WithResource("services"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Core().V1().Services().Informer()}, nil
case core_v1.SchemeGroupVersion.WithResource("serviceaccounts"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Core().V1().ServiceAccounts().Informer()}, nil
// Group=events.k8s.io, Version=v1beta1
case events_v1beta1.SchemeGroupVersion.WithResource("events"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Events().V1beta1().Events().Informer()}, nil
// Group=extensions, Version=v1beta1
case extensions_v1beta1.SchemeGroupVersion.WithResource("daemonsets"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Extensions().V1beta1().DaemonSets().Informer()}, nil
case extensions_v1beta1.SchemeGroupVersion.WithResource("deployments"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Extensions().V1beta1().Deployments().Informer()}, nil
case extensions_v1beta1.SchemeGroupVersion.WithResource("ingresses"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Extensions().V1beta1().Ingresses().Informer()}, nil
case extensions_v1beta1.SchemeGroupVersion.WithResource("podsecuritypolicies"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Extensions().V1beta1().PodSecurityPolicies().Informer()}, nil
case extensions_v1beta1.SchemeGroupVersion.WithResource("replicasets"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Extensions().V1beta1().ReplicaSets().Informer()}, nil
// Group=networking.k8s.io, Version=v1
case networking_v1.SchemeGroupVersion.WithResource("networkpolicies"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Networking().V1().NetworkPolicies().Informer()}, nil
// Group=policy, Version=v1beta1
case policy_v1beta1.SchemeGroupVersion.WithResource("poddisruptionbudgets"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Policy().V1beta1().PodDisruptionBudgets().Informer()}, nil
case policy_v1beta1.SchemeGroupVersion.WithResource("podsecuritypolicies"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Policy().V1beta1().PodSecurityPolicies().Informer()}, nil
// Group=rbac.authorization.k8s.io, Version=v1
case rbac_v1.SchemeGroupVersion.WithResource("clusterroles"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Rbac().V1().ClusterRoles().Informer()}, nil
case rbac_v1.SchemeGroupVersion.WithResource("clusterrolebindings"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Rbac().V1().ClusterRoleBindings().Informer()}, nil
case rbac_v1.SchemeGroupVersion.WithResource("roles"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Rbac().V1().Roles().Informer()}, nil
case rbac_v1.SchemeGroupVersion.WithResource("rolebindings"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Rbac().V1().RoleBindings().Informer()}, nil
// Group=rbac.authorization.k8s.io, Version=v1alpha1
case rbac_v1alpha1.SchemeGroupVersion.WithResource("clusterroles"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Rbac().V1alpha1().ClusterRoles().Informer()}, nil
case rbac_v1alpha1.SchemeGroupVersion.WithResource("clusterrolebindings"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Rbac().V1alpha1().ClusterRoleBindings().Informer()}, nil
case rbac_v1alpha1.SchemeGroupVersion.WithResource("roles"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Rbac().V1alpha1().Roles().Informer()}, nil
case rbac_v1alpha1.SchemeGroupVersion.WithResource("rolebindings"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Rbac().V1alpha1().RoleBindings().Informer()}, nil
// Group=rbac.authorization.k8s.io, Version=v1beta1
case rbac_v1beta1.SchemeGroupVersion.WithResource("clusterroles"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Rbac().V1beta1().ClusterRoles().Informer()}, nil
case rbac_v1beta1.SchemeGroupVersion.WithResource("clusterrolebindings"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Rbac().V1beta1().ClusterRoleBindings().Informer()}, nil
case rbac_v1beta1.SchemeGroupVersion.WithResource("roles"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Rbac().V1beta1().Roles().Informer()}, nil
case rbac_v1beta1.SchemeGroupVersion.WithResource("rolebindings"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Rbac().V1beta1().RoleBindings().Informer()}, nil
// Group=scheduling.k8s.io, Version=v1alpha1
case scheduling_v1alpha1.SchemeGroupVersion.WithResource("priorityclasses"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Scheduling().V1alpha1().PriorityClasses().Informer()}, nil
// Group=scheduling.k8s.io, Version=v1beta1
case scheduling_v1beta1.SchemeGroupVersion.WithResource("priorityclasses"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Scheduling().V1beta1().PriorityClasses().Informer()}, nil
// Group=settings.k8s.io, Version=v1alpha1
case settings_v1alpha1.SchemeGroupVersion.WithResource("podpresets"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Settings().V1alpha1().PodPresets().Informer()}, nil
// Group=storage.k8s.io, Version=v1
case storage_v1.SchemeGroupVersion.WithResource("storageclasses"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Storage().V1().StorageClasses().Informer()}, nil
// Group=storage.k8s.io, Version=v1alpha1
case storage_v1alpha1.SchemeGroupVersion.WithResource("volumeattachments"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Storage().V1alpha1().VolumeAttachments().Informer()}, nil
// Group=storage.k8s.io, Version=v1beta1
case storage_v1beta1.SchemeGroupVersion.WithResource("storageclasses"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Storage().V1beta1().StorageClasses().Informer()}, nil
case storage_v1beta1.SchemeGroupVersion.WithResource("volumeattachments"):
return &genericInformer{resource: resource.GroupResource(), informer: f.Storage().V1beta1().VolumeAttachments().Informer()}, nil
}
return nil, fmt.Errorf("no informer found for %v", resource)
}
| {
"pile_set_name": "Github"
} |
//
// NSArray+Sort.h
// https://github.com/hackiftekhar/IQKeyboardManager
// Copyright (c) 2013-16 Iftekhar Qurashi.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
#import <Foundation/NSArray.h>
/**
 UIView.subviews sorting category.
 */
@interface NSArray (IQ_NSArray_Sort)
///--------------
/// @name Sorting
///--------------
/**
 Returns the array of UIView's, sorted ascending by their tag property.
 */
@property (nonatomic, readonly, copy) NSArray * _Nonnull sortedArrayByTag;
/**
 Returns the array of UIView's, sorted by their on-screen position (presumably frame origin x/y — the original comment was a copy-paste of the tag variant; confirm against the implementation).
 */
@property (nonatomic, readonly, copy) NSArray * _Nonnull sortedArrayByPosition;
@end
| {
"pile_set_name": "Github"
} |
/*
* Copyright 2020 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.thoughtworks.go.config.validation;
import com.thoughtworks.go.service.ConfigRepository;
import org.junit.Test;
import static org.mockito.Mockito.doNothing;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
public class ConfigRepositoryValidatorServiceTest {

    /**
     * Builds a spied validator whose {@code shutDownServer()} is stubbed out,
     * backed by a mock repository reporting the given corruption state.
     */
    private ConfigRepositoryValidatorService validatorBackedByRepoWhichIsCorrupted(boolean corrupted) throws Exception {
        ConfigRepository repository = mock(ConfigRepository.class);
        when(repository.isRepositoryCorrupted()).thenReturn(corrupted);
        ConfigRepositoryValidatorService validator = spy(new ConfigRepositoryValidatorService(repository));
        doNothing().when(validator).shutDownServer();
        return validator;
    }

    @Test
    public void shouldShutDownServerIfConfigRepositoryIsCorrupted() throws Exception {
        ConfigRepositoryValidatorService validator = validatorBackedByRepoWhichIsCorrupted(true);
        validator.afterPropertiesSet();
        // A corrupted config repository must trigger a server shutdown.
        verify(validator).shutDownServer();
    }

    @Test
    public void shouldNotShutDownServerIfConfigRepositoryIsSane() throws Exception {
        ConfigRepositoryValidatorService validator = validatorBackedByRepoWhichIsCorrupted(false);
        validator.afterPropertiesSet();
        // A healthy repository must leave the server running.
        verify(validator, never()).shutDownServer();
    }
}
| {
"pile_set_name": "Github"
} |
// +build windows
package mem
import (
"context"
"unsafe"
"github.com/shirou/gopsutil/internal/common"
"golang.org/x/sys/windows"
)
// Win32 procedures resolved lazily from kernel32.dll and psapi.dll.
var (
procGlobalMemoryStatusEx = common.Modkernel32.NewProc("GlobalMemoryStatusEx")
procGetPerformanceInfo = common.ModPsapi.NewProc("GetPerformanceInfo")
)
// memoryStatusEx mirrors the Win32 MEMORYSTATUSEX structure filled in by
// GlobalMemoryStatusEx; field order and widths must match the C layout
// exactly, since a pointer to this struct is passed straight to the API.
type memoryStatusEx struct {
cbSize uint32 // size of this struct in bytes; must be set before the call
dwMemoryLoad uint32 // approximate percentage of physical memory in use (0-100)
ullTotalPhys uint64 // in bytes
ullAvailPhys uint64
ullTotalPageFile uint64
ullAvailPageFile uint64
ullTotalVirtual uint64
ullAvailVirtual uint64
ullAvailExtendedVirtual uint64
}
// VirtualMemory reports physical-memory usage. It is a convenience wrapper
// around VirtualMemoryWithContext using a background context.
func VirtualMemory() (*VirtualMemoryStat, error) {
return VirtualMemoryWithContext(context.Background())
}
// VirtualMemoryWithContext reports physical-memory usage via the Win32
// GlobalMemoryStatusEx API. The context is accepted for interface
// compatibility but is not used, since the underlying call is synchronous.
func VirtualMemoryWithContext(ctx context.Context) (*VirtualMemoryStat, error) {
	var memInfo memoryStatusEx
	memInfo.cbSize = uint32(unsafe.Sizeof(memInfo))
	// Use the error returned by Call, which captures GetLastError for this
	// exact syscall; querying windows.GetLastError() afterwards (as the old
	// code did) may observe an unrelated error set by an intervening call.
	mem, _, err := procGlobalMemoryStatusEx.Call(uintptr(unsafe.Pointer(&memInfo)))
	if mem == 0 {
		return nil, err
	}
	ret := &VirtualMemoryStat{
		Total:     memInfo.ullTotalPhys,
		Available: memInfo.ullAvailPhys,
		// dwMemoryLoad is already an integer percentage (0-100).
		UsedPercent: float64(memInfo.dwMemoryLoad),
	}
	ret.Used = ret.Total - ret.Available
	return ret, nil
}
// performanceInformation mirrors the Win32 PERFORMANCE_INFORMATION structure
// returned by GetPerformanceInfo. Commit/limit values are in pages; multiply
// by pageSize to obtain bytes (see SwapMemoryWithContext). NOTE(review):
// fields are declared uint64, which matches SIZE_T on 64-bit Windows only —
// confirm 32-bit builds are not expected to take this path.
type performanceInformation struct {
cb uint32
commitTotal uint64 // pages currently committed
commitLimit uint64 // maximum pages that can be committed
commitPeak uint64
physicalTotal uint64
physicalAvailable uint64
systemCache uint64
kernelTotal uint64
kernelPaged uint64
kernelNonpaged uint64
pageSize uint64 // page size in bytes
handleCount uint32
processCount uint32
threadCount uint32
}
// SwapMemory reports page-file (commit charge) usage. It is a convenience
// wrapper around SwapMemoryWithContext using a background context.
func SwapMemory() (*SwapMemoryStat, error) {
return SwapMemoryWithContext(context.Background())
}
// SwapMemoryWithContext reports page-file (commit charge) usage via the Win32
// GetPerformanceInfo API. The context is accepted for interface compatibility
// but is not used, since the underlying call is synchronous.
//
// Byte totals are derived from page counts multiplied by the system page
// size, so all returned sizes are multiples of the page size.
func SwapMemoryWithContext(ctx context.Context) (*SwapMemoryStat, error) {
	var perfInfo performanceInformation
	perfInfo.cb = uint32(unsafe.Sizeof(perfInfo))
	// Use the error returned by Call, which captures GetLastError for this
	// exact syscall; querying windows.GetLastError() afterwards (as the old
	// code did) may observe an unrelated error set by an intervening call.
	mem, _, err := procGetPerformanceInfo.Call(uintptr(unsafe.Pointer(&perfInfo)), uintptr(perfInfo.cb))
	if mem == 0 {
		return nil, err
	}
	tot := perfInfo.commitLimit * perfInfo.pageSize
	used := perfInfo.commitTotal * perfInfo.pageSize
	free := tot - used
	var usedPercent float64
	if tot > 0 {
		// Report a percentage (0-100) for consistency with
		// VirtualMemoryStat.UsedPercent; the previous code returned a
		// 0-1 ratio here, unlike every other platform implementation.
		usedPercent = float64(used) / float64(tot) * 100
	}
	return &SwapMemoryStat{
		Total:       tot,
		Used:        used,
		Free:        free,
		UsedPercent: usedPercent,
	}, nil
}
| {
"pile_set_name": "Github"
} |
// This file has been automatically generated by the Unreal Header Implementation tool
#include "FGMapManager.h"
// Auto-generated header-tool stubs: the real implementations live in the game
// binary, so these accessors simply return a null manager.
AFGMapManager* AFGMapManager::Get(UWorld* world){ return nullptr; }
AFGMapManager* AFGMapManager::Get(UObject* worldContext){ return nullptr; }
// Constructor stub: only reproduces the default property values recorded by
// the header tool (tick configuration, relevance and replication flags).
AFGMapManager::AFGMapManager() : Super() {
this->PrimaryActorTick.TickGroup = TG_DuringPhysics; this->PrimaryActorTick.EndTickGroup = TG_PrePhysics; this->PrimaryActorTick.bTickEvenWhenPaused = false; this->PrimaryActorTick.bCanEverTick = true; this->PrimaryActorTick.bStartWithTickEnabled = true; this->PrimaryActorTick.bAllowTickOnDedicatedServer = true; this->PrimaryActorTick.TickInterval = 0.100000001490116;
this->bAlwaysRelevant = true;
this->bReplicates = true;
}
// Auto-generated stub bodies produced by the Unreal Header Implementation
// tool: they satisfy the linker but contain no logic, returning
// default-constructed values wherever a return type is required.
void AFGMapManager::GetLifetimeReplicatedProps(TArray< FLifetimeProperty >& OutLifetimeProps) const{ }
void AFGMapManager::BeginPlay(){ }
void AFGMapManager::Tick(float dt){ }
void AFGMapManager::PreSaveGame_Implementation(int32 saveVersion, int32 gameVersion){ }
void AFGMapManager::PostSaveGame_Implementation(int32 saveVersion, int32 gameVersion){ }
void AFGMapManager::PreLoadGame_Implementation(int32 saveVersion, int32 gameVersion){ }
void AFGMapManager::PostLoadGame_Implementation(int32 saveVersion, int32 gameVersion){ }
void AFGMapManager::GatherDependencies_Implementation(TArray< UObject* >& out_dependentObjects){ }
bool AFGMapManager::NeedTransform_Implementation(){ return bool(); }
bool AFGMapManager::ShouldSave_Implementation() const{ return bool(); }
void AFGMapManager::RequestFogOfWarData( AFGPlayerController* playerController){ }
void AFGMapManager::TransferFogOfWarData(){ }
void AFGMapManager::SyncFogOfWarChanges(const TArray<uint8>& fogOfWarRawData, int32 index){ }
void AFGMapManager::SetupFogOfWarTexture(){ }
void AFGMapManager::InitialFogOfWarRequest(){ }
void AFGMapManager::SetupRepresentationManager(){ }
void AFGMapManager::BindActorRepresentationManager( AFGActorRepresentationManager* representationManager){ }
void AFGMapManager::UpdateFogOfWar(UFGActorRepresentation* actor){ }
FVector2D AFGMapManager::GetMapPositionFromWorldLocation(FVector worldLocation){ return FVector2D(); }
float AFGMapManager::GetMapDistanceFromWorldDistance(float worldDistance){ return float(); }
void AFGMapManager::DrawCircle(FVector2D centerPoint, float radius, float gradientHeightModifier){ }
void AFGMapManager::OnActorRepresentationAdded( UFGActorRepresentation* actorRepresentation){ }
void AFGMapManager::OnActorRepresentationUpdated( UFGActorRepresentation* actorRepresentation){ }
void AFGMapManager::OnActorRepresentationRemoved( UFGActorRepresentation* actorRepresentation){ }
// Static member definition; the meaningful value is supplied by the game binary.
const int32 AFGMapManager::PIXEL_OFFSET = int32();
| {
"pile_set_name": "Github"
} |
/*
Copyright 2017 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package v1beta1
import (
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/runtime"
"k8s.io/apimachinery/pkg/types"
)
// ConversionStrategyType describes different conversion types.
type ConversionStrategyType string
const (
// KubeAPIApprovedAnnotation is an annotation that must be set to create a CRD for the k8s.io, *.k8s.io, kubernetes.io, or *.kubernetes.io namespaces.
// The value should be a link to a URL where the current spec was approved, so updates to the spec should also update the URL.
// If the API is unapproved, you may set the annotation to a string starting with `"unapproved"`. For instance, `"unapproved, temporarily squatting"` or `"unapproved, experimental-only"`. This is discouraged.
KubeAPIApprovedAnnotation = "api-approved.kubernetes.io"
// NoneConverter is a converter that only sets the apiVersion of the CR and leaves everything else unchanged.
NoneConverter ConversionStrategyType = "None"
// WebhookConverter is a converter that calls to an external webhook to convert the CR.
WebhookConverter ConversionStrategyType = "Webhook"
)
// CustomResourceDefinitionSpec describes how a user wants their resource to appear:
// its API group, resource/kind names, scope, validation schema, subresources,
// served versions, printer columns and conversion settings.
type CustomResourceDefinitionSpec struct {
// group is the API group of the defined custom resource.
// The custom resources are served under `/apis/<group>/...`.
// Must match the name of the CustomResourceDefinition (in the form `<names.plural>.<group>`).
Group string `json:"group" protobuf:"bytes,1,opt,name=group"`
// version is the API version of the defined custom resource.
// The custom resources are served under `/apis/<group>/<version>/...`.
// Must match the name of the first item in the `versions` list if `version` and `versions` are both specified.
// Optional if `versions` is specified.
// Deprecated: use `versions` instead.
// +optional
Version string `json:"version,omitempty" protobuf:"bytes,2,opt,name=version"`
// names specify the resource and kind names for the custom resource.
Names CustomResourceDefinitionNames `json:"names" protobuf:"bytes,3,opt,name=names"`
// scope indicates whether the defined custom resource is cluster- or namespace-scoped.
// Allowed values are `Cluster` and `Namespaced`. Default is `Namespaced`.
Scope ResourceScope `json:"scope" protobuf:"bytes,4,opt,name=scope,casttype=ResourceScope"`
// validation describes the schema used for validation and pruning of the custom resource.
// If present, this validation schema is used to validate all versions.
// Top-level and per-version schemas are mutually exclusive.
// +optional
Validation *CustomResourceValidation `json:"validation,omitempty" protobuf:"bytes,5,opt,name=validation"`
// subresources specify what subresources the defined custom resource has.
// If present, this field configures subresources for all versions.
// Top-level and per-version subresources are mutually exclusive.
// +optional
Subresources *CustomResourceSubresources `json:"subresources,omitempty" protobuf:"bytes,6,opt,name=subresources"`
// versions is the list of all API versions of the defined custom resource.
// Optional if `version` is specified.
// The name of the first item in the `versions` list must match the `version` field if `version` and `versions` are both specified.
// Version names are used to compute the order in which served versions are listed in API discovery.
// If the version string is "kube-like", it will sort above non "kube-like" version strings, which are ordered
// lexicographically. "Kube-like" versions start with a "v", then are followed by a number (the major version),
// then optionally the string "alpha" or "beta" and another number (the minor version). These are sorted first
// by GA > beta > alpha (where GA is a version with no suffix such as beta or alpha), and then by comparing
// major version, then minor version. An example sorted list of versions:
// v10, v2, v1, v11beta2, v10beta3, v3beta1, v12alpha1, v11alpha2, foo1, foo10.
// +optional
Versions []CustomResourceDefinitionVersion `json:"versions,omitempty" protobuf:"bytes,7,rep,name=versions"`
// additionalPrinterColumns specifies additional columns returned in Table output.
// See https://kubernetes.io/docs/reference/using-api/api-concepts/#receiving-resources-as-tables for details.
// If present, this field configures columns for all versions.
// Top-level and per-version columns are mutually exclusive.
// If no top-level or per-version columns are specified, a single column displaying the age of the custom resource is used.
// +optional
AdditionalPrinterColumns []CustomResourceColumnDefinition `json:"additionalPrinterColumns,omitempty" protobuf:"bytes,8,rep,name=additionalPrinterColumns"`
// conversion defines conversion settings for the CRD.
// +optional
Conversion *CustomResourceConversion `json:"conversion,omitempty" protobuf:"bytes,9,opt,name=conversion"`
// preserveUnknownFields indicates that object fields which are not specified
// in the OpenAPI schema should be preserved when persisting to storage.
// apiVersion, kind, metadata and known fields inside metadata are always preserved.
// If false, schemas must be defined for all versions.
// Defaults to true in v1beta for backwards compatibility.
// Deprecated: will be required to be false in v1. Preservation of unknown fields can be specified
// in the validation schema using the `x-kubernetes-preserve-unknown-fields: true` extension.
// See https://kubernetes.io/docs/tasks/access-kubernetes-api/custom-resources/custom-resource-definitions/#pruning-versus-preserving-unknown-fields for details.
// +optional
PreserveUnknownFields *bool `json:"preserveUnknownFields,omitempty" protobuf:"varint,10,opt,name=preserveUnknownFields"`
}
// CustomResourceConversion describes how to convert different versions of a CR.
type CustomResourceConversion struct {
// strategy specifies how custom resources are converted between versions. Allowed values are:
// - `None`: The converter only changes the apiVersion and does not touch any other field in the custom resource.
// - `Webhook`: API Server will call to an external webhook to do the conversion. Additional information
//   is needed for this option. This requires spec.preserveUnknownFields to be false, and spec.conversion.webhookClientConfig to be set.
Strategy ConversionStrategyType `json:"strategy" protobuf:"bytes,1,name=strategy"`
// webhookClientConfig is the instructions for how to call the webhook if strategy is `Webhook`.
// Required when `strategy` is set to `Webhook`.
// +optional
WebhookClientConfig *WebhookClientConfig `json:"webhookClientConfig,omitempty" protobuf:"bytes,2,name=webhookClientConfig"`
// conversionReviewVersions is an ordered list of preferred `ConversionReview`
// versions the Webhook expects. The API server will use the first version in
// the list which it supports. If none of the versions specified in this list
// are supported by API server, conversion will fail for the custom resource.
// If a persisted Webhook configuration specifies allowed versions and does not
// include any versions known to the API Server, calls to the webhook will fail.
// Defaults to `["v1beta1"]`.
// +optional
ConversionReviewVersions []string `json:"conversionReviewVersions,omitempty" protobuf:"bytes,3,rep,name=conversionReviewVersions"`
}
// WebhookClientConfig contains the information needed to make a TLS connection
// with the conversion webhook. Exactly one of `url` or `service` must be set.
type WebhookClientConfig struct {
// url gives the location of the webhook, in standard URL form
// (`scheme://host:port/path`). Exactly one of `url` or `service`
// must be specified.
//
// The `host` should not refer to a service running in the cluster; use
// the `service` field instead. The host might be resolved via external
// DNS in some apiservers (e.g., `kube-apiserver` cannot resolve
// in-cluster DNS as that would be a layering violation). `host` may
// also be an IP address.
//
// Please note that using `localhost` or `127.0.0.1` as a `host` is
// risky unless you take great care to run this webhook on all hosts
// which run an apiserver which might need to make calls to this
// webhook. Such installs are likely to be non-portable, i.e., not easy
// to turn up in a new cluster.
//
// The scheme must be "https"; the URL must begin with "https://".
//
// A path is optional, and if present may be any string permissible in
// a URL. You may use the path to pass an arbitrary string to the
// webhook, for example, a cluster identifier.
//
// Attempting to use a user or basic auth e.g. "user:password@" is not
// allowed. Fragments ("#...") and query parameters ("?...") are not
// allowed, either.
//
// +optional
URL *string `json:"url,omitempty" protobuf:"bytes,3,opt,name=url"`
// service is a reference to the service for this webhook. Either
// service or url must be specified.
//
// If the webhook is running within the cluster, then you should use `service`.
//
// +optional
Service *ServiceReference `json:"service,omitempty" protobuf:"bytes,1,opt,name=service"`
// caBundle is a PEM encoded CA bundle which will be used to validate the webhook's server certificate.
// If unspecified, system trust roots on the apiserver are used.
// +optional
CABundle []byte `json:"caBundle,omitempty" protobuf:"bytes,2,opt,name=caBundle"`
}
// ServiceReference holds a reference to Service.legacy.k8s.io, identifying the
// in-cluster Service backing the webhook (see WebhookClientConfig.Service).
type ServiceReference struct {
// namespace is the namespace of the service.
// Required
Namespace string `json:"namespace" protobuf:"bytes,1,opt,name=namespace"`
// name is the name of the service.
// Required
Name string `json:"name" protobuf:"bytes,2,opt,name=name"`
// path is an optional URL path at which the webhook will be contacted.
// +optional
Path *string `json:"path,omitempty" protobuf:"bytes,3,opt,name=path"`
// port is an optional service port at which the webhook will be contacted.
// `port` should be a valid port number (1-65535, inclusive).
// Defaults to 443 for backward compatibility.
// +optional
Port *int32 `json:"port,omitempty" protobuf:"varint,4,opt,name=port"`
}
// CustomResourceDefinitionVersion describes a version for CRD.
type CustomResourceDefinitionVersion struct {
// name is the version name, e.g. "v1", "v2beta1", etc.
// The custom resources are served under this version at `/apis/<group>/<version>/...` if `served` is true.
Name string `json:"name" protobuf:"bytes,1,opt,name=name"`
// served is a flag enabling/disabling this version from being served via REST APIs
Served bool `json:"served" protobuf:"varint,2,opt,name=served"`
// storage indicates this version should be used when persisting custom resources to storage.
// There must be exactly one version with storage=true.
Storage bool `json:"storage" protobuf:"varint,3,opt,name=storage"`
// schema describes the schema used for validation and pruning of this version of the custom resource.
// Top-level and per-version schemas are mutually exclusive.
// Per-version schemas must not all be set to identical values (top-level validation schema should be used instead).
// +optional
Schema *CustomResourceValidation `json:"schema,omitempty" protobuf:"bytes,4,opt,name=schema"`
// subresources specify what subresources this version of the defined custom resource has.
// Top-level and per-version subresources are mutually exclusive.
// Per-version subresources must not all be set to identical values (top-level subresources should be used instead).
// +optional
Subresources *CustomResourceSubresources `json:"subresources,omitempty" protobuf:"bytes,5,opt,name=subresources"`
// additionalPrinterColumns specifies additional columns returned in Table output.
// See https://kubernetes.io/docs/reference/using-api/api-concepts/#receiving-resources-as-tables for details.
// Top-level and per-version columns are mutually exclusive.
// Per-version columns must not all be set to identical values (top-level columns should be used instead).
// If no top-level or per-version columns are specified, a single column displaying the age of the custom resource is used.
// +optional
AdditionalPrinterColumns []CustomResourceColumnDefinition `json:"additionalPrinterColumns,omitempty" protobuf:"bytes,6,rep,name=additionalPrinterColumns"`
}
// CustomResourceColumnDefinition specifies a column for server side printing.
type CustomResourceColumnDefinition struct {
// name is a human readable name for the column.
Name string `json:"name" protobuf:"bytes,1,opt,name=name"`
// type is an OpenAPI type definition for this column.
// See https://github.com/OAI/OpenAPI-Specification/blob/master/versions/2.0.md#data-types for details.
Type string `json:"type" protobuf:"bytes,2,opt,name=type"`
// format is an optional OpenAPI type definition for this column. The 'name' format is applied
// to the primary identifier column to assist clients in identifying the column that holds the resource name.
// See https://github.com/OAI/OpenAPI-Specification/blob/master/versions/2.0.md#data-types for details.
// +optional
Format string `json:"format,omitempty" protobuf:"bytes,3,opt,name=format"`
// description is a human readable description of this column.
// +optional
Description string `json:"description,omitempty" protobuf:"bytes,4,opt,name=description"`
// priority is an integer defining the relative importance of this column compared to others. Lower
// numbers are considered higher priority. Columns that may be omitted in limited space scenarios
// should be given a priority greater than 0.
// +optional
Priority int32 `json:"priority,omitempty" protobuf:"bytes,5,opt,name=priority"`
// JSONPath is a simple JSON path (i.e. with array notation) which is evaluated against
// each custom resource to produce the value for this column.
JSONPath string `json:"JSONPath" protobuf:"bytes,6,opt,name=JSONPath"`
}
// CustomResourceDefinitionNames indicates the names to serve this CustomResourceDefinition
type CustomResourceDefinitionNames struct {
// plural is the plural name of the resource to serve.
// The custom resources are served under `/apis/<group>/<version>/.../<plural>`.
// Must match the name of the CustomResourceDefinition (in the form `<names.plural>.<group>`).
// Must be all lowercase.
Plural string `json:"plural" protobuf:"bytes,1,opt,name=plural"`
// singular is the singular name of the resource. It must be all lowercase. Defaults to lowercased `kind`.
// +optional
Singular string `json:"singular,omitempty" protobuf:"bytes,2,opt,name=singular"`
// shortNames are short names for the resource, exposed in API discovery documents,
// and used by clients to support invocations like `kubectl get <shortname>`.
// They must be all lowercase.
// +optional
ShortNames []string `json:"shortNames,omitempty" protobuf:"bytes,3,opt,name=shortNames"`
// kind is the serialized kind of the resource. It is normally CamelCase and singular.
// Custom resource instances will use this value as the `kind` attribute in API calls.
Kind string `json:"kind" protobuf:"bytes,4,opt,name=kind"`
// listKind is the serialized kind of the list for this resource. Defaults to "`kind`List".
// +optional
ListKind string `json:"listKind,omitempty" protobuf:"bytes,5,opt,name=listKind"`
// categories is a list of grouped resources this custom resource belongs to (e.g. 'all').
// This is published in API discovery documents, and used by clients to support invocations like
// `kubectl get all`.
// +optional
Categories []string `json:"categories,omitempty" protobuf:"bytes,6,rep,name=categories"`
}
// ResourceScope is an enum defining the different scopes available to a custom resource
type ResourceScope string

const (
	// ClusterScoped indicates the custom resource is served at the cluster scope
	// (no namespace in its URL path).
	ClusterScoped ResourceScope = "Cluster"
	// NamespaceScoped indicates the custom resource is served within a namespace.
	NamespaceScoped ResourceScope = "Namespaced"
)

// ConditionStatus is the status of a condition: True, False or Unknown.
type ConditionStatus string

// These are valid condition statuses. "ConditionTrue" means a resource is in the condition.
// "ConditionFalse" means a resource is not in the condition. "ConditionUnknown" means kubernetes
// can't decide if a resource is in the condition or not. In the future, we could add other
// intermediate conditions, e.g. ConditionDegraded.
const (
	ConditionTrue    ConditionStatus = "True"
	ConditionFalse   ConditionStatus = "False"
	ConditionUnknown ConditionStatus = "Unknown"
)

// CustomResourceDefinitionConditionType is a valid value for CustomResourceDefinitionCondition.Type
type CustomResourceDefinitionConditionType string

const (
	// Established means that the resource has become active. A resource is established when all names are
	// accepted without a conflict for the first time. A resource stays established until deleted, even during
	// a later NamesAccepted due to changed names. Note that not all names can be changed.
	Established CustomResourceDefinitionConditionType = "Established"
	// NamesAccepted means the names chosen for this CustomResourceDefinition do not conflict with others in
	// the group and are therefore accepted.
	NamesAccepted CustomResourceDefinitionConditionType = "NamesAccepted"
	// NonStructuralSchema means that one or more OpenAPI schema is not structural.
	//
	// A schema is structural if it specifies types for all values, with the only exceptions of those with
	// - x-kubernetes-int-or-string: true — for fields which can be integer or string
	// - x-kubernetes-preserve-unknown-fields: true — for raw, unspecified JSON values
	// and there is no type, additionalProperties, default, nullable or x-kubernetes-* vendor extensions
	// specified under allOf, anyOf, oneOf or not.
	//
	// Non-structural schemas will not be allowed anymore in v1 API groups. Moreover, new features will not be
	// available for non-structural CRDs:
	// - pruning
	// - defaulting
	// - read-only
	// - OpenAPI publishing
	// - webhook conversion
	NonStructuralSchema CustomResourceDefinitionConditionType = "NonStructuralSchema"
	// Terminating means that the CustomResourceDefinition has been deleted and is cleaning up.
	Terminating CustomResourceDefinitionConditionType = "Terminating"
	// KubernetesAPIApprovalPolicyConformant indicates that an API in *.k8s.io or *.kubernetes.io is or is not approved. For CRDs
	// outside those groups, this condition will not be set. For CRDs inside those groups, the condition will
	// be true if .metadata.annotations["api-approved.kubernetes.io"] is set to a URL, otherwise it will be false.
	// See https://github.com/kubernetes/enhancements/pull/1111 for more details.
	KubernetesAPIApprovalPolicyConformant CustomResourceDefinitionConditionType = "KubernetesAPIApprovalPolicyConformant"
)
// CustomResourceDefinitionCondition contains details for the current condition of this CustomResourceDefinition.
type CustomResourceDefinitionCondition struct {
	// type is the type of the condition. Types include Established, NamesAccepted, NonStructuralSchema,
	// Terminating and KubernetesAPIApprovalPolicyConformant (see the CustomResourceDefinitionConditionType
	// constants above).
	Type CustomResourceDefinitionConditionType `json:"type" protobuf:"bytes,1,opt,name=type,casttype=CustomResourceDefinitionConditionType"`
	// status is the status of the condition.
	// Can be True, False, Unknown.
	Status ConditionStatus `json:"status" protobuf:"bytes,2,opt,name=status,casttype=ConditionStatus"`
	// lastTransitionTime last time the condition transitioned from one status to another.
	// +optional
	LastTransitionTime metav1.Time `json:"lastTransitionTime,omitempty" protobuf:"bytes,3,opt,name=lastTransitionTime"`
	// reason is a unique, one-word, CamelCase reason for the condition's last transition.
	// +optional
	Reason string `json:"reason,omitempty" protobuf:"bytes,4,opt,name=reason"`
	// message is a human-readable message indicating details about last transition.
	// +optional
	Message string `json:"message,omitempty" protobuf:"bytes,5,opt,name=message"`
}

// CustomResourceDefinitionStatus indicates the state of the CustomResourceDefinition
type CustomResourceDefinitionStatus struct {
	// conditions indicate state for particular aspects of a CustomResourceDefinition
	// NOTE(review): this repeated (slice) field is tagged "opt"; generated protobuf tags for slices
	// normally use "rep" — verify against this package's generated.pb.go.
	// +optional
	Conditions []CustomResourceDefinitionCondition `json:"conditions" protobuf:"bytes,1,opt,name=conditions"`
	// acceptedNames are the names that are actually being used to serve discovery.
	// They may be different than the names in spec.
	AcceptedNames CustomResourceDefinitionNames `json:"acceptedNames" protobuf:"bytes,2,opt,name=acceptedNames"`
	// storedVersions lists all versions of CustomResources that were ever persisted. Tracking these
	// versions allows a migration path for stored versions in etcd. The field is mutable
	// so a migration controller can finish a migration to another version (ensuring
	// no old objects are left in storage), and then remove the rest of the
	// versions from this list.
	// Versions may not be removed from `spec.versions` while they exist in this list.
	StoredVersions []string `json:"storedVersions" protobuf:"bytes,3,rep,name=storedVersions"`
}

// CustomResourceCleanupFinalizer is the name of the finalizer which will delete instances of
// a CustomResourceDefinition
const CustomResourceCleanupFinalizer = "customresourcecleanup.apiextensions.k8s.io"
// The following markers are directives for Kubernetes code generators
// (client-gen and deepcopy-gen); they are not ordinary documentation.
// +genclient
// +genclient:nonNamespaced
// +k8s:deepcopy-gen:interfaces=k8s.io/apimachinery/pkg/runtime.Object

// CustomResourceDefinition represents a resource that should be exposed on the API server. Its name MUST be in the format
// <.spec.name>.<.spec.group>.
// Deprecated in v1.16, planned for removal in v1.19. Use apiextensions.k8s.io/v1 CustomResourceDefinition instead.
type CustomResourceDefinition struct {
	metav1.TypeMeta   `json:",inline"`
	metav1.ObjectMeta `json:"metadata,omitempty" protobuf:"bytes,1,opt,name=metadata"`

	// spec describes how the user wants the resources to appear
	Spec CustomResourceDefinitionSpec `json:"spec" protobuf:"bytes,2,opt,name=spec"`
	// status indicates the actual state of the CustomResourceDefinition
	// +optional
	Status CustomResourceDefinitionStatus `json:"status,omitempty" protobuf:"bytes,3,opt,name=status"`
}

// +k8s:deepcopy-gen:interfaces=k8s.io/apimachinery/pkg/runtime.Object

// CustomResourceDefinitionList is a list of CustomResourceDefinition objects.
type CustomResourceDefinitionList struct {
	metav1.TypeMeta `json:",inline"`
	metav1.ListMeta `json:"metadata,omitempty" protobuf:"bytes,1,opt,name=metadata"`

	// items list individual CustomResourceDefinition objects
	Items []CustomResourceDefinition `json:"items" protobuf:"bytes,2,rep,name=items"`
}
// CustomResourceValidation is a list of validation methods for CustomResources.
type CustomResourceValidation struct {
	// openAPIV3Schema is the OpenAPI v3 schema to use for validation and pruning.
	// +optional
	OpenAPIV3Schema *JSONSchemaProps `json:"openAPIV3Schema,omitempty" protobuf:"bytes,1,opt,name=openAPIV3Schema"`
}

// CustomResourceSubresources defines the status and scale subresources for CustomResources.
type CustomResourceSubresources struct {
	// status indicates the custom resource should serve a `/status` subresource.
	// When enabled:
	// 1. requests to the custom resource primary endpoint ignore changes to the `status` stanza of the object.
	// 2. requests to the custom resource `/status` subresource ignore changes to anything other than the `status` stanza of the object.
	// +optional
	Status *CustomResourceSubresourceStatus `json:"status,omitempty" protobuf:"bytes,1,opt,name=status"`
	// scale indicates the custom resource should serve a `/scale` subresource that returns an `autoscaling/v1` Scale object.
	// +optional
	Scale *CustomResourceSubresourceScale `json:"scale,omitempty" protobuf:"bytes,2,opt,name=scale"`
}

// CustomResourceSubresourceStatus defines how to serve the status subresource for CustomResources.
// Status is represented by the `.status` JSON path inside of a CustomResource. When set,
// * exposes a /status subresource for the custom resource
// * PUT requests to the /status subresource take a custom resource object, and ignore changes to anything except the status stanza
// * PUT/POST/PATCH requests to the custom resource ignore changes to the status stanza
// (It carries no fields; its presence alone enables the behavior.)
type CustomResourceSubresourceStatus struct{}

// CustomResourceSubresourceScale defines how to serve the scale subresource for CustomResources.
type CustomResourceSubresourceScale struct {
	// specReplicasPath defines the JSON path inside of a custom resource that corresponds to Scale `spec.replicas`.
	// Only JSON paths without the array notation are allowed.
	// Must be a JSON Path under `.spec`.
	// If there is no value under the given path in the custom resource, the `/scale` subresource will return an error on GET.
	SpecReplicasPath string `json:"specReplicasPath" protobuf:"bytes,1,name=specReplicasPath"`
	// statusReplicasPath defines the JSON path inside of a custom resource that corresponds to Scale `status.replicas`.
	// Only JSON paths without the array notation are allowed.
	// Must be a JSON Path under `.status`.
	// If there is no value under the given path in the custom resource, the `status.replicas` value in the `/scale` subresource
	// will default to 0.
	StatusReplicasPath string `json:"statusReplicasPath" protobuf:"bytes,2,opt,name=statusReplicasPath"`
	// labelSelectorPath defines the JSON path inside of a custom resource that corresponds to Scale `status.selector`.
	// Only JSON paths without the array notation are allowed.
	// Must be a JSON Path under `.status` or `.spec`.
	// Must be set to work with HorizontalPodAutoscaler.
	// The field pointed by this JSON path must be a string field (not a complex selector struct)
	// which contains a serialized label selector in string form.
	// More info: https://kubernetes.io/docs/tasks/access-kubernetes-api/custom-resources/custom-resource-definitions#scale-subresource
	// If there is no value under the given path in the custom resource, the `status.selector` value in the `/scale`
	// subresource will default to the empty string.
	// +optional
	LabelSelectorPath *string `json:"labelSelectorPath,omitempty" protobuf:"bytes,3,opt,name=labelSelectorPath"`
}
// +k8s:deepcopy-gen:interfaces=k8s.io/apimachinery/pkg/runtime.Object

// ConversionReview describes a conversion request/response.
// NOTE(review): by the conversion-webhook contract the API server populates `request`
// and the webhook populates `response`; neither is enforced by this type — confirm
// against the conversion webhook documentation before relying on it.
type ConversionReview struct {
	metav1.TypeMeta `json:",inline"`
	// request describes the attributes for the conversion request.
	// +optional
	Request *ConversionRequest `json:"request,omitempty" protobuf:"bytes,1,opt,name=request"`
	// response describes the attributes for the conversion response.
	// +optional
	Response *ConversionResponse `json:"response,omitempty" protobuf:"bytes,2,opt,name=response"`
}

// ConversionRequest describes the conversion request parameters.
type ConversionRequest struct {
	// uid is an identifier for the individual request/response. It allows distinguishing instances of requests which are
	// otherwise identical (parallel requests, etc).
	// The UID is meant to track the round trip (request/response) between the Kubernetes API server and the webhook, not the user request.
	// It is suitable for correlating log entries between the webhook and apiserver, for either auditing or debugging.
	UID types.UID `json:"uid" protobuf:"bytes,1,name=uid"`
	// desiredAPIVersion is the version to convert given objects to. e.g. "myapi.example.com/v1"
	DesiredAPIVersion string `json:"desiredAPIVersion" protobuf:"bytes,2,name=desiredAPIVersion"`
	// objects is the list of custom resource objects to be converted.
	Objects []runtime.RawExtension `json:"objects" protobuf:"bytes,3,rep,name=objects"`
}

// ConversionResponse describes a conversion response.
type ConversionResponse struct {
	// uid is an identifier for the individual request/response.
	// This should be copied over from the corresponding `request.uid`.
	UID types.UID `json:"uid" protobuf:"bytes,1,name=uid"`
	// convertedObjects is the list of converted version of `request.objects` if the `result` is successful, otherwise empty.
	// The webhook is expected to set `apiVersion` of these objects to the `request.desiredAPIVersion`. The list
	// must also have the same size as the input list with the same objects in the same order (equal kind, metadata.uid, metadata.name and metadata.namespace).
	// The webhook is allowed to mutate labels and annotations. Any other change to the metadata is silently ignored.
	ConvertedObjects []runtime.RawExtension `json:"convertedObjects" protobuf:"bytes,2,rep,name=convertedObjects"`
	// result contains the result of conversion with extra details if the conversion failed. `result.status` determines if
	// the conversion failed or succeeded. The `result.status` field is required and represents the success or failure of the
	// conversion. A successful conversion must set `result.status` to `Success`. A failed conversion must set
	// `result.status` to `Failure` and provide more details in `result.message` and return http status 200. The `result.message`
	// will be used to construct an error message for the end user.
	Result metav1.Status `json:"result" protobuf:"bytes,3,name=result"`
}
| {
"pile_set_name": "Github"
} |
/*
*
* Copyright 2015 gRPC authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
// Forwarding header: the public <grpcpp/support/client_interceptor.h> path
// simply re-exports the real implementation, which lives under impl/codegen/.
#ifndef GRPCPP_SUPPORT_CLIENT_INTERCEPTOR_H
#define GRPCPP_SUPPORT_CLIENT_INTERCEPTOR_H

#include <grpcpp/impl/codegen/client_interceptor.h>

#endif  // GRPCPP_SUPPORT_CLIENT_INTERCEPTOR_H
| {
"pile_set_name": "Github"
} |
/*
* Copyright (C) 2006 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.elasticsearch.common.inject.name;
import org.elasticsearch.common.inject.BindingAnnotation;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.Target;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
/**
 * Annotates named things.
 *
 * <p>Used as a binding annotation so that injection points can be
 * distinguished by name rather than by type alone.
 *
 * @author crazybob@google.com (Bob Lee)
 */
@Retention(RUNTIME)
@Target({ElementType.FIELD, ElementType.PARAMETER, ElementType.METHOD})
@BindingAnnotation
public @interface Named {

    /**
     * The name to bind against, e.g. {@code @Named("port")}.
     */
    String value();
}
| {
"pile_set_name": "Github"
} |
<math xmlns="http://www.w3.org/1998/Math/MathML" display="block">
<mrow>
<mtable>
<mtr>
<mtd>
<mo>∏</mo>
<mfenced>
<mrow>
<mi>n</mi>
<mo>,</mo>
<mi>φ</mi>
<mo>,</mo>
<mn>0</mn>
</mrow>
</mfenced>
</mtd>
<mtd>
<mo>=</mo>
</mtd>
<mtd>
<msqrt>
<mrow>
<mfrac>
<mn>1</mn>
<mrow>
<mn>1</mn>
<mo>−</mo>
<mi>n</mi>
</mrow>
</mfrac>
</mrow>
</msqrt>
<msup>
<mi>tan</mi>
<mrow>
<mo>−</mo>
<mn>1</mn>
</mrow>
</msup>
<mfenced>
<mrow>
<msqrt>
<mrow>
<mn>1</mn>
<mo>−</mo>
<mi>n</mi>
</mrow>
</msqrt>
<mi>tan</mi>
<mi>φ</mi>
</mrow>
</mfenced>
</mtd>
<mtd>
<mo>;</mo>
<mi>n</mi>
<mo><</mo>
<mn>1</mn>
</mtd>
</mtr>
<mtr>
<mtd>
<mspace width="1em"/>
</mtd>
<mtd>
<mo>=</mo>
</mtd>
<mtd>
<msqrt>
<mrow>
<mfrac>
<mn>1</mn>
<mrow>
<mi>n</mi>
<mo>−</mo>
<mn>1</mn>
</mrow>
</mfrac>
</mrow>
</msqrt>
<msup>
<mi>tanh</mi>
<mrow>
<mo>−</mo>
<mn>1</mn>
</mrow>
</msup>
<mfenced>
<mrow>
<msqrt>
<mrow>
<mi>n</mi>
<mo>−</mo>
<mn>1</mn>
</mrow>
</msqrt>
<mi>tan</mi>
<mi>φ</mi>
</mrow>
</mfenced>
</mtd>
<mtd>
<mo>;</mo>
<mi>n</mi>
<mo>></mo>
<mn>1</mn>
</mtd>
</mtr>
<mtr>
<mtd>
<mspace width="1em"/>
</mtd>
<mtd>
<mo>=</mo>
</mtd>
<mtd>
<mi>tan</mi>
<mi>φ</mi>
</mtd>
<mtd>
<mo>;</mo>
<mi>n</mi>
<mo>=</mo>
<mn>1</mn>
</mtd>
</mtr>
</mtable>
</mrow>
</math>
| {
"pile_set_name": "Github"
} |
try:
from OpenGL import NullFunctionError
except ImportError:
from OpenGL.error import NullFunctionError
from OpenGL.GL import *
from OpenGL.GL import shaders
import re
## For centralizing and managing vertex/fragment shader programs.
def initShaders():
    """Construct the default shader programs and store them in the global
    ``Shaders`` list.

    Each ``ShaderProgram`` registers itself in ``ShaderProgram.names`` as a
    side effect of construction, so after this runs the programs can be
    retrieved by name with ``getShaderProgram()``.  GLSL source is only
    compiled lazily, the first time a program is actually used.
    """
    global Shaders
    Shaders = [
        # The "null" program: entering it leaves the fixed-function pipeline active.
        ShaderProgram(None, []),

        ## increases fragment alpha as the normal turns orthogonal to the view
        ## this is useful for viewing shells that enclose a volume (such as isosurfaces)
        ShaderProgram('balloon', [
            VertexShader("""
                varying vec3 normal;
                void main() {
                    // compute here for use in fragment shader
                    normal = normalize(gl_NormalMatrix * gl_Normal);
                    gl_FrontColor = gl_Color;
                    gl_BackColor = gl_Color;
                    gl_Position = ftransform();
                }
            """),
            FragmentShader("""
                varying vec3 normal;
                void main() {
                    vec4 color = gl_Color;
                    color.w = min(color.w + 2.0 * color.w * pow(normal.x*normal.x + normal.y*normal.y, 5.0), 1.0);
                    gl_FragColor = color;
                }
            """)
        ]),

        ## colors fragments based on face normals relative to view
        ## This means that the colors will change depending on how the view is rotated
        ShaderProgram('viewNormalColor', [
            VertexShader("""
                varying vec3 normal;
                void main() {
                    // compute here for use in fragment shader
                    normal = normalize(gl_NormalMatrix * gl_Normal);
                    gl_FrontColor = gl_Color;
                    gl_BackColor = gl_Color;
                    gl_Position = ftransform();
                }
            """),
            FragmentShader("""
                varying vec3 normal;
                void main() {
                    vec4 color = gl_Color;
                    color.x = (normal.x + 1.0) * 0.5;
                    color.y = (normal.y + 1.0) * 0.5;
                    color.z = (normal.z + 1.0) * 0.5;
                    gl_FragColor = color;
                }
            """)
        ]),

        ## colors fragments based on absolute face normals.
        ShaderProgram('normalColor', [
            VertexShader("""
                varying vec3 normal;
                void main() {
                    // compute here for use in fragment shader
                    normal = normalize(gl_Normal);
                    gl_FrontColor = gl_Color;
                    gl_BackColor = gl_Color;
                    gl_Position = ftransform();
                }
            """),
            FragmentShader("""
                varying vec3 normal;
                void main() {
                    vec4 color = gl_Color;
                    color.x = (normal.x + 1.0) * 0.5;
                    color.y = (normal.y + 1.0) * 0.5;
                    color.z = (normal.z + 1.0) * 0.5;
                    gl_FragColor = color;
                }
            """)
        ]),

        ## very simple simulation of lighting.
        ## The light source position is always relative to the camera.
        ShaderProgram('shaded', [
            VertexShader("""
                varying vec3 normal;
                void main() {
                    // compute here for use in fragment shader
                    normal = normalize(gl_NormalMatrix * gl_Normal);
                    gl_FrontColor = gl_Color;
                    gl_BackColor = gl_Color;
                    gl_Position = ftransform();
                }
            """),
            FragmentShader("""
                varying vec3 normal;
                void main() {
                    float p = dot(normal, normalize(vec3(1.0, -1.0, -1.0)));
                    p = p < 0. ? 0. : p * 0.8;
                    vec4 color = gl_Color;
                    color.x = color.x * (0.2 + p);
                    color.y = color.y * (0.2 + p);
                    color.z = color.z * (0.2 + p);
                    gl_FragColor = color;
                }
            """)
        ]),

        ## colors get brighter near edges of object
        ShaderProgram('edgeHilight', [
            VertexShader("""
                varying vec3 normal;
                void main() {
                    // compute here for use in fragment shader
                    normal = normalize(gl_NormalMatrix * gl_Normal);
                    gl_FrontColor = gl_Color;
                    gl_BackColor = gl_Color;
                    gl_Position = ftransform();
                }
            """),
            FragmentShader("""
                varying vec3 normal;
                void main() {
                    vec4 color = gl_Color;
                    float s = pow(normal.x*normal.x + normal.y*normal.y, 2.0);
                    color.x = color.x + s * (1.0-color.x);
                    color.y = color.y + s * (1.0-color.y);
                    color.z = color.z + s * (1.0-color.z);
                    gl_FragColor = color;
                }
            """)
        ]),

        ## colors fragments by z-value.
        ## This is useful for coloring surface plots by height.
        ## This shader uses a uniform called "colorMap" to determine how to map the colors:
        ##       red = pow(colorMap[0]*(z + colorMap[1]), colorMap[2])
        ##       green = pow(colorMap[3]*(z + colorMap[4]), colorMap[5])
        ##       blue = pow(colorMap[6]*(z + colorMap[7]), colorMap[8])
        ## (set the values like this: shader['uniformMap'] = array([...])
        ShaderProgram('heightColor', [
            VertexShader("""
                varying vec4 pos;
                void main() {
                    gl_FrontColor = gl_Color;
                    gl_BackColor = gl_Color;
                    pos = gl_Vertex;
                    gl_Position = ftransform();
                }
            """),
            FragmentShader("""
                uniform float colorMap[9];
                varying vec4 pos;
                //out vec4 gl_FragColor;   // only needed for later glsl versions
                //in vec4 gl_Color;
                void main() {
                    vec4 color = gl_Color;
                    color.x = colorMap[0] * (pos.z + colorMap[1]);
                    if (colorMap[2] != 1.0)
                        color.x = pow(color.x, colorMap[2]);
                    color.x = color.x < 0. ? 0. : (color.x > 1. ? 1. : color.x);
                    color.y = colorMap[3] * (pos.z + colorMap[4]);
                    if (colorMap[5] != 1.0)
                        color.y = pow(color.y, colorMap[5]);
                    color.y = color.y < 0. ? 0. : (color.y > 1. ? 1. : color.y);
                    color.z = colorMap[6] * (pos.z + colorMap[7]);
                    if (colorMap[8] != 1.0)
                        color.z = pow(color.z, colorMap[8]);
                    color.z = color.z < 0. ? 0. : (color.z > 1. ? 1. : color.z);
                    color.w = 1.0;
                    gl_FragColor = color;
                }
            """),
        ], uniforms={'colorMap': [1, 1, 1, 1, 0.5, 1, 1, 0, 1]}),

        ShaderProgram('pointSprite', [   ## allows specifying point size using normal.x
            ## See:
            ##
            ##  http://stackoverflow.com/questions/9609423/applying-part-of-a-texture-sprite-sheet-texture-map-to-a-point-sprite-in-ios
            ##  http://stackoverflow.com/questions/3497068/textured-points-in-opengl-es-2-0
            ##
            ##
            VertexShader("""
                void main() {
                    gl_FrontColor=gl_Color;
                    gl_PointSize = gl_Normal.x;
                    gl_Position = ftransform();
                }
            """),
            #FragmentShader("""
            ##version 120
            #uniform sampler2D texture;
            #void main ( )
            #{
            #gl_FragColor = texture2D(texture, gl_PointCoord) * gl_Color;
            #}
            #""")
        ]),
    ]
# Cache for compiled shader program handles. NOTE(review): appears unused
# within this module (programs cache themselves in ShaderProgram.names /
# self.prog) — confirm whether external code relies on it before removing.
CompiledShaderPrograms = {}
def getShaderProgram(name):
    """Look up and return the ShaderProgram registered under *name*.

    Programs register themselves in ``ShaderProgram.names`` when constructed
    (see ``initShaders``).  Raises ``KeyError`` for an unknown name.
    """
    registry = ShaderProgram.names
    return registry[name]
class Shader(object):
    """Wrapper around a single GLSL shader source string of a given type.

    Compilation is deferred until :meth:`shader` is first called; the
    compiled object is then cached on the instance.
    """

    def __init__(self, shaderType, code):
        # shaderType is an OpenGL enum (GL_VERTEX_SHADER / GL_FRAGMENT_SHADER);
        # code is the GLSL source text.
        self.shaderType = shaderType
        self.code = code
        self.compiled = None  # lazily-populated cache of the compiled shader

    def shader(self):
        """Compile the GLSL source (once) and return the compiled shader.

        Raises an Exception with a readable message if the OpenGL
        implementation has no shader support, or — on compile failure — an
        Exception whose text lists the compiler messages followed by the
        source, each line prefixed with the numbers of the messages that
        refer to it.
        """
        if self.compiled is None:
            try:
                self.compiled = shaders.compileShader(self.code, self.shaderType)
            except NullFunctionError:
                raise Exception("This OpenGL implementation does not support shader programs; many OpenGL features in pyqtgraph will not work.")
            except RuntimeError as exc:
                ## Format compile errors a bit more nicely
                if len(exc.args) == 3:
                    err, code, typ = exc.args
                    if not err.startswith('Shader compile failure'):
                        raise
                    code = code[0].decode('utf_8').split('\n')
                    err, c, msgs = err.partition(':')
                    err = err + '\n'
                    # Strip the bytes-literal artifacts (b'...', escaped \n)
                    # that PyOpenGL embeds in the error text.
                    msgs = re.sub('b\'', '', msgs)
                    msgs = re.sub('\'$', '', msgs)
                    msgs = re.sub('\\\\n', '\n', msgs)
                    msgs = msgs.split('\n')
                    # errNums[i] collects the message numbers referring to source line i+1.
                    errNums = [()] * len(code)
                    for i, msg in enumerate(msgs):
                        msg = msg.strip()
                        if msg == '':
                            continue
                        # Messages look like "0:2(10): error ..." or "2(10): ...";
                        # the second group is the source line number.
                        m = re.match(r'(\d+\:)?\d+\((\d+)\)', msg)
                        if m is not None:
                            line = int(m.groups()[1])
                            errNums[line-1] = errNums[line-1] + (str(i+1),)
                            #code[line-1] = '%d\t%s' % (i+1, code[line-1])
                        err = err + "%d %s\n" % (i+1, msg)
                    # Left-pad the per-line message numbers so the source columns align.
                    errNums = [','.join(n) for n in errNums]
                    maxlen = max(map(len, errNums))
                    code = [errNums[i] + " "*(maxlen-len(errNums[i])) + line for i, line in enumerate(code)]
                    err = err + '\n'.join(code)
                    raise Exception(err)
                else:
                    raise
        return self.compiled
class VertexShader(Shader):
    """Convenience subclass: a :class:`Shader` fixed to GL_VERTEX_SHADER."""

    def __init__(self, code):
        super(VertexShader, self).__init__(GL_VERTEX_SHADER, code)
class FragmentShader(Shader):
    """Convenience subclass: a :class:`Shader` fixed to GL_FRAGMENT_SHADER."""

    def __init__(self, code):
        super(FragmentShader, self).__init__(GL_FRAGMENT_SHADER, code)
class ShaderProgram(object):
    """A named, linkable collection of vertex/fragment shaders.

    Instances register themselves in the class-level ``names`` dict and are
    used as context managers: entering activates the program (and uploads
    any stored uniform data), exiting restores the fixed-function pipeline.
    Linking is deferred until :meth:`program` is first called.
    """

    # Registry of all programs, keyed by name (populated in __init__).
    names = {}

    def __init__(self, name, shaders, uniforms=None):
        self.name = name
        ShaderProgram.names[name] = self
        self.shaders = shaders
        self.prog = None       # linked program id; -1 marks a failed link
        self.blockData = {}    # uniform-block buffers (currently unused; see commented code)
        self.uniformData = {}  # uniform name -> list of floats, uploaded on __enter__

        ## parse extra options from the shader definition
        if uniforms is not None:
            for k, v in uniforms.items():
                self[k] = v

    def setBlockData(self, blockName, data):
        # Store (or, with data=None, delete) buffer data for a named uniform block.
        if data is None:
            del self.blockData[blockName]
        else:
            self.blockData[blockName] = data

    def setUniformData(self, uniformName, data):
        # Store (or, with data=None, delete) float values for a named uniform;
        # they are uploaded each time the program is entered.
        if data is None:
            del self.uniformData[uniformName]
        else:
            self.uniformData[uniformName] = data

    def __setitem__(self, item, val):
        # shader['colorMap'] = [...] is shorthand for setUniformData.
        self.setUniformData(item, val)

    def __delitem__(self, item):
        self.setUniformData(item, None)

    def program(self):
        """Compile and link (once) and return the program id; -1 after a failed link."""
        if self.prog is None:
            try:
                compiled = [s.shader() for s in self.shaders]  ## compile all shaders
                self.prog = shaders.compileProgram(*compiled)  ## compile program
            except:
                # Remember the failure so __enter__ becomes a no-op instead of retrying.
                self.prog = -1
                raise
        return self.prog

    def __enter__(self):
        if len(self.shaders) > 0 and self.program() != -1:
            glUseProgram(self.program())

            try:
                ## load uniform values into program
                for uniformName, data in self.uniformData.items():
                    loc = self.uniform(uniformName)
                    if loc == -1:
                        raise Exception('Could not find uniform variable "%s"' % uniformName)
                    glUniform1fv(loc, len(data), data)

                ### bind buffer data to program blocks
                #if len(self.blockData) > 0:
                    #bindPoint = 1
                    #for blockName, data in self.blockData.items():
                        ### Program should have a uniform block declared:
                        ###
                        ### layout (std140) uniform blockName {
                        ###     vec4 diffuse;
                        ### };

                        ### pick any-old binding point. (there are a limited number of these per-program
                        #bindPoint = 1

                        ### get the block index for a uniform variable in the shader
                        #blockIndex = glGetUniformBlockIndex(self.program(), blockName)

                        ### give the shader block a binding point
                        #glUniformBlockBinding(self.program(), blockIndex, bindPoint)

                        ### create a buffer
                        #buf = glGenBuffers(1)
                        #glBindBuffer(GL_UNIFORM_BUFFER, buf)
                        #glBufferData(GL_UNIFORM_BUFFER, size, data, GL_DYNAMIC_DRAW)
                        ### also possible to use glBufferSubData to fill parts of the buffer

                        ### bind buffer to the same binding point
                        #glBindBufferBase(GL_UNIFORM_BUFFER, bindPoint, buf)
            except:
                # Undo program activation if uniform upload failed.
                glUseProgram(0)
                raise

    def __exit__(self, *args):
        if len(self.shaders) > 0:
            glUseProgram(0)

    def uniform(self, name):
        """Return the location integer for a uniform variable in this program"""
        return glGetUniformLocation(self.program(), name.encode('utf_8'))

    #def uniformBlockInfo(self, blockName):
        #blockIndex = glGetUniformBlockIndex(self.program(), blockName)
        #count = glGetActiveUniformBlockiv(self.program(), blockIndex, GL_UNIFORM_BLOCK_ACTIVE_UNIFORMS)
        #indices = []
        #for i in range(count):
            #indices.append(glGetActiveUniformBlockiv(self.program(), blockIndex, GL_UNIFORM_BLOCK_ACTIVE_UNIFORM_INDICES))
class HeightColorShader(ShaderProgram):
    """Experimental ShaderProgram variant that binds uniform-block data on enter.

    NOTE(review): this class appears to be dead/unfinished code. Its
    ``__enter__`` references the undefined names ``size`` and ``data``
    (entering it would raise NameError), never calls ``glUseProgram`` to
    activate the program, and never uploads ``uniformData`` the way
    ``ShaderProgram.__enter__`` does. Confirm it is unused before relying
    on it, and fix or remove it before use.
    """

    def __enter__(self):
        ## Program should have a uniform block declared:
        ##
        ## layout (std140) uniform blockName {
        ##     vec4 diffuse;
        ##     vec4 ambient;
        ## };

        ## pick any-old binding point. (there are a limited number of these per-program
        bindPoint = 1

        ## get the block index for a uniform variable in the shader
        blockIndex = glGetUniformBlockIndex(self.program(), "blockName")

        ## give the shader block a binding point
        glUniformBlockBinding(self.program(), blockIndex, bindPoint)

        ## create a buffer
        buf = glGenBuffers(1)
        glBindBuffer(GL_UNIFORM_BUFFER, buf)
        # NOTE(review): ``size`` and ``data`` are undefined in this scope — NameError here.
        glBufferData(GL_UNIFORM_BUFFER, size, data, GL_DYNAMIC_DRAW)
        ## also possible to use glBufferSubData to fill parts of the buffer

        ## bind buffer to the same binding point
        glBindBufferBase(GL_UNIFORM_BUFFER, bindPoint, buf)
# Build and register the default shader programs at import time.
initShaders()
| {
"pile_set_name": "Github"
} |
declare namespace abp {

    /**
     * Performs an AJAX request using the framework's conventions.
     * Kept loosely typed for now — presumably takes jQuery ajax options and
     * returns a promise (see TODO below); confirm before tightening the types.
     */
    //TODO: Gets JQuery.AjaxOptions and returns JQuery.Promise
    function ajax(userOptions: any): any;
}
"pile_set_name": "Github"
} |
<!DOCTYPE html>
<html lang="en">
<head>
<title>ConnectedNodeDescription Protocol Reference</title>
<link rel="stylesheet" type="text/css" href="../css/jazzy.css" />
<link rel="stylesheet" type="text/css" href="../css/highlight.css" />
<meta charset='utf-8'>
<script src="../js/jquery.min.js" defer></script>
<script src="../js/jazzy.js" defer></script>
</head>
<body>
<a name="//apple_ref/swift/Protocol/ConnectedNodeDescription" class="dashAnchor"></a>
<a title="ConnectedNodeDescription Protocol Reference"></a>
<header>
<div class="content-wrapper">
<p><a href="../index.html">Katana Docs</a> (100% documented)</p>
<p class="header-right"><a href="https://github.com/BendingSpoons/katana-swift"><img src="../img/gh.png"/>View on GitHub</a></p>
</div>
</header>
<div class="content-wrapper">
<p id="breadcrumbs">
<a href="../index.html">Katana Reference</a>
<img id="carat" src="../img/carat.png" />
ConnectedNodeDescription Protocol Reference
</p>
</div>
<div class="content-wrapper">
<nav class="sidebar">
<ul class="nav-groups">
<li class="nav-group-name">
<a href="../Classes.html">Classes</a>
<ul class="nav-group-tasks">
<li class="nav-group-task">
<a href="../Classes/Node.html">Node</a>
</li>
<li class="nav-group-task">
<a href="../Classes/PlasticNode.html">PlasticNode</a>
</li>
<li class="nav-group-task">
<a href="../Classes/PlasticView.html">PlasticView</a>
</li>
<li class="nav-group-task">
<a href="../Classes/Renderer.html">Renderer</a>
</li>
<li class="nav-group-task">
<a href="../Classes/Store.html">Store</a>
</li>
<li class="nav-group-task">
<a href="../Classes/ViewsContainer.html">ViewsContainer</a>
</li>
</ul>
</li>
<li class="nav-group-name">
<a href="../Enums.html">Enums</a>
<ul class="nav-group-tasks">
<li class="nav-group-task">
<a href="../Enums/AnimationType.html">AnimationType</a>
</li>
<li class="nav-group-task">
<a href="../Enums/AsyncActionState.html">AsyncActionState</a>
</li>
<li class="nav-group-task">
<a href="../Enums.html#/s:O6Katana9EmptyKeys">EmptyKeys</a>
</li>
</ul>
</li>
<li class="nav-group-name">
<a href="../Extensions.html">Extensions</a>
<ul class="nav-group-tasks">
<li class="nav-group-task">
<a href="../Extensions/Array.html">Array</a>
</li>
<li class="nav-group-task">
<a href="../Extensions/CGSize.html">CGSize</a>
</li>
<li class="nav-group-task">
<a href="../Extensions/UIView.html">UIView</a>
</li>
<li class="nav-group-task">
<a href="../Extensions/UIView.html#/s:VE6KatanaCSo6UIView28UIViewDrawableContainerChild">– UIViewDrawableContainerChild</a>
</li>
</ul>
</li>
<li class="nav-group-name">
<a href="../Protocols.html">Protocols</a>
<ul class="nav-group-tasks">
<li class="nav-group-task">
<a href="../Protocols/Action.html">Action</a>
</li>
<li class="nav-group-task">
<a href="../Protocols/ActionWithSideEffect.html">ActionWithSideEffect</a>
</li>
<li class="nav-group-task">
<a href="../Protocols/AnyAction.html">AnyAction</a>
</li>
<li class="nav-group-task">
<a href="../Protocols/AnyActionWithSideEffect.html">AnyActionWithSideEffect</a>
</li>
<li class="nav-group-task">
<a href="../Protocols/AnyConnectedNodeDescription.html">AnyConnectedNodeDescription</a>
</li>
<li class="nav-group-task">
<a href="../Protocols/AnyNode.html">AnyNode</a>
</li>
<li class="nav-group-task">
<a href="../Protocols/AnyNodeDescription.html">AnyNodeDescription</a>
</li>
<li class="nav-group-task">
<a href="../Protocols/AnyNodeDescriptionProps.html">AnyNodeDescriptionProps</a>
</li>
<li class="nav-group-task">
<a href="../Protocols/AnyNodeDescriptionWithChildren.html">AnyNodeDescriptionWithChildren</a>
</li>
<li class="nav-group-task">
<a href="../Protocols/AnyPlasticNodeDescription.html">AnyPlasticNodeDescription</a>
</li>
<li class="nav-group-task">
<a href="../Protocols/AnyStore.html">AnyStore</a>
</li>
<li class="nav-group-task">
<a href="../Protocols/AsyncAction.html">AsyncAction</a>
</li>
<li class="nav-group-task">
<a href="../Protocols/Childrenable.html">Childrenable</a>
</li>
<li class="nav-group-task">
<a href="../Protocols/ConnectedNodeDescription.html">ConnectedNodeDescription</a>
</li>
<li class="nav-group-task">
<a href="../Protocols/DrawableContainer.html">DrawableContainer</a>
</li>
<li class="nav-group-task">
<a href="../Protocols.html#/s:P6Katana22DrawableContainerChild">DrawableContainerChild</a>
</li>
<li class="nav-group-task">
<a href="../Protocols/NodeDescription.html">NodeDescription</a>
</li>
<li class="nav-group-task">
<a href="../Protocols/NodeDescriptionProps.html">NodeDescriptionProps</a>
</li>
<li class="nav-group-task">
<a href="../Protocols/NodeDescriptionState.html">NodeDescriptionState</a>
</li>
<li class="nav-group-task">
<a href="../Protocols/NodeDescriptionWithChildren.html">NodeDescriptionWithChildren</a>
</li>
<li class="nav-group-task">
<a href="../Protocols/PlasticNodeDescription.html">PlasticNodeDescription</a>
</li>
<li class="nav-group-task">
<a href="../Protocols/PlasticReferenceSizeable.html">PlasticReferenceSizeable</a>
</li>
<li class="nav-group-task">
<a href="../Protocols/SideEffectDependencyContainer.html">SideEffectDependencyContainer</a>
</li>
<li class="nav-group-task">
<a href="../Protocols/State.html">State</a>
</li>
<li class="nav-group-task">
<a href="../Protocols/SyncAction.html">SyncAction</a>
</li>
</ul>
</li>
<li class="nav-group-name">
<a href="../Structs.html">Structs</a>
<ul class="nav-group-tasks">
<li class="nav-group-task">
<a href="../Structs/Anchor.html">Anchor</a>
</li>
<li class="nav-group-task">
<a href="../Structs/Anchor/Kind.html">– Kind</a>
</li>
<li class="nav-group-task">
<a href="../Structs/Animation.html">Animation</a>
</li>
<li class="nav-group-task">
<a href="../Structs/AnimationContainer.html">AnimationContainer</a>
</li>
<li class="nav-group-task">
<a href="../Structs/AnimationProps.html">AnimationProps</a>
</li>
<li class="nav-group-task">
<a href="../Structs/ChildrenAnimations.html">ChildrenAnimations</a>
</li>
<li class="nav-group-task">
<a href="../Structs/EdgeInsets.html">EdgeInsets</a>
</li>
<li class="nav-group-task">
<a href="../Structs/EmptyProps.html">EmptyProps</a>
</li>
<li class="nav-group-task">
<a href="../Structs/EmptySideEffectDependencyContainer.html">EmptySideEffectDependencyContainer</a>
</li>
<li class="nav-group-task">
<a href="../Structs/EmptyState.html">EmptyState</a>
</li>
<li class="nav-group-task">
<a href="../Structs/Size.html">Size</a>
</li>
<li class="nav-group-task">
<a href="../Structs/Value.html">Value</a>
</li>
</ul>
</li>
<li class="nav-group-name">
<a href="../Typealiases.html">Typealiases</a>
<ul class="nav-group-tasks">
<li class="nav-group-task">
<a href="../Typealiases.html#/s:6Katana25AnimationPropsTransformer">AnimationPropsTransformer</a>
</li>
<li class="nav-group-task">
<a href="../Typealiases.html#/s:6Katana20NodeUpdateCompletion">NodeUpdateCompletion</a>
</li>
</ul>
</li>
<li class="nav-group-name">
<a href="../Associated Types.html">Associated Types</a>
<ul class="nav-group-tasks">
<li class="nav-group-task">
<a href="../Associated Types.html#/s:P6Katana27NodeDescriptionWithChildren9PropsType">PropsType</a>
</li>
</ul>
</li>
</ul>
</nav>
<article class="main-content">
<section>
<section class="section">
<h1>ConnectedNodeDescription</h1>
<div class="declaration">
<div class="language">
<pre class="highlight"><code><span class="kd">public</span> <span class="kd">protocol</span> <span class="kt">ConnectedNodeDescription</span><span class="p">:</span> <span class="kt"><a href="../Protocols/AnyConnectedNodeDescription.html">AnyConnectedNodeDescription</a></span><span class="p">,</span> <span class="kt"><a href="../Protocols/NodeDescription.html">NodeDescription</a></span></code></pre>
</div>
</div>
<p>In applications developed with Katana, the application information is stored in a central Store.
There are cases where you want to take pieces of information from the central store and use them in your UI.
<code>ConnectedNodeDescription</code> is the protocol that is used to implement this behaviour.</p>
<p>By implementing this protocol in a description you get two behaviours: store information merging and automatic UI update.</p>
<p><a href='#merge-description-39-s-props-and-store-39-s-state' class='anchor' aria-hidden=true><span class="header-anchor"></span></a><h3 id='merge-description-39-s-props-and-store-39-s-state'>Merge description’s props and Store’s state</h3></p>
<p>Every time there is a UI update (e.g., because either props or state have changed), Katana allows you to
inject into the props information that is taken from the central Store. You can use the <code>connect</code>
method to implement this behaviour.</p>
<p><a href='#automatic-ui-update' class='anchor' aria-hidden=true><span class="header-anchor"></span></a><h3 id='automatic-ui-update'>Automatic UI update</h3></p>
<p>Every time the Store’s state changes, you may want to update the UI. When you adopt the <code>ConnectedNodeDescription</code>
protocol, Katana will trigger a UI update for all the nodes that are related to the description.
The system will search for all the nodes whose description implements this protocol. It will then
calculate the new props by invoking <code>connect</code>. If the properties have changed, the UI update is triggered.
In this way we are able to trigger UI changes only where and when needed.</p>
<div class="aside aside-see-also">
<p class="aside-title">See also</p>
<code><a href="../Classes/Store.html">Store</a></code>
</div>
</section>
<section class="section task-group-section">
<div class="task-group">
<ul>
<li class="item">
<div>
<code>
<a name="/s:ZFP6Katana24ConnectedNodeDescription7connectFT5propsRwx9PropsType2towx10StoreState_T_"></a>
<a name="//apple_ref/swift/Method/connect(props:to:)" class="dashAnchor"></a>
<a class="token" href="#/s:ZFP6Katana24ConnectedNodeDescription7connectFT5propsRwx9PropsType2towx10StoreState_T_">connect(props:to:)</a>
</code>
</div>
<div class="height-container">
<div class="pointer-container"></div>
<section class="section">
<div class="pointer"></div>
<div class="abstract">
<p>This method is used to update the properties with pieces of information taken from the
central Store state.</p>
<p>The idea of this method is that it takes the properties defined by the parent in the
<code>childrenDescriptions</code> method and the store state.
The implementation should update the props with all the information that are needed to properly
render the UI.</p>
</div>
<div class="declaration">
<h4>Declaration</h4>
<div class="language">
<p class="aside-title">Swift</p>
<pre class="highlight"><code><span class="kd">static</span> <span class="kd">func</span> <span class="nf">connect</span><span class="p">(</span><span class="nv">props</span><span class="p">:</span> <span class="k">inout</span> <span class="kt"><a href="../Associated Types.html#/s:P6Katana27NodeDescriptionWithChildren9PropsType">PropsType</a></span><span class="p">,</span> <span class="n">to</span> <span class="nv">storeState</span><span class="p">:</span> <span class="kt"><a href="../Protocols/ConnectedNodeDescription.html#/s:P6Katana24ConnectedNodeDescription10StoreState">StoreState</a></span><span class="p">)</span></code></pre>
</div>
</div>
<div>
<h4>Parameters</h4>
<table class="graybox">
<tbody>
<tr>
<td>
<code>
<em>props</em>
</code>
</td>
<td>
<div>
<p>the props defined by the parent</p>
</div>
</td>
</tr>
<tr>
<td>
<code>
<em>storeState</em>
</code>
</td>
<td>
<div>
<p>the state of the Store</p>
</div>
</td>
</tr>
</tbody>
</table>
</div>
<div class="slightly-smaller">
<a href="https://github.com/BendingSpoons/katana-swift/tree/0.2.0/Katana/Core/StoreConnection/ConnectedNodeDescription.swift#L59">Show on GitHub</a>
</div>
</section>
</div>
</li>
<li class="item">
<div>
<code>
<a name="/s:P6Katana24ConnectedNodeDescription10StoreState"></a>
<a name="//apple_ref/swift/Alias/StoreState" class="dashAnchor"></a>
<a class="token" href="#/s:P6Katana24ConnectedNodeDescription10StoreState">StoreState</a>
</code>
</div>
<div class="height-container">
<div class="pointer-container"></div>
<section class="section">
<div class="pointer"></div>
<div class="abstract">
<p>The State used in the application</p>
</div>
<div class="declaration">
<h4>Declaration</h4>
<div class="language">
<p class="aside-title">Swift</p>
<pre class="highlight"><code><span class="k">associatedtype</span> <span class="kt">StoreState</span><span class="p">:</span> <span class="kt"><a href="../Protocols/State.html">State</a></span></code></pre>
</div>
</div>
<div class="slightly-smaller">
<a href="https://github.com/BendingSpoons/katana-swift/tree/0.2.0/Katana/Core/StoreConnection/ConnectedNodeDescription.swift#L45">Show on GitHub</a>
</div>
</section>
</div>
</li>
</ul>
</div>
<div class="task-group">
<ul>
<li class="item">
<div>
<code>
<a name="/s:ZFE6KatanaPS_24ConnectedNodeDescription10anyConnectFT11parentPropsP_10storeStateP__P_"></a>
<a name="//apple_ref/swift/Method/anyConnect(parentProps:storeState:)" class="dashAnchor"></a>
<a class="token" href="#/s:ZFE6KatanaPS_24ConnectedNodeDescription10anyConnectFT11parentPropsP_10storeStateP__P_">anyConnect(parentProps:storeState:)</a>
</code>
<span class="declaration-note">
Extension method
</span>
</div>
<div class="height-container">
<div class="pointer-container"></div>
<section class="section">
<div class="pointer"></div>
<div class="abstract">
<p>Default implementation of <code>anyConnect</code>. It invokes <code><a href="../Protocols/ConnectedNodeDescription.html#/s:ZFP6Katana24ConnectedNodeDescription7connectFT5propsRwx9PropsType2towx10StoreState_T_">connect(props:to:)</a></code> by casting the parameters
to the proper types.</p>
<div class="aside aside-see-also">
<p class="aside-title">See also</p>
<code><a href="../Protocols/AnyConnectedNodeDescription.html">AnyConnectedNodeDescription</a></code>
</div>
</div>
<div class="declaration">
<h4>Declaration</h4>
<div class="language">
<p class="aside-title">Swift</p>
<pre class="highlight"><code><span class="kd">static</span> <span class="kd">func</span> <span class="nf">anyConnect</span><span class="p">(</span><span class="nv">parentProps</span><span class="p">:</span> <span class="kt">Any</span><span class="p">,</span> <span class="nv">storeState</span><span class="p">:</span> <span class="kt">Any</span><span class="p">)</span> <span class="o">-></span> <span class="kt">Any</span></code></pre>
</div>
</div>
<div class="slightly-smaller">
<a href="https://github.com/BendingSpoons/katana-swift/tree/0.2.0/Katana/Core/StoreConnection/ConnectedNodeDescription.swift#L69-L78">Show on GitHub</a>
</div>
</section>
</div>
</li>
</ul>
</div>
</section>
</section>
<section id="footer">
<p>© 2016 <a class="link" href="http://bendingspoons.com" target="_blank" rel="external">Bending Spoons Team</a>. All rights reserved. (Last updated: 2016-11-17)</p>
<p>Generated by <a class="link" href="https://github.com/realm/jazzy" target="_blank" rel="external">jazzy ♪♫ v0.7.2</a>, a <a class="link" href="http://realm.io" target="_blank" rel="external">Realm</a> project.</p>
</section>
</article>
</div>
</body>
</div>
</html>
| {
"pile_set_name": "Github"
} |
---
layout: "inner"
page_title: "Not Found"
noindex: true
description: |-
Page not found!
---
# Page Not Found
Sorry, the page you tried to visit does not exist. This could be our fault,
and if so we will fix it right away.
Please use your browser's back button to return to the previous page.
| {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple Computer//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>name</key>
<string>[Kuskus] Kusto Explorer (Dark)</string>
<key>settings</key>
<array>
<dict>
<key>settings</key>
<dict>
<key>background</key>
<string>#272822</string>
<key>caret</key>
<string>#F8F8F0</string>
<key>foreground</key>
<string>#F8F8F2</string>
<key>invisibles</key>
<string>#3B3A32</string>
<key>lineHighlight</key>
<string>#3E3D32</string>
<key>selection</key>
<string>#49483E</string>
<key>findHighlight</key>
<string>#FFE792</string>
<key>findHighlightForeground</key>
<string>#000000</string>
<key>selectionBorder</key>
<string>#222218</string>
<key>activeGuide</key>
<string>#9D550FB0</string>
<key>bracketsForeground</key>
<string>#F8F8F2A5</string>
<key>bracketsOptions</key>
<string>underline</string>
<key>bracketContentsForeground</key>
<string>#F8F8F2A5</string>
<key>bracketContentsOptions</key>
<string>underline</string>
<key>tagsOptions</key>
<string>stippled_underline</string>
</dict>
</dict>
<dict>
<key>name</key>
<string>Comment</string>
<key>scope</key>
<string>comment</string>
<key>settings</key>
<dict>
<key>foreground</key>
<string>#60804B</string>
</dict>
</dict>
<dict>
<key>name</key>
<string>String</string>
<key>scope</key>
<string>string</string>
<key>settings</key>
<dict>
<key>foreground</key>
<string>#C16E40</string>
</dict>
</dict>
<dict>
<key>name</key>
<string>Number</string>
<key>scope</key>
<string>constant.numeric</string>
<key>settings</key>
<dict>
<key>foreground</key>
<string><!-- NO SPECIAL COLOR --></string>
</dict>
</dict>
<dict>
<key>name</key>
<string>Keyword operators</string>
<key>scope</key>
<string>keyword.operator.special.kusto</string>
<key>settings</key>
<dict>
<key>foreground</key>
<string>#44C89E</string>
</dict>
</dict>
<dict>
<key>name</key>
<string>Keyword operators</string>
<key>scope</key>
<string>keyword.operator</string>
<key>settings</key>
<dict>
<key>foreground</key>
<string>#4E84D4</string>
</dict>
</dict>
<dict>
<key>name</key>
<string>Keyword control</string>
<key>scope</key>
<string>keyword.control</string>
<key>settings</key>
<dict>
<key>foreground</key>
<string>#4E84D4</string>
</dict>
</dict>
<dict>
<key>name</key>
<string>Storage type</string>
<key>scope</key>
<string>storage.type</string>
<key>settings</key>
<dict>
<key>foreground</key>
<string>#4E84D4</string>
</dict>
</dict>
<!-- TODO function parameter support in kusto.tmLanguage -->
<dict>
<key>name</key>
<string>Function arguments and parameters</string>
<key>scope</key>
<string>variable.parameter</string>
<key>settings</key>
<dict>
<key>foreground</key>
<string>#92CAEA</string>
</dict>
</dict>
<dict>
<key>name</key>
<string>Lambda definitions</string>
<key>scope</key>
<string>entity.function.name.lambda.kusto</string>
<key>settings</key>
<dict>
<key>foreground</key>
<string>#92CAEA</string>
</dict>
</dict>
<dict>
<key>name</key>
<string>Library function</string>
<key>scope</key>
<string>support.function</string>
<key>settings</key>
<dict>
<key>fontStyle</key>
<string></string>
<key>foreground</key>
<string>#4E84D4</string>
</dict>
</dict>
<dict>
<key>name</key>
<string>Invalid (deprecated, illegal)</string>
<key>scope</key>
<string>invalid</string>
<key>settings</key>
<dict>
<key>fontStyle</key>
<string>underline</string>
<key>foreground</key>
<string>#FF0000</string>
</dict>
</dict>
<!-- TODO thoughtful diff coloring -->
<dict>
<key>name</key>
<string>diff.header</string>
<key>scope</key>
<string>meta.diff, meta.diff.header</string>
<key>settings</key>
<dict>
<key>foreground</key>
<string>#75715E</string>
</dict>
</dict>
<dict>
<key>name</key>
<string>diff.deleted</string>
<key>scope</key>
<string>markup.deleted</string>
<key>settings</key>
<dict>
<key>foreground</key>
<string>#F92672</string>
</dict>
</dict>
<dict>
<key>name</key>
<string>diff.inserted</string>
<key>scope</key>
<string>markup.inserted</string>
<key>settings</key>
<dict>
<key>foreground</key>
<string>#A6E22E</string>
</dict>
</dict>
<dict>
<key>name</key>
<string>diff.changed</string>
<key>scope</key>
<string>markup.changed</string>
<key>settings</key>
<dict>
<key>foreground</key>
<string>#E6DB74</string>
</dict>
</dict>
</array>
<key>uuid</key>
<string>D8D5E82E-3D5B-46B5-B38E-8C841C21347D</string>
</dict>
</plist>
| {
"pile_set_name": "Github"
} |
loop create
set int ip address loop0 6.0.0.1/32
set int state loop0 up
set int state GigabitEthernet1b/0/0 up
set int ip address GigabitEthernet1b/0/0 192.168.1.1/24
create host-interface name vpp1
set int state host-vpp1 up
set int ip address host-vpp1 6.0.1.1/24
packet-generator new {
name udp
limit 512
rate 1e4
node ip4-input
size 100-100
interface loop0
data {
UDP: 192.168.1.2 - 192.168.2.255 -> 192.168.1.1
UDP: 4321 -> 1234
length 72
incrementing 100
}
}
session enable
| {
"pile_set_name": "Github"
} |
// SPDX-License-Identifier: GPL-2.0+
/*
* Rockchip AXI PCIe controller driver
*
* Copyright (c) 2018 Rockchip, Inc.
*
* Author: Shawn Lin <[email protected]>
*
*/
#ifndef _PCIE_ROCKCHIP_H
#define _PCIE_ROCKCHIP_H
#include <linux/kernel.h>
#include <linux/pci.h>
/*
* The upper 16 bits of PCIE_CLIENT_CONFIG are a write mask for the lower 16
* bits. This allows atomic updates of the register without locking.
*/
#define HIWORD_UPDATE(mask, val) (((mask) << 16) | (val))
#define HIWORD_UPDATE_BIT(val) HIWORD_UPDATE(val, val)
#define ENCODE_LANES(x) ((((x) >> 1) & 3) << 4)
#define MAX_LANE_NUM 4
#define MAX_REGION_LIMIT 32
#define MIN_EP_APERTURE 28
#define PCIE_CLIENT_BASE 0x0
#define PCIE_CLIENT_CONFIG (PCIE_CLIENT_BASE + 0x00)
#define PCIE_CLIENT_CONF_ENABLE HIWORD_UPDATE_BIT(0x0001)
#define PCIE_CLIENT_CONF_DISABLE HIWORD_UPDATE(0x0001, 0)
#define PCIE_CLIENT_LINK_TRAIN_ENABLE HIWORD_UPDATE_BIT(0x0002)
#define PCIE_CLIENT_ARI_ENABLE HIWORD_UPDATE_BIT(0x0008)
#define PCIE_CLIENT_CONF_LANE_NUM(x) HIWORD_UPDATE(0x0030, ENCODE_LANES(x))
#define PCIE_CLIENT_MODE_RC HIWORD_UPDATE_BIT(0x0040)
#define PCIE_CLIENT_MODE_EP HIWORD_UPDATE(0x0040, 0)
#define PCIE_CLIENT_GEN_SEL_1 HIWORD_UPDATE(0x0080, 0)
#define PCIE_CLIENT_GEN_SEL_2 HIWORD_UPDATE_BIT(0x0080)
#define PCIE_CLIENT_DEBUG_OUT_0 (PCIE_CLIENT_BASE + 0x3c)
#define PCIE_CLIENT_DEBUG_LTSSM_MASK GENMASK(5, 0)
#define PCIE_CLIENT_DEBUG_LTSSM_L1 0x18
#define PCIE_CLIENT_DEBUG_LTSSM_L2 0x19
#define PCIE_CLIENT_BASIC_STATUS1 (PCIE_CLIENT_BASE + 0x48)
#define PCIE_CLIENT_LINK_STATUS_UP 0x00300000
#define PCIE_CLIENT_LINK_STATUS_MASK 0x00300000
#define PCIE_CLIENT_INT_MASK (PCIE_CLIENT_BASE + 0x4c)
#define PCIE_CLIENT_INT_STATUS (PCIE_CLIENT_BASE + 0x50)
#define PCIE_CLIENT_INTR_MASK GENMASK(8, 5)
#define PCIE_CLIENT_INTR_SHIFT 5
#define PCIE_CLIENT_INT_LEGACY_DONE BIT(15)
#define PCIE_CLIENT_INT_MSG BIT(14)
#define PCIE_CLIENT_INT_HOT_RST BIT(13)
#define PCIE_CLIENT_INT_DPA BIT(12)
#define PCIE_CLIENT_INT_FATAL_ERR BIT(11)
#define PCIE_CLIENT_INT_NFATAL_ERR BIT(10)
#define PCIE_CLIENT_INT_CORR_ERR BIT(9)
#define PCIE_CLIENT_INT_INTD BIT(8)
#define PCIE_CLIENT_INT_INTC BIT(7)
#define PCIE_CLIENT_INT_INTB BIT(6)
#define PCIE_CLIENT_INT_INTA BIT(5)
#define PCIE_CLIENT_INT_LOCAL BIT(4)
#define PCIE_CLIENT_INT_UDMA BIT(3)
#define PCIE_CLIENT_INT_PHY BIT(2)
#define PCIE_CLIENT_INT_HOT_PLUG BIT(1)
#define PCIE_CLIENT_INT_PWR_STCG BIT(0)
#define PCIE_CLIENT_INT_LEGACY \
(PCIE_CLIENT_INT_INTA | PCIE_CLIENT_INT_INTB | \
PCIE_CLIENT_INT_INTC | PCIE_CLIENT_INT_INTD)
#define PCIE_CLIENT_INT_CLI \
(PCIE_CLIENT_INT_CORR_ERR | PCIE_CLIENT_INT_NFATAL_ERR | \
PCIE_CLIENT_INT_FATAL_ERR | PCIE_CLIENT_INT_DPA | \
PCIE_CLIENT_INT_HOT_RST | PCIE_CLIENT_INT_MSG | \
PCIE_CLIENT_INT_LEGACY_DONE | PCIE_CLIENT_INT_LEGACY | \
PCIE_CLIENT_INT_PHY)
#define PCIE_CORE_CTRL_MGMT_BASE 0x900000
#define PCIE_CORE_CTRL (PCIE_CORE_CTRL_MGMT_BASE + 0x000)
#define PCIE_CORE_PL_CONF_SPEED_5G 0x00000008
#define PCIE_CORE_PL_CONF_SPEED_MASK 0x00000018
#define PCIE_CORE_PL_CONF_LANE_MASK 0x00000006
#define PCIE_CORE_PL_CONF_LANE_SHIFT 1
#define PCIE_CORE_CTRL_PLC1 (PCIE_CORE_CTRL_MGMT_BASE + 0x004)
#define PCIE_CORE_CTRL_PLC1_FTS_MASK GENMASK(23, 8)
#define PCIE_CORE_CTRL_PLC1_FTS_SHIFT 8
#define PCIE_CORE_CTRL_PLC1_FTS_CNT 0xffff
#define PCIE_CORE_TXCREDIT_CFG1 (PCIE_CORE_CTRL_MGMT_BASE + 0x020)
#define PCIE_CORE_TXCREDIT_CFG1_MUI_MASK 0xFFFF0000
#define PCIE_CORE_TXCREDIT_CFG1_MUI_SHIFT 16
#define PCIE_CORE_TXCREDIT_CFG1_MUI_ENCODE(x) \
(((x) >> 3) << PCIE_CORE_TXCREDIT_CFG1_MUI_SHIFT)
#define PCIE_CORE_LANE_MAP (PCIE_CORE_CTRL_MGMT_BASE + 0x200)
#define PCIE_CORE_LANE_MAP_MASK 0x0000000f
#define PCIE_CORE_LANE_MAP_REVERSE BIT(16)
#define PCIE_CORE_INT_STATUS (PCIE_CORE_CTRL_MGMT_BASE + 0x20c)
#define PCIE_CORE_INT_PRFPE BIT(0)
#define PCIE_CORE_INT_CRFPE BIT(1)
#define PCIE_CORE_INT_RRPE BIT(2)
#define PCIE_CORE_INT_PRFO BIT(3)
#define PCIE_CORE_INT_CRFO BIT(4)
#define PCIE_CORE_INT_RT BIT(5)
#define PCIE_CORE_INT_RTR BIT(6)
#define PCIE_CORE_INT_PE BIT(7)
#define PCIE_CORE_INT_MTR BIT(8)
#define PCIE_CORE_INT_UCR BIT(9)
#define PCIE_CORE_INT_FCE BIT(10)
#define PCIE_CORE_INT_CT BIT(11)
#define PCIE_CORE_INT_UTC BIT(18)
#define PCIE_CORE_INT_MMVC BIT(19)
#define PCIE_CORE_CONFIG_VENDOR (PCIE_CORE_CTRL_MGMT_BASE + 0x44)
#define PCIE_CORE_INT_MASK (PCIE_CORE_CTRL_MGMT_BASE + 0x210)
#define PCIE_CORE_PHY_FUNC_CFG (PCIE_CORE_CTRL_MGMT_BASE + 0x2c0)
#define PCIE_RC_BAR_CONF (PCIE_CORE_CTRL_MGMT_BASE + 0x300)
#define ROCKCHIP_PCIE_CORE_BAR_CFG_CTRL_DISABLED 0x0
#define ROCKCHIP_PCIE_CORE_BAR_CFG_CTRL_IO_32BITS 0x1
#define ROCKCHIP_PCIE_CORE_BAR_CFG_CTRL_MEM_32BITS 0x4
#define ROCKCHIP_PCIE_CORE_BAR_CFG_CTRL_PREFETCH_MEM_32BITS 0x5
#define ROCKCHIP_PCIE_CORE_BAR_CFG_CTRL_MEM_64BITS 0x6
#define ROCKCHIP_PCIE_CORE_BAR_CFG_CTRL_PREFETCH_MEM_64BITS 0x7
#define PCIE_CORE_INT \
(PCIE_CORE_INT_PRFPE | PCIE_CORE_INT_CRFPE | \
PCIE_CORE_INT_RRPE | PCIE_CORE_INT_CRFO | \
PCIE_CORE_INT_RT | PCIE_CORE_INT_RTR | \
PCIE_CORE_INT_PE | PCIE_CORE_INT_MTR | \
PCIE_CORE_INT_UCR | PCIE_CORE_INT_FCE | \
PCIE_CORE_INT_CT | PCIE_CORE_INT_UTC | \
PCIE_CORE_INT_MMVC)
#define PCIE_RC_RP_ATS_BASE 0x400000
#define PCIE_RC_CONFIG_NORMAL_BASE 0x800000
#define PCIE_RC_CONFIG_BASE 0xa00000
#define PCIE_RC_CONFIG_RID_CCR (PCIE_RC_CONFIG_BASE + 0x08)
#define PCIE_RC_CONFIG_SCC_SHIFT 16
#define PCIE_RC_CONFIG_DCR (PCIE_RC_CONFIG_BASE + 0xc4)
#define PCIE_RC_CONFIG_DCR_CSPL_SHIFT 18
#define PCIE_RC_CONFIG_DCR_CSPL_LIMIT 0xff
#define PCIE_RC_CONFIG_DCR_CPLS_SHIFT 26
#define PCIE_RC_CONFIG_DCSR (PCIE_RC_CONFIG_BASE + 0xc8)
#define PCIE_RC_CONFIG_DCSR_MPS_MASK GENMASK(7, 5)
#define PCIE_RC_CONFIG_DCSR_MPS_256 (0x1 << 5)
#define PCIE_RC_CONFIG_LINK_CAP (PCIE_RC_CONFIG_BASE + 0xcc)
#define PCIE_RC_CONFIG_LINK_CAP_L0S BIT(10)
#define PCIE_RC_CONFIG_LCS (PCIE_RC_CONFIG_BASE + 0xd0)
#define PCIE_RC_CONFIG_L1_SUBSTATE_CTRL2 (PCIE_RC_CONFIG_BASE + 0x90c)
#define PCIE_RC_CONFIG_THP_CAP (PCIE_RC_CONFIG_BASE + 0x274)
#define PCIE_RC_CONFIG_THP_CAP_NEXT_MASK GENMASK(31, 20)
#define PCIE_CORE_AXI_CONF_BASE 0xc00000
#define PCIE_CORE_OB_REGION_ADDR0 (PCIE_CORE_AXI_CONF_BASE + 0x0)
#define PCIE_CORE_OB_REGION_ADDR0_NUM_BITS 0x3f
#define PCIE_CORE_OB_REGION_ADDR0_LO_ADDR 0xffffff00
#define PCIE_CORE_OB_REGION_ADDR1 (PCIE_CORE_AXI_CONF_BASE + 0x4)
#define PCIE_CORE_OB_REGION_DESC0 (PCIE_CORE_AXI_CONF_BASE + 0x8)
#define PCIE_CORE_OB_REGION_DESC1 (PCIE_CORE_AXI_CONF_BASE + 0xc)
#define PCIE_CORE_AXI_INBOUND_BASE 0xc00800
#define PCIE_RP_IB_ADDR0 (PCIE_CORE_AXI_INBOUND_BASE + 0x0)
#define PCIE_CORE_IB_REGION_ADDR0_NUM_BITS 0x3f
#define PCIE_CORE_IB_REGION_ADDR0_LO_ADDR 0xffffff00
#define PCIE_RP_IB_ADDR1 (PCIE_CORE_AXI_INBOUND_BASE + 0x4)
/* Size of one AXI Region (not Region 0) */
#define AXI_REGION_SIZE BIT(20)
/* Size of Region 0, equal to sum of sizes of other regions */
#define AXI_REGION_0_SIZE (32 * (0x1 << 20))
#define OB_REG_SIZE_SHIFT 5
#define IB_ROOT_PORT_REG_SIZE_SHIFT 3
#define AXI_WRAPPER_IO_WRITE 0x6
#define AXI_WRAPPER_MEM_WRITE 0x2
#define AXI_WRAPPER_TYPE0_CFG 0xa
#define AXI_WRAPPER_TYPE1_CFG 0xb
#define AXI_WRAPPER_NOR_MSG 0xc
#define MAX_AXI_IB_ROOTPORT_REGION_NUM 3
#define MIN_AXI_ADDR_BITS_PASSED 8
#define PCIE_RC_SEND_PME_OFF 0x11960
#define ROCKCHIP_VENDOR_ID 0x1d87
#define PCIE_ECAM_BUS(x) (((x) & 0xff) << 20)
#define PCIE_ECAM_DEV(x) (((x) & 0x1f) << 15)
#define PCIE_ECAM_FUNC(x) (((x) & 0x7) << 12)
#define PCIE_ECAM_REG(x) (((x) & 0xfff) << 0)
#define PCIE_ECAM_ADDR(bus, dev, func, reg) \
(PCIE_ECAM_BUS(bus) | PCIE_ECAM_DEV(dev) | \
PCIE_ECAM_FUNC(func) | PCIE_ECAM_REG(reg))
#define PCIE_LINK_IS_L2(x) \
(((x) & PCIE_CLIENT_DEBUG_LTSSM_MASK) == PCIE_CLIENT_DEBUG_LTSSM_L2)
#define PCIE_LINK_UP(x) \
(((x) & PCIE_CLIENT_LINK_STATUS_MASK) == PCIE_CLIENT_LINK_STATUS_UP)
#define PCIE_LINK_IS_GEN2(x) \
(((x) & PCIE_CORE_PL_CONF_SPEED_MASK) == PCIE_CORE_PL_CONF_SPEED_5G)
#define RC_REGION_0_ADDR_TRANS_H 0x00000000
#define RC_REGION_0_ADDR_TRANS_L 0x00000000
#define RC_REGION_0_PASS_BITS (25 - 1)
#define RC_REGION_0_TYPE_MASK GENMASK(3, 0)
#define MAX_AXI_WRAPPER_REGION_NUM 33
#define ROCKCHIP_PCIE_MSG_ROUTING_TO_RC 0x0
#define ROCKCHIP_PCIE_MSG_ROUTING_VIA_ADDR 0x1
#define ROCKCHIP_PCIE_MSG_ROUTING_VIA_ID 0x2
#define ROCKCHIP_PCIE_MSG_ROUTING_BROADCAST 0x3
#define ROCKCHIP_PCIE_MSG_ROUTING_LOCAL_INTX 0x4
#define ROCKCHIP_PCIE_MSG_ROUTING_PME_ACK 0x5
#define ROCKCHIP_PCIE_MSG_CODE_ASSERT_INTA 0x20
#define ROCKCHIP_PCIE_MSG_CODE_ASSERT_INTB 0x21
#define ROCKCHIP_PCIE_MSG_CODE_ASSERT_INTC 0x22
#define ROCKCHIP_PCIE_MSG_CODE_ASSERT_INTD 0x23
#define ROCKCHIP_PCIE_MSG_CODE_DEASSERT_INTA 0x24
#define ROCKCHIP_PCIE_MSG_CODE_DEASSERT_INTB 0x25
#define ROCKCHIP_PCIE_MSG_CODE_DEASSERT_INTC 0x26
#define ROCKCHIP_PCIE_MSG_CODE_DEASSERT_INTD 0x27
#define ROCKCHIP_PCIE_MSG_ROUTING_MASK GENMASK(7, 5)
#define ROCKCHIP_PCIE_MSG_ROUTING(route) \
(((route) << 5) & ROCKCHIP_PCIE_MSG_ROUTING_MASK)
#define ROCKCHIP_PCIE_MSG_CODE_MASK GENMASK(15, 8)
#define ROCKCHIP_PCIE_MSG_CODE(code) \
(((code) << 8) & ROCKCHIP_PCIE_MSG_CODE_MASK)
#define ROCKCHIP_PCIE_MSG_NO_DATA BIT(16)
#define ROCKCHIP_PCIE_EP_CMD_STATUS 0x4
#define ROCKCHIP_PCIE_EP_CMD_STATUS_IS BIT(19)
#define ROCKCHIP_PCIE_EP_MSI_CTRL_REG 0x90
#define ROCKCHIP_PCIE_EP_MSI_CTRL_MMC_OFFSET 17
#define ROCKCHIP_PCIE_EP_MSI_CTRL_MMC_MASK GENMASK(19, 17)
#define ROCKCHIP_PCIE_EP_MSI_CTRL_MME_OFFSET 20
#define ROCKCHIP_PCIE_EP_MSI_CTRL_MME_MASK GENMASK(22, 20)
#define ROCKCHIP_PCIE_EP_MSI_CTRL_ME BIT(16)
#define ROCKCHIP_PCIE_EP_MSI_CTRL_MASK_MSI_CAP BIT(24)
#define ROCKCHIP_PCIE_EP_DUMMY_IRQ_ADDR 0x1
#define ROCKCHIP_PCIE_EP_PCI_LEGACY_IRQ_ADDR 0x3
/* Per-function base offset within the endpoint's configuration space. */
#define ROCKCHIP_PCIE_EP_FUNC_BASE(fn)	(((fn) << 12) & GENMASK(19, 12))

/* Inbound: per-function, per-BAR translation address (low/high words). */
#define ROCKCHIP_PCIE_AT_IB_EP_FUNC_BAR_ADDR0(fn, bar) \
	(PCIE_RC_RP_ATS_BASE + 0x0840 + (fn) * 0x0040 + (bar) * 0x0008)
#define ROCKCHIP_PCIE_AT_IB_EP_FUNC_BAR_ADDR1(fn, bar) \
	(PCIE_RC_RP_ATS_BASE + 0x0844 + (fn) * 0x0040 + (bar) * 0x0008)

/*
 * Outbound region registers in the ATS block: each region r (0..31)
 * occupies a 0x20-byte stride.  PCI (bus-side) address, low/high words,
 * plus the devfn/bus fields packed into the low word.
 */
#define ROCKCHIP_PCIE_AT_OB_REGION_PCI_ADDR0(r) \
	(PCIE_RC_RP_ATS_BASE + 0x0000 + ((r) & 0x1f) * 0x0020)
#define ROCKCHIP_PCIE_AT_OB_REGION_PCI_ADDR0_DEVFN_MASK	GENMASK(19, 12)
#define ROCKCHIP_PCIE_AT_OB_REGION_PCI_ADDR0_DEVFN(devfn) \
	(((devfn) << 12) & \
	ROCKCHIP_PCIE_AT_OB_REGION_PCI_ADDR0_DEVFN_MASK)
#define ROCKCHIP_PCIE_AT_OB_REGION_PCI_ADDR0_BUS_MASK	GENMASK(27, 20)
#define ROCKCHIP_PCIE_AT_OB_REGION_PCI_ADDR0_BUS(bus) \
	(((bus) << 20) & ROCKCHIP_PCIE_AT_OB_REGION_PCI_ADDR0_BUS_MASK)
#define ROCKCHIP_PCIE_AT_OB_REGION_PCI_ADDR1(r) \
	(PCIE_RC_RP_ATS_BASE + 0x0004 + ((r) & 0x1f) * 0x0020)

/* Outbound region r: descriptor words 0 and 1. */
#define ROCKCHIP_PCIE_AT_OB_REGION_DESC0_HARDCODED_RID	BIT(23)
#define ROCKCHIP_PCIE_AT_OB_REGION_DESC0_DEVFN_MASK	GENMASK(31, 24)
#define ROCKCHIP_PCIE_AT_OB_REGION_DESC0_DEVFN(devfn) \
	(((devfn) << 24) & ROCKCHIP_PCIE_AT_OB_REGION_DESC0_DEVFN_MASK)
#define ROCKCHIP_PCIE_AT_OB_REGION_DESC0(r) \
	(PCIE_RC_RP_ATS_BASE + 0x0008 + ((r) & 0x1f) * 0x0020)
#define ROCKCHIP_PCIE_AT_OB_REGION_DESC1(r) \
	(PCIE_RC_RP_ATS_BASE + 0x000c + ((r) & 0x1f) * 0x0020)

/* Outbound region r: CPU-side address, low/high words. */
#define ROCKCHIP_PCIE_AT_OB_REGION_CPU_ADDR0(r) \
	(PCIE_RC_RP_ATS_BASE + 0x0018 + ((r) & 0x1f) * 0x0020)
#define ROCKCHIP_PCIE_AT_OB_REGION_CPU_ADDR1(r) \
	(PCIE_RC_RP_ATS_BASE + 0x001c + ((r) & 0x1f) * 0x0020)

/* Per-function BAR configuration registers; each function uses 8 bytes. */
#define ROCKCHIP_PCIE_CORE_EP_FUNC_BAR_CFG0(fn) \
	(PCIE_CORE_CTRL_MGMT_BASE + 0x0240 + (fn) * 0x0008)
#define ROCKCHIP_PCIE_CORE_EP_FUNC_BAR_CFG1(fn) \
	(PCIE_CORE_CTRL_MGMT_BASE + 0x0244 + (fn) * 0x0008)
/* Aperture field: bits 4:0 of each per-BAR byte (8-bit stride per BAR). */
#define ROCKCHIP_PCIE_CORE_EP_FUNC_BAR_CFG_BAR_APERTURE_MASK(b) \
	(GENMASK(4, 0) << ((b) * 8))
#define ROCKCHIP_PCIE_CORE_EP_FUNC_BAR_CFG_BAR_APERTURE(b, a) \
	(((a) << ((b) * 8)) & \
	ROCKCHIP_PCIE_CORE_EP_FUNC_BAR_CFG_BAR_APERTURE_MASK(b))
/* Control field: bits 7:5 of each per-BAR byte. */
#define ROCKCHIP_PCIE_CORE_EP_FUNC_BAR_CFG_BAR_CTRL_MASK(b) \
	(GENMASK(7, 5) << ((b) * 8))
#define ROCKCHIP_PCIE_CORE_EP_FUNC_BAR_CFG_BAR_CTRL(b, c) \
	(((c) << ((b) * 8 + 5)) & \
	ROCKCHIP_PCIE_CORE_EP_FUNC_BAR_CFG_BAR_CTRL_MASK(b))
/*
 * Driver-private state for one Rockchip PCIe controller.  Shared by the
 * root-complex and endpoint flavors of the driver (see the is_rc flag).
 */
struct rockchip_pcie {
	void __iomem *reg_base;		/* DT axi-base */
	void __iomem *apb_base;		/* DT apb-base */
	bool legacy_phy;		/* NOTE(review): presumably selects the
					 * older single-PHY binding — confirm */
	struct phy *phys[MAX_LANE_NUM];
	/* Reset lines */
	struct reset_control *core_rst;
	struct reset_control *mgmt_rst;
	struct reset_control *mgmt_sticky_rst;
	struct reset_control *pipe_rst;
	struct reset_control *pm_rst;
	struct reset_control *aclk_rst;
	struct reset_control *pclk_rst;
	/* Clocks */
	struct clk *aclk_pcie;
	struct clk *aclk_perf_pcie;
	struct clk *hclk_pcie;
	struct clk *clk_pcie_pm;
	/* Optional power supplies */
	struct regulator *vpcie12v; /* 12V power supply */
	struct regulator *vpcie3v3; /* 3.3V power supply */
	struct regulator *vpcie1v8; /* 1.8V power supply */
	struct regulator *vpcie0v9; /* 0.9V power supply */
	struct gpio_desc *ep_gpio;
	u32 lanes;
	u8 lanes_map;
	u8 root_bus_nr;
	int link_gen;
	struct device *dev;
	struct irq_domain *irq_domain;
	int offset;
	struct pci_bus *root_bus;
	/* I/O, memory and message windows (bus addresses and sizes) */
	struct resource *io;
	phys_addr_t io_bus_addr;
	u32 io_size;
	void __iomem *msg_region;
	u32 mem_size;
	phys_addr_t msg_bus_addr;
	phys_addr_t mem_bus_addr;
	bool is_rc;			/* root complex vs. endpoint mode */
	struct resource *mem_res;
};
/* Read the 32-bit register at offset @reg in the APB register space. */
static u32 rockchip_pcie_read(struct rockchip_pcie *rockchip, u32 reg)
{
	return readl(rockchip->apb_base + reg);
}

/* Write @val to the 32-bit register at offset @reg in the APB space. */
static void rockchip_pcie_write(struct rockchip_pcie *rockchip, u32 val,
				u32 reg)
{
	writel(val, rockchip->apb_base + reg);
}
/*
 * Shared controller setup/teardown helpers used by both the RC and EP
 * drivers (definitions live elsewhere in the driver).
 */
int rockchip_pcie_parse_dt(struct rockchip_pcie *rockchip);
int rockchip_pcie_init_port(struct rockchip_pcie *rockchip);
int rockchip_pcie_get_phys(struct rockchip_pcie *rockchip);
void rockchip_pcie_deinit_phys(struct rockchip_pcie *rockchip);
int rockchip_pcie_enable_clocks(struct rockchip_pcie *rockchip);
void rockchip_pcie_disable_clocks(void *data);
void rockchip_pcie_cfg_configuration_accesses(
		struct rockchip_pcie *rockchip, u32 type);

#endif /* _PCIE_ROCKCHIP_H */
| {
"pile_set_name": "Github"
} |
@extends('vendor.installer.layouts.master')

{{-- Final step of the installer: shows the migration/console/log/.env output. --}}

@section('template_title')
    {{ trans('installer_messages.final.templateTitle') }}
@endsection

@section('title')
    <i class="fa fa-flag-checkered fa-fw" aria-hidden="true"></i>
    {{ trans('installer_messages.final.title') }}
@endsection

@section('container')
    {{-- Database migration output, shown only if one was recorded in the session. --}}
    @if(session('message')['dbOutputLog'])
        <p><strong><small>{{ trans('installer_messages.final.migration') }}</small></strong></p>
        <pre><code>{{ session('message')['dbOutputLog'] }}</code></pre>
    @endif
    <p><strong><small>{{ trans('installer_messages.final.console') }}</small></strong></p>
    <pre><code>{{ $finalMessages }}</code></pre>
    <p><strong><small>{{ trans('installer_messages.final.log') }}</small></strong></p>
    <pre><code>{{ $finalStatusMessage }}</code></pre>
    <p><strong><small>{{ trans('installer_messages.final.env') }}</small></strong></p>
    <pre><code>{{ $finalEnvFile }}</code></pre>
    {{-- Leave the installer and enter the application. --}}
    <div class="buttons">
        <a href="{{ url('/') }}" class="button">{{ trans('installer_messages.final.exit') }}</a>
    </div>
@endsection
| {
"pile_set_name": "Github"
} |
<!DOCTYPE html>
<html lang="zh-CN">
<head lang="en">
    <meta charset="UTF-8">
    <meta name="viewport" content="width=device-width, initial-scale=1">
    <title>基本信息</title>
    <link href="../../static/bootstrap-3.3.5/css/bootstrap.min.css" rel="stylesheet" type="text/css"/>
    <link href="../../static/bootstrap-3.3.5/css/bootstrap-theme.min.css" rel="stylesheet" type="text/css"/>
    <!-- jQuery UI theme (Bootstrap flavored) -->
    <link type="text/css" href="../../static/jquery-ui-bootstrap/css/custom-theme/jquery-ui-1.10.0.custom.css" rel="stylesheet" />
</head>
<body>
<br>
<!-- Page heading -->
<div class="container">
    <h2 class="text-center">数据包基本信息</h2>
</div>
<br>
<!-- Filter form: posts filter_type + value back to the basedata view -->
<div class="container">
    <form class="form-inline" method="POST" action="{{ url_for('basedata') }}">
        <div class="form-group">
            <label for="proto">协议过滤 : </label>
            <select class="form-control" name="filter_type">
                <option value="all" selected>所 有</option>
                <option value="proto">协 议</option>
                <option value="ipsrc">源 地 址</option>
                <option value="ipdst">目 的 地 址</option>
            </select>
            <input type="text" class="form-control" id="proto" name="value">
        </div>
        <button type="submit" class="btn btn-success"> 过 滤 </button>
        <button type="submit" class="btn btn-info cancle"> 清 除 </button>
    </form>
</div>
<br>
<!-- Packet summary table: one row per captured packet rendered by Jinja2 -->
<div class="bs-example" data-example-id="hoverable-table">
    <table class="table table-hover table-responsive table-condensed table-striped">
        <thead>
        <tr>
            <th class="text-center">序号</th>
            <th class="text-center">时间</th>
            <th class="text-center">源地址</th>
            <th class="text-center">目的地址</th>
            <th class="text-center">协议</th>
            <th class="text-center">长度</th>
            <th class="text-left">概要信息</th>
        </tr>
        </thead>
        <tbody>
        {% for count, pcap in pcaps.items() %}
        <tr data-toggle="modal" data-target="#myModal">
            <th scope="row" class="text-center" id="id">{{ count }}</th>
            <td class="text-nowrap text-center">{{ pcap['time'] }}</td>
            <td class="text-center">{{ pcap['Source'] }}</td>
            <td class="text-center">{{ pcap['Destination'] }}</td>
            <!-- NOTE(review): 'Procotol' (sic) matches the key the backend
                 emits; renaming it requires a coordinated backend change -->
            <td class="text-center text-nowrap">{{ pcap['Procotol'] }}</td>
            <td class="text-center text-nowrap">{{ pcap['len'] }}</td>
            <td class="text-left text-nowrap">{{ pcap['info'] }}</td>
        </tr>
        {% endfor %}
        </tbody>
    </table>
</div>
<!-- Modal dialog: packet detail, body filled in via AJAX on row click -->
<div class="modal fade" id="myModal" tabindex="-1" role="dialog" aria-labelledby="myModalLabel"
     aria-hidden="true" style="width:600px;margin:auto; overflow:auto;word-break:break-all">
    <div class="modal-dialog">
        <div class="modal-content">
            <div class="modal-header">
                <button type="button" class="close" data-dismiss="modal" aria-hidden="true">×</button>
                <h4 class="modal-title" id="myModalLabel">数据包详情</h4>
            </div>
            <div class="modal-body">
                <div class="container-fluid">
                    <div class="accordion" id="accordion">
                        <div class="mydata"></div>
                    </div>
                </div>
            </div>
            <div class="modal-footer">
                <button type="button" class="btn btn-default" data-dismiss="modal">关闭</button>
                <button type="button" class="btn btn-info" data-dismiss="modal" onclick="window.location.href='{{ url_for('savepdf') }}'">保存为PDF</button>
            </div>
        </div>
    </div>
</div>
<script src="../../static/bootstrap-3.3.5/js/jquery-1.11.3.min.js" type="text/javascript"></script>
<script src="../../static/bootstrap-3.3.5/js/bootstrap.min.js" type="text/javascript"></script>
<script src="../../static/jquery-ui-bootstrap/assets/js/jquery-ui-1.10.0.custom.min.js" type="text/javascript"></script>
<!-- Clear all filter inputs -->
<script>
    $(".cancle").click(function(){
        $("input").val(null);
    })
</script>
<!-- Row click: fetch the packet detail for the clicked row via AJAX -->
<script>
    var id=0;
    $(function () {
        $("table > tbody > tr").click(function () {
            id=$(this).children('th').eq(0).text();
            $.ajax({
                type: "GET",// HTTP method
                url: "/datashow?id="+id,// request URL
                dataType: "html",// expected response type (json/js/html supported)
                cache: false,// disable response caching
                success: function(a) {
                    $(".mydata").html(a);
                }
            });
        });
    })
    // Export the currently shown packet as PDF (uses the same id as above).
    function savepdf()
    {
        $.ajax({
            type: "GET",// HTTP method
            url: "/savepdf?id="+id,// request URL
            dataType: "html",// expected response type
            cache: false,// disable response caching
        });
    }
</script>
<!-- Autocomplete for the protocol filter input -->
<script>
    var availableTags = ["TCP", "UDP", "ARP", "ICMP", "HTTP", "HTTPS", "DNS", "SSH", "Telnet", "POP3", "SNMP", "SMTP"];
    $("#proto").autocomplete({
        source: availableTags
    });
</script>
<!-- Make the modal draggable by its header -->
<script>
    $("#myModal").draggable({
        handle: ".modal-header"
    });
</script>
</body>
</html>
"pile_set_name": "Github"
} |
--- a/makefile.u
+++ b/makefile.u
@@ -69,7 +69,9 @@
OFILES = $(MISC) $(POW) $(CX) $(DCX) $(REAL) $(DBL) $(INT) \
$(HALF) $(CMP) $(EFL) $(CHAR) $(I77) $(TIME)
-all: f2c.h signal1.h sysdep1.h libf2c.a
+all: libf2c.a
+
+$(OFILES): f2c.h signal1.h sysdep1.h
libf2c.a: $(OFILES)
ar r libf2c.a $?
| {
"pile_set_name": "Github"
} |
<!doctype html>
<!-- jQuery UI demo: draggables that snap back to their start position -->
<html lang="en">
<head>
    <meta charset="utf-8">
    <title>jQuery UI Draggable - Revert position</title>
    <link rel="stylesheet" href="../../themes/base/jquery.ui.all.css">
    <script src="../../jquery-1.9.1.js"></script>
    <script src="../../ui/jquery.ui.core.js"></script>
    <script src="../../ui/jquery.ui.widget.js"></script>
    <script src="../../ui/jquery.ui.mouse.js"></script>
    <script src="../../ui/jquery.ui.draggable.js"></script>
    <link rel="stylesheet" href="../demos.css">
    <style>
    #draggable, #draggable2 { width: 100px; height: 100px; padding: 0.5em; float: left; margin: 0 10px 10px 0; }
    </style>
    <script>
    $(function() {
        // revert: true returns the element (or its clone helper) home on drop.
        $( "#draggable" ).draggable({ revert: true });
        $( "#draggable2" ).draggable({ revert: true, helper: "clone" });
    });
    </script>
</head>
<body>
<div id="draggable" class="ui-widget-content">
    <p>Revert the original</p>
</div>
<div id="draggable2" class="ui-widget-content">
    <p>Revert the helper</p>
</div>
<div class="demo-description">
    <p>Return the draggable (or it's helper) to its original location when dragging stops with the boolean <code>revert</code> option.</p>
</div>
</body>
</html>
| {
"pile_set_name": "Github"
} |
;;; -*- Mode: Lisp; Package: silica; -*-
;; See the file LICENSE for the full license governing this code.
;;
#|****************************************************************************
* *
* *
* This file implements the interface to the winwidget library. It includes *
* support for push buttons, toggle buttons, radio and check boxes, list *
* panes, option panes, sliders and menu bars. *
* *
* *
****************************************************************************|#
(in-package :silica)

;;mm: Some new mixins

;; Mixin that gives each mirrored gadget a small-integer gadget id (its
;; Windows control id) plus a table mapping ids back to their windows.
(defclass acl-gadget-id-mixin ()
  ((gadget-id :initform 0)
   (gadget-id->window-map :initform (make-array '(256)))))

;; Re-enable the native control when the gadget is activated.
(defmethod note-gadget-activated :after ((client t)
                                         (gadget acl-gadget-id-mixin))
  (let (m)
    (when (setq m (sheet-direct-mirror gadget))
      (win:EnableWindow m 1))))

;; Disable (grey out) the native control when the gadget is deactivated.
(defmethod note-gadget-deactivated :after ((client t)
                                           (gadget acl-gadget-id-mixin))
  (let (m)
    (when (setq m (sheet-direct-mirror gadget))
      (win:EnableWindow m 0))))

;; Move the Win32 keyboard focus to the gadget's native window.
(defmethod port-move-focus-to-gadget ((port acl-clim::acl-port)
                                      (gadget acl-gadget-id-mixin))
  (win:SetFocus (sheet-direct-mirror gadget)))
;;;acl-gadget-id-mixin

;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; list panes

;; CLIM list-pane mirrored by a native Windows LISTBOX control.
(defclass hlist-pane
    (acl-gadget-id-mixin
     mirrored-sheet-mixin
     list-pane
     sheet-permanently-enabled-mixin
     space-requirement-mixin
     basic-pane)
  ((x-margin :initform 0)
   (y-margin :initform 0))
  (:default-initargs :background +white+))

;; WM_COMMAND from the LISTBOX: the selection changed.  Query the
;; current index with LB_GETCURSEL and update GADGET-VALUE, invoking
;; the value-changed callback.  In :nonexclusive mode the clicked
;; item is toggled into or out of the value list.
(defmethod handle-event ((pane hlist-pane)
                         (event window-change-event))
  ;; Handle WM_COMMAND message.
  (let ((mirror (sheet-direct-mirror pane))
        (index 0))
    (when mirror
      (setf index (acl-clim::frame-send-message (pane-frame pane)
                                                mirror win:LB_GETCURSEL 0 0))
      (with-slots (items value mode value-key name-key) pane
        (ecase mode
          (:exclusive
           (setf (gadget-value pane :invoke-callback t)
                 (funcall value-key (elt items index))))
          (:nonexclusive
           (let ((new (funcall value-key (elt items index))))
             (setf (gadget-value pane :invoke-callback t)
                   (if (member new (gadget-value pane))
                       (remove new (gadget-value pane))
                       (push new (gadget-value pane)))))))))))
;; Keep the native LISTBOX selection in sync when the gadget value is
;; set programmatically.  :nonexclusive panes select/deselect each item
;; individually via LB_SELITEMRANGE; :exclusive panes set the single
;; current selection (LB_SETCURSEL with -1 clears it).
(defmethod (setf gadget-value) :before
           (value (pane hlist-pane) &key invoke-callback)
  (declare (ignore invoke-callback))
  (with-slots (mode items value-key test) pane
    (let ((hwnd (sheet-direct-mirror pane)))
      (when hwnd
        (if (eq mode :nonexclusive)
            (let ((i 0))
              (dolist (item items)
                (acl-clim::frame-send-message
                 (pane-frame pane)
                 ;; what's the "correct" way of passing
                 ;; both lo and hi parts without
                 ;; combining them with an ash? (cim 9/20/96)
                 hwnd win:LB_SELITEMRANGE
                 (if (member (funcall value-key item)
                             value :test test)
                     1
                     0)
                 (+ i (ash i 16)))
                (incf i)))
            (let ((i (position value items
                               :key value-key :test test)))
              (acl-clim::frame-send-message
               (pane-frame pane)
               hwnd win:LB_SETCURSEL (or i -1) 0)))))))

;; Pointer events are handled entirely by the native control; just
;; release the event object.
(defmethod handle-event :after ((pane hlist-pane) (event pointer-event))
  (deallocate-event event))

;; Default width of a list pane when there are no items to measure.
(defvar *default-list-pane-width* '(8 :character))
;; Lower bound on a list pane's height.
(defvar *min-list-pane-height* '(1 :line))
;; Measure the widest item name and the tallest item line of a set
;; gadget, in pixels.  When there are no items to measure, fall back to
;; *DEFAULT-LIST-PANE-WIDTH* / *MIN-LIST-PANE-HEIGHT*.
;; Returns (values name-width line-height).
(defmethod compute-set-gadget-dimensions ((pane set-gadget-mixin))
  (let ((name-width 0)
        (tsh 0))
    (with-slots (items name-key) pane
      (dolist (item items)
        (let ((name (funcall name-key item)))
          (setq name-width (max name-width
                                (process-width-specification pane name)))
          (setq tsh (max tsh (process-height-specification pane name)))))
      (when (and (zerop tsh) (zerop name-width))
        (setq name-width (process-width-specification pane *default-list-pane-width*)
              tsh (process-height-specification pane *min-list-pane-height*))))
    (values name-width tsh)))
;; Space requirement for a list pane: width from the initial space
;; requirement, the widest item, or the default; height from the
;; initial space requirement, VISIBLE-ITEMS, the item count, or the
;; minimum.  Interior scrollbar thickness is added when the enclosing
;; scroller-pane's policy calls for one.
(defmethod compose-space ((pane hlist-pane) &key width height)
  (declare (ignore width height))
  ;; Note that hlists (like text-editors, whose compose-space method
  ;; served as a template for this) are scrolled by being given a
  ;; scroller-pane as a parent, but they have their own "interior"
  ;; scrollbars.
  (with-slots (initial-space-requirement items visible-items) pane
    (let* ((par (sheet-parent pane))
           (scroll-mode (and (acl-clim::scroller-pane-p par)
                             (scroller-pane-scroll-bar-policy par)))
           (name-width (compute-set-gadget-dimensions pane)))
      (let ((w 0)
            (min-w (process-width-specification pane '(8 :character)))
            (h 0)
            (min-h (process-height-specification pane *min-list-pane-height*)))
        ;; WIDTH
        (cond ((and initial-space-requirement
                    (plusp (process-width-specification
                            pane (space-requirement-width initial-space-requirement))))
               ;; This is where accepting-values views factors in.
               (setq w (max (process-width-specification
                             pane (space-requirement-width initial-space-requirement))
                            min-w)))
              (items
               (setq w name-width))
              (t
               (setq w (process-width-specification
                        pane *default-list-pane-width*))))
        (when (member scroll-mode '(:vertical :both t :dynamic))
          ;; Allow for the vertical scrollbar
          (let ((wsty (win-scroll-thick :y)))
            (setq min-w (+ min-w wsty))
            (setq w (+ w wsty))))
        (setq w (max w min-w))
        ;; HEIGHT
        (cond ((and initial-space-requirement
                    (plusp (process-height-specification
                            pane (space-requirement-height initial-space-requirement))))
               ;; This is where accepting-values views factors in.
               (setq h (max (process-height-specification
                             pane (space-requirement-height initial-space-requirement))
                            (if visible-items
                                (process-height-specification pane `(,visible-items :line))
                                0)
                            min-h)))
              (visible-items
               (setq h (process-height-specification pane `(,visible-items :line))))
              (items
               (setq h (max (process-height-specification pane `(,(length items) :line))
                            min-h)))
              (t
               (setq h min-h)))
        (when (member scroll-mode '(:horizontal :both t :dynamic))
          (let ((wstx (win-scroll-thick :x)))
            ;; Allow for the horizontal scrollbar
            (setq min-h (+ min-h wstx)
                  h (+ h wstx))))
        (setq h (max h min-h))
        (make-space-requirement
         :width w :min-width min-w
         :height h :min-height min-h)))))
(defmethod (setf set-gadget-items) :before (items (pane hlist-pane))
  ;; When items are set in an hlist-pane the mirror must be
  ;; made to update its appearance appropriately.
  (with-slots (name-key mirror) pane
    (when mirror
      ;; Empty the LISTBOX, then re-insert every item's display name.
      (acl-clim::frame-send-message
       (pane-frame pane)
       mirror win:LB_RESETCONTENT 0 0)
      (dolist (item items)
        (let ((str (funcall name-key item))
              (pos (position item items)))
          (excl:with-native-string (str str)
            (acl-clim::frame-send-message
             (pane-frame pane)
             mirror win:LB_INSERTSTRING pos str))))
      (win:InvalidateRect mirror ct:hnull win:TRUE)))) ;; make sure it updates

;; On LBN_SELCHANGE, grab the keyboard focus before running the normal
;; command processing so the list box behaves like a focusable gadget.
(defmethod acl-clim::command-event :around ((gadget hlist-pane)
                                            port sheet wparam lparam)
  (let ((notifycode (acl-clim::hiword wparam)))
    (when (= notifycode win:LBN_SELCHANGE)
      ;; Selection in list box is about to change.
      (win:SetFocus (sheet-mirror sheet))
      (call-next-method gadget port sheet wparam lparam))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; mswin-text-edit

;; Multi-line text-editing gadget mirrored by a native Windows EDIT
;; control.
(defclass mswin-text-edit (acl-gadget-id-mixin
                           mirrored-sheet-mixin
                           text-field
                           sheet-permanently-enabled-mixin
                           space-requirement-mixin
                           basic-pane)
  ((external-label :initarg :external-label)
   (depth :initarg :depth)
   (x-margin :initarg :x-margin)
   (y-margin :initarg :y-margin)
   ;; We probably should be inheriting from text-editor,
   ;; not text-field.
   (ncolumns :initarg :ncolumns
             :accessor gadget-columns)
   (nlines :initarg :nlines
           :accessor gadget-lines)
   (word-wrap :initarg :word-wrap
              :accessor gadget-word-wrap))
  (:default-initargs
   :text-style nil
   :background +white+
   :external-label nil
   :x-margin 2 :y-margin 2
   ;; needed for text-editor
   :ncolumns nil
   :nlines nil
   :editable-p t
   :word-wrap nil))

;; The native EDIT control repaints itself; nothing to draw here.
(defmethod handle-repaint ((pane mswin-text-edit) region)
  (declare (ignore region))
  nil)
;; Convert a width specification to pixels.  Accepts (N :character)
;; (N characters in the pane's default text style), a sample string
;; (measured with TEXT-SIZE), or a raw number, adding the pane's
;; x-margin plus a fixed border on each side.
(defun process-width-specification (sheet width)
  ;; text-editor is one of the few things that you can specify:
  ;; :width '(4 :character)
  (typecase width
    (cons
     (assert (eq (second width) :character))
     (with-sheet-medium (medium sheet)
       (let* ((nchars (first width))
              (style (medium-default-text-style medium))
              (style-width (text-style-width style medium))
              (margin (slot-value sheet 'x-margin))
              (border 4))
         (+ border margin (* style-width nchars) margin border))))
    (string
     (with-sheet-medium (medium sheet)
       (let ((w
              (text-size sheet width
                         :text-style (medium-default-text-style medium)))
             (border 4)
             (margin (slot-value sheet 'x-margin)))
         (+ border margin w margin border))))
    (otherwise width)))

;; Convert a height specification to pixels.  Accepts (N :line), a
;; sample string, or a raw number, adding the y-margin and border.
(defun process-height-specification (sheet height)
  ;; text-editor is one of the few things that you can specify:
  ;; :height '(4 :line)
  (typecase height
    (cons
     (assert (eq (second height) :line))
     (with-sheet-medium (medium sheet)
       (let* ((nlines (first height))
              (style (medium-default-text-style medium))
              (style-height (text-style-height style medium))
              (margin (slot-value sheet 'y-margin))
              (border 2))
         (+ border margin (* style-height nlines) margin border))))
    (string
     (with-sheet-medium (medium sheet)
       (multiple-value-bind (w h)
           (text-size sheet height
                      :text-style (medium-default-text-style medium))
         (declare (ignore w))
         (let ((border 2)
               (margin (slot-value sheet 'y-margin)))
           (+ border margin h margin border)))))
    (otherwise height)))
;; Kludge. Sorry. This is a workaround for the fact that
;; the min height in a +text-field-view+ is zero. At some
;; point, lets try modifying +text-field-view+ and
;; +text-editor-view+ to supply better default sizes than zero.
;;
;; 2007-09-03/afuchs: This used to be *min-text-field-width. The
;; minimum width was computed twice. So far, this didn't work too well
;; for text boxes in accepting-values panes that should be smaller
;; than 75 pixels. min-w seems to work better, so let's use that
;; instead, and use 75 as a default width.

;; Fallback pixel width for text fields with no other width source.
(defvar *default-text-field-width* 75)
;; Lower bound on a text field's height.
(defvar *min-text-field-height* '(1 :line))
;; Space requirement for a multi-line text editor, in pixels.  Width
;; comes from (in priority order) the initial space requirement,
;; NCOLUMNS, the current string value, or *DEFAULT-TEXT-FIELD-WIDTH*;
;; height from the initial space requirement, NLINES, the value, or
;; *MIN-TEXT-FIELD-HEIGHT*.  Interior scrollbar thickness is added when
;; the enclosing scroller-pane's policy calls for one.
(defmethod compose-space ((pane mswin-text-edit) &key width height)
  (declare (ignore width height))
  ;; Note that text-editors are scrolled by being given
  ;; a scroller-pane as a parent, but they have their own
  ;; "interior" scrollbars (this is different than text-fields.)
  (with-slots (x-margin y-margin initial-space-requirement nlines ncolumns) pane
    (let* ((par (sheet-parent pane))
           (scroll-mode (and (acl-clim::scroller-pane-p par)
                             (scroller-pane-scroll-bar-policy par))))
      (let ((w 0)
            (min-w (process-width-specification pane '(1 :character)))
            (h 0)
            (value (gadget-value pane))
            (min-h (process-height-specification pane *min-text-field-height*)))
        ;; WIDTH
        (cond ((and initial-space-requirement
                    (plusp (process-width-specification
                            pane (space-requirement-width initial-space-requirement))))
               ;; This is where accepting-values views factors in.
               (setq w (max (process-width-specification
                             pane (space-requirement-width initial-space-requirement))
                            min-w)))
              (ncolumns
               (setq w (process-width-specification pane `(,ncolumns :character))))
              ((stringp value)
               (setq w (max (process-width-specification pane value)
                            *default-text-field-width*)))
              (t
               (setq w *default-text-field-width*)))
        ;; BUG FIX: this previously tested :horizontal, so no width was
        ;; ever reserved for the *vertical* scrollbar being added below
        ;; (wsty is the y-scrollbar thickness).  The hlist-pane
        ;; compose-space above tests :vertical at the same spot.
        (when (member scroll-mode '(:vertical :both t :dynamic))
          ;; Allow for the vertical scrollbar
          (let ((wsty (win-scroll-thick :y)))
            (setq min-w (+ min-w wsty))
            (setq w (+ w wsty))))
        (setq w (max w min-w))
        ;; HEIGHT
        (cond ((and initial-space-requirement
                    (plusp (process-height-specification
                            pane (space-requirement-height initial-space-requirement))))
               ;; This is where accepting-values views factors in.
               (setq h (max (process-height-specification
                             pane (space-requirement-height initial-space-requirement))
                            (if nlines
                                (process-height-specification pane `(,nlines :line))
                                0)
                            min-h)))
              (nlines
               (setq h (process-height-specification pane `(,nlines :line))))
              ((stringp value)
               (setq h (max (process-height-specification pane value)
                            min-h)))
              (t
               (setq h min-h)))
        (when (member scroll-mode '(:horizontal :both t :dynamic))
          (let ((wstx (win-scroll-thick :x)))
            ;; Allow for the horizontal scrollbar
            (setq min-h (+ min-h wstx)
                  h (+ h wstx))))
        (setq h (max h min-h))
        (make-space-requirement
         :width w :min-width min-w
         :height h :min-height min-h)))))
;; Nothing extra to do at init time; the BACKGROUND and LABEL initargs
;; are accepted but handled elsewhere.
(defmethod initialize-instance :after ((sheet mswin-text-edit)
                                       &key background label)
  (declare (ignore background label))
  nil)

;; WM_COMMAND from the EDIT control: the buffer may have changed.
;; Propagate the control's text into GADGET-VALUE, invoking the
;; value-changed callback only when it actually differs.
(defmethod handle-event ((pane mswin-text-edit)
                         (event window-change-event))
  ;; Handle WM_COMMAND event
  ;; This event occurs when the control receives keystrokes that can
  ;; be entered into the buffer.
  (let ((old (slot-value pane 'value))
        (new (gadget-value pane)))
    (unless (equal old new)
      (setf (gadget-value pane :invoke-callback t) new))))

;; RETURN pressed: fire the activate callback, then yield the keyboard
;; focus back to the active window.
(defmethod handle-event ((pane mswin-text-edit) (event key-press-event))
  ;; This event occurs when a text-field receives a RETURN character.
  (activate-callback pane (gadget-client pane) (gadget-id pane))
  (let ((mirror (sheet-direct-mirror pane)))
    (declare (ignore mirror))
    ;; Give up the focus
    (win:SetFocus (win:GetActiveWindow))))
(defun xlat-newline-return (str &optional (convert-to-foreign nil))
  ;; Given a Lisp string, create an equivalent C string.
  ;; Replace Newline with Return&Newline (CRLF, as Windows edit
  ;; controls expect).  Returns (values result length).  When
  ;; CONVERT-TO-FOREIGN is true the result is a null-terminated
  ;; foreign string allocated with the ACL FFI; otherwise it is a
  ;; Lisp string (possibly STR itself when no Newlines occur).
  (let ((nl 0)
        (subsize (length str))
        (string nil)
        (cstr nil)
        (pos 0))
    ;; Count the Newlines so we know how much extra room the CRs need.
    (dotimes (i subsize)
      (when (char= (char str i) #\Newline)
        (incf nl)))
    (cond (convert-to-foreign
           ;; This doesn't do the right thing for international lisp.
           ;; Use excl:with-native-string for that, and phase this out.
           ;; Unfortunately, excl:with-native-string ignores the
           ;; issue of Newline.
           (setq cstr (ct:callocate (:char *) :size (+ 1 nl subsize)))
           (dotimes (i subsize)
             (when (char= (char str i) #\Newline)
               (ct:cset (:char 256) cstr ((fixnum pos)) (char-int #\Return))
               (incf pos))
             (ct:cset (:char 256) cstr ((fixnum pos)) (char-int (char str i)))
             (incf pos))
           ;; terminate with null
           (ct:cset (:char 256) cstr ((fixnum pos)) 0)
           (values cstr pos))
          ((zerop nl)
           ;; No Newlines: the original string is already correct.
           (values str subsize))
          (t
           (setq string (make-string (+ nl subsize)))
           (dotimes (i subsize)
             (when (char= (char str i) #\newline)
               (setf (char string pos) #\return)
               (incf pos))
             (setf (char string pos) (char str i))
             (incf pos))
           (values string pos)))))
(defun unxlat-newline-return (str)
  ;; Inverse of XLAT-NEWLINE-RETURN: take text that came back from a
  ;; Windows control and drop every #\Return character, so CR/LF pairs
  ;; collapse to plain #\Newline.  Always returns a fresh string; STR
  ;; is not modified.
  (with-output-to-string (out)
    (dotimes (i (length str))
      (let ((ch (char str i)))
        (unless (char= ch #\Return)
          (write-char ch out))))))
;;; Set the selection in a mswin-text-edit gadget.
;;; For endpos = -1 means the end;
;;; as a result, startpos = 0 and endpos = -1 means "select all".
(defmethod set-selection ((pane mswin-text-edit) startpos endpos)
  (let ((mirror (sheet-direct-mirror pane)))
    (when mirror
      (let* ((val (slot-value pane 'value))
             ;; The control stores Newline as CR+LF (see
             ;; XLAT-NEWLINE-RETURN), so each Newline counts twice.
             (val-len (if val
                          (+ (length val)
                             (count #\newline val))
                          0)))
        ;; For endpos = -1, means the end.
        (when (= endpos -1)
          (setq endpos val-len))
        ;; Ensure the correct order.
        (when (< endpos startpos)
          (let ((temp endpos))
            (setf endpos startpos
                  startpos temp)))
        ;; Otherwise, trim to fit.
        (when (< val-len endpos)
          (setq endpos val-len))
        (when (< val-len startpos)
          (setq startpos val-len))
        (when (< startpos 0)
          (setq startpos 0))
        (acl-clim::frame-send-message (pane-frame pane)
                                      mirror
                                      win:EM_SETSEL
                                      startpos endpos)))))
;;; Retrieve the start and end position of the
;;; selection in a mswin-text-edit gadget.
(defmethod get-selection ((pane mswin-text-edit))
  (declare (values startpos endpos))
  (let ((mirror (sheet-direct-mirror pane)))
    (cond (mirror
           (let ((startptr
                  (make-array 1 :element-type 'acl-clim::unsigned-nat
                              :initial-element 0))
                 (endptr (make-array 1 :element-type 'acl-clim::unsigned-nat
                                     :initial-element 0)))
             ;; EM_GETSEL fills the two one-element result arrays.
             (acl-clim::frame-send-message (pane-frame pane)
                                           mirror
                                           win:EM_GETSEL
                                           startptr endptr)
             (values (aref startptr 0)
                     (aref endptr 0))))
          (t
           ;; No native window yet: report an empty selection at 0.
           (values 0 0)))))

;;; Set the position of the caret in the mswin-text-edit gadget.
;;; NOTE: This uses set-selection, which in turn uses the
;;; mechanism for setting the selected-text (with startpos and
;;; endpos the same).  This is how the caret is set on MS
;;; text-gadgets.
(defmethod set-caret-pos ((pane mswin-text-edit) pos)
  (set-selection pane pos pos))
;; Push a programmatic value change down to the native control while
;; preserving the scroll position and avoiding flicker.
(defmethod (setf gadget-value) :before (new (pane mswin-text-edit)
                                        &key invoke-callback)
  (declare (ignore invoke-callback))
  (let ((mirror (sheet-direct-mirror pane))
        (leftchar 0)
        (topline 0))
    (unless (equal (gadget-value pane) new)
      (setf (slot-value pane 'value) new)
      (when mirror
        ;; Remember the topmost visible line before replacing the text.
        (setq topline (acl-clim::frame-send-message
                       (pane-frame pane)
                       mirror win:EM_GETFIRSTVISIBLELINE 0 0))
        ;; Disable redraw to avoid flicker.
        (acl-clim::frame-send-message
         (pane-frame pane) mirror win:WM_SETREDRAW 0 0)
        ;; Set the new-value.
        (excl:with-native-string (s1 new)
          (win:SetWindowText mirror s1))
        ;; Removed some code here.
        ;; spr30683 (alemmens, 2005-11-30)
        ;; Try to preserve the scroll position:
        (acl-clim::frame-send-message
         (pane-frame pane) mirror win:EM_LINESCROLL leftchar topline)
        ;; Enable redraw
        (acl-clim::frame-send-message
         (pane-frame pane) mirror win:WM_SETREDRAW 1 0)
        ;; Force redraw
        (win:InvalidateRect mirror 0 win:TRUE)
        (acl-clim::frame-update-window (pane-frame pane) mirror)))))
;; Read the gadget's value.  With a live mirror the text is fetched
;; from the native control; otherwise the cached VALUE slot is used.
;; Returns (values string length).
(defmethod gadget-value ((pane mswin-text-edit))
  (with-slots (mirror value) pane
    (if mirror
        (let* ((length (acl-clim::frame-send-message (pane-frame pane)
                                                     mirror
                                                     win:WM_GETTEXTLENGTH
                                                     0 0))
               (buffer (make-array length :element-type '(unsigned-byte 8))))
          (win:GetWindowText mirror buffer (1+ length))
          ;; Don't call unxlat-newline-return.
          ;; spr30683 (alemmens, 2005-11-30)
          (let ((result (excl:mb-to-string buffer)))
            ;; By the way, does anyone know why
            ;; the second value is returned? -smh
            (values result (length result))))
        (values value (if (stringp value) (length value) 0)))))
;; Return the currently selected substring of the control's text (with
;; CRLF folded back to Newline), or NIL when there is no native mirror.
(defmethod gadget-current-selection ((pane mswin-text-edit))
  (let ((mirror (sheet-direct-mirror pane)))
    (when mirror
      (let* ((wl (acl-clim::frame-send-message (pane-frame pane)
                                               mirror
                                               win:WM_GETTEXTLENGTH
                                               0 0))
             (teb (make-array wl :element-type '(unsigned-byte 8)))
             (tlen (win:GetWindowText mirror teb (1+ wl)))
             (startptr (make-array 1 :element-type 'acl-clim::unsigned-nat
                                   :initial-element 0))
             (endptr (make-array 1 :element-type 'acl-clim::unsigned-nat
                                  :initial-element 0)))
        (declare (ignorable tlen))
        ;; EM_GETSEL yields the selection bounds into the two arrays.
        (acl-clim::frame-send-message (pane-frame pane)
                                      mirror
                                      win:EM_GETSEL
                                      startptr endptr)
        (unxlat-newline-return
         (excl:mb-to-string teb
                            :start (aref startptr 0)
                            :end (aref endptr 0)))))))
;; Window-style flags for the native multi-line EDIT control backing a
;; mswin-text-edit.
(defmethod text-edit-flags ((sheet mswin-text-edit))
  (logior
   (if (gadget-editable-p sheet) 0 win:ES_READONLY)
   ;; With word wrap off the control scrolls horizontally instead.
   (if (gadget-word-wrap sheet) 0 win:ES_AUTOHSCROLL)
   win:ES_LEFT win:WS_BORDER
   win:ES_MULTILINE win:ES_AUTOVSCROLL))
;; Create the native EDIT window for a text-edit gadget: open the
;; control, record it under the gadget id, apply the pane's text
;; style, password echo character, and margins, then show it.
(defmethod realize-mirror ((port acl-clim::acl-port) (sheet mswin-text-edit))
  (multiple-value-bind (left top right bottom)
      (sheet-native-region* sheet)
    (fix-coordinates left top right bottom)
    (let* ((parent (sheet-mirror sheet))
           (parent2 (sheet-mirror (sheet-parent sheet)))
           (window nil)
           (width (- right left))
           (height (- bottom top))
           (gadget-id (allocate-gadget-id sheet)))
      (assert (eq parent parent2) () "parents don't match!")
      (setq window
            (acl-clim::hedit-open parent gadget-id
                                  left top width height
                                  :editstyle (text-edit-flags sheet)
                                  :value (slot-value sheet 'value)
                                  :scroll-mode
                                  (let ((p (sheet-parent sheet)))
                                    (and (acl-clim::scroller-pane-p p)
                                         (scroller-pane-scroll-bar-policy p)))))
      (setf (sheet-native-transformation sheet)
            (sheet-native-transformation (sheet-parent sheet)))
      (setf (gadget-id->window sheet gadget-id) window)
      ;; Apply the pane's text style as the control's font.
      (let ((text-style (pane-text-style sheet)))
        (when text-style
          (let ((font (text-style-mapping port text-style)))
            (acl-clim::frame-send-message
             (pane-frame sheet)
             window win:WM_SETFONT
             (acl-clim::acl-font-index font) 0))))
      ;; bug12693 SPR26749
      ;; If Echo-character is specified, set it explicitly
      ;; See also (text-edit-flags mswin-text-field)
      (when (typep sheet 'mswin-text-field)
        (let ((echo-char (text-field-echo-character sheet)))
          (when echo-char
            ;; Non-character echo values fall back to #\*.
            (if (not (characterp echo-char))
                (setq echo-char #\*))
            (let ((code (char-code echo-char)))
              (acl-clim::frame-send-message (pane-frame sheet)
                                            window
                                            win:EM_SETPASSWORDCHAR
                                            code 0)))))
      ;; Don't know how to set the y margins, but they look pretty good anyway.
      (with-slots (x-margin) sheet
        (acl-clim::frame-send-message
         (pane-frame sheet)
         window acl-clim::EM_SETMARGINS
         ;;acl-clim::EC_USEFONTINFO
         (logior acl-clim::EC_LEFTMARGIN
                 acl-clim::EC_RIGHTMARGIN)
         x-margin))
      ;; It's too soon for this. Need to do this later,
      ;; after the layout has been processed, but where?
      (win:ShowWindow window win:SW_SHOW)
      window)))
;;; The EN_KILLFOCUS and EN_SETFOCUS messages are how the text-gadgets
;;; are told they have lost/gained focus.  They are sent as part of
;;; the WM_COMMAND message.
(defmethod acl-clim::command-event :around ((gadget mswin-text-edit)
                                            port sheet wparam lparam)
  (let ((notifycode (acl-clim::hiword wparam)))
    (cond ((= notifycode win:EN_KILLFOCUS)
           ;; Translate to a CLIM focus-out event.
           (handle-event
            gadget
            (allocate-event
             'focus-out-gadget-event
             :gadget gadget)))
          ((= notifycode win:EN_SETFOCUS)
           ;; Translate to a CLIM focus-in event.
           (handle-event
            gadget
            (allocate-event
             'focus-in-gadget-event
             :gadget gadget)))
          (t
           ;; Anything else gets the standard command handling.
           (call-next-method gadget port sheet wparam lparam)))))
;; Single-line text field: a mswin-text-edit specialization backed by a
;; single-line EDIT control.
(defclass silica::mswin-text-field (silica::mswin-text-edit)
  (;;mm: spr27890
   (acl-clim::ignore-wm-char :initform nil))
  (:default-initargs
   :text-style nil
   :external-label nil
   :x-margin 2 :y-margin 2))

;; Type predicate used in place of TYPEP below.
(defmethod isa-textfield ((object t)) nil)
(defmethod isa-textfield ((object mswin-text-field)) t)

;; Window-style flags for the native single-line EDIT control.
(defmethod text-edit-flags ((sheet mswin-text-field))
  (logior
   (if (gadget-editable-p sheet) 0 win:ES_READONLY)
   win:ES_AUTOHSCROLL win:ES_LEFT win:WS_BORDER
   ;; bug12693 spr26749
   ;; If Echo-character is set, make this a password field.
   (if (text-field-echo-character sheet) win:ES_PASSWORD 0)))

;; Type predicate for viewports.
(defmethod isa-viewport ((object t)) nil)
(defmethod isa-viewport ((object viewport)) t)
(defmethod compose-space ((pane mswin-text-field) &key width height)
  ;;; Note that text-fields are scrolled by
  ;;; being given a viewport as a parent (this is different
  ;;; than text-editors.)
  ;;;
  ;;; As a result, if the parent is a viewport, and if width/height
  ;;; is specified, we want to use that value (i.e. in order
  ;;; to fill the space provided by the viewport).
  (with-slots (x-margin y-margin initial-space-requirement nlines ncolumns)
      pane
    (let* ((parent (sheet-parent pane))
           (parent-viewport-p (isa-viewport parent)))
      (let ((w 0)
            (min-w (process-width-specification pane '(1 :character)))
            (h 0)
            (value (gadget-value pane))
            (min-h (process-height-specification pane *min-text-field-height*)))
        ;; WIDTH: viewport parent wins; then the initial space
        ;; requirement, NCOLUMNS, the value, and finally the default.
        (cond (parent-viewport-p
               (setq w (process-width-specification pane width)))
              ((and initial-space-requirement
                    (plusp (process-width-specification
                            pane (space-requirement-width initial-space-requirement))))
               ;; This is where accepting-values views factors in.
               (setq w (max (process-width-specification
                             pane (space-requirement-width initial-space-requirement))
                            min-w)))
              (ncolumns
               (setq w (process-width-specification pane `(,ncolumns :character))))
              ((stringp value)
               (setq w (max (process-width-specification pane value)
                            *default-text-field-width*)))
              (t
               (setq w *default-text-field-width*)))
        (setq w (max w min-w))
        ;; HEIGHT: same priority order as WIDTH, with NLINES in place
        ;; of NCOLUMNS.
        (cond (parent-viewport-p
               (setq h (process-height-specification pane height)))
              ((and initial-space-requirement
                    (plusp (process-height-specification
                            pane (space-requirement-height initial-space-requirement))))
               ;; This is where accepting-values views factors in.
               (setq h (max (process-height-specification
                             pane (space-requirement-height initial-space-requirement))
                            min-h)))
              (nlines
               (setq h (process-height-specification pane `(,nlines :line))))
              ((stringp value)
               (setq h (process-height-specification pane value)))
              (t
               (setq h min-h)))
        (setq h (max h min-h))
        (make-space-requirement
         :width w :min-width min-w
         :height h :min-height min-h)))))
;; RETURN fires the activate callback; TAB / shift-TAB (and RETURN)
;; then move the keyboard focus among the frame's editable text panes,
;; wrapping around for TAB and pressing the frame's default button when
;; RETURN is hit on the last field.
(defmethod handle-event ((pane mswin-text-field) (event key-press-event))
  ;;mm: spr27890: keep the simpler method for the more general
  ;;    mswin-text-edit class
  ;; This event occurs when a text-field receives a RETURN character.
  ;;mm: spr27890 OR a TAB character.
  ;;afuchs: also, handle shift-TAB correctly, and support tabbing in text-fields
  ;; that are outside accepting-values panes.
  ;;(format t "~&handle-event in~%") (sleep 2) (format t "~&handle-event start~%")
  (let ((keysym (slot-value event 'key-name))
        (shifted-p (not (zerop (logand +shift-key+ (event-modifier-state event))))))
    ;; RETURN always runs the activate callback first.
    (case keysym
      (:newline
       (acl-clim::win-catch-command
        #'(lambda (pane)
            (activate-callback pane (gadget-client pane) (gadget-id pane)))
        pane)))
    (labels ((text-panes (frame)
               ;; All mswin-text-edit sheets of FRAME, in sheet order.
               (let ((result nil))
                 (map-over-sheets (lambda (sheet)
                                    (typecase sheet
                                      (mswin-text-edit (push sheet result))))
                                  (frame-top-level-sheet frame))
                 (nreverse result))))
      (or
       (let* ((frame (pane-frame pane))
              (children (text-panes frame)))
         (flet ((scan (all)
                  ;; Focus the first editable pane in ALL; NIL if none.
                  (do (p (tl all (cdr tl)))
                      ((atom tl) nil)
                    (setf p (first tl))
                    (when (typecase p
                            ;; use TAB or ENTER to skip from one text-field
                            ;; to the next
                            (mswin-text-edit (slot-value p 'silica::editable-p))
                            (otherwise nil))
                      (port-move-focus-to-gadget acl-clim::*acl-port* p)
                      (return t)))))
           (or (scan (cdr (member pane (if (and (eql keysym :tab) shifted-p)
                                           (reverse children)
                                           children))))
               (case keysym
                 (:tab
                  ;; wrap around if TAB
                  (scan (if shifted-p (reverse children) children)))
                 (:newline
                  ;; press the OK button if pressed ENTER on the last field
                  (acl-clim::activate-default-gadget frame) nil)))))
       ;; Give up the focus
       (win:SetFocus (win:GetActiveWindow)))
      ;;(format t "~&handle-event out~%")
      )))
;;
;; text-field-cursor
;; spr 30683 (alemmens, 2005-11-30)
;;
(defmethod text-field-cursor ((text-field silica::mswin-text-edit))
  ;; Map the native Windows caret position back to an index into the
  ;; gadget's text.  Windows stores each newline as CR+LF, so every
  ;; #\newline before the caret occupies two native positions.
  (let ((text (clim:gadget-value text-field))
        (windows-cursor (silica::get-selection text-field)))
    (do ((result 0 (1+ result))
         (actual 0))
        ((>= actual windows-cursor) result)
      ;; Past the end of TEXT, treat positions as single-width blanks.
      (let ((c (if (< result (length text))
                   (char text result)
                 #\Space)))
        (incf actual (if (char= c #\newline) 2 1))))))
(defmethod (setf text-field-cursor) (cursor (text-field mswin-text-edit))
  ;; Windows represents each newline as CR+LF, so advance the native
  ;; caret by one extra position for every newline preceding CURSOR.
  (let* ((text (clim:gadget-value text-field))
         (newlines-before (count #\newline text :end cursor)))
    (set-caret-pos text-field (+ cursor newlines-before))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; push buttons
;; Default raster op used when blitting picture-button pixmaps.
(defvar *default-picture-button-op*
    boole-and)
;; Push button mirrored onto a native Windows BUTTON control; becomes
;; owner-drawn when it has a picture label or non-default colors.
(defclass hpbutton-pane (acl-gadget-id-mixin
                         mirrored-sheet-mixin
                         push-button
                         ;; JPM: button-pane-mixin is for push buttons,
                         ;; not toggle buttons, because it applies
                         ;; ACTIVATE-CALLBACK to the object. Toggle buttons
                         ;; do not and should not support that function,
                         ;; they use VALUE-CHANGED-CALLBACK instead.
                         ;; I hope we aren't losing some useful behavior
                         ;; by turning this off...
                         ;;button-pane-mixin
                         space-requirement-mixin
                         leaf-pane
                         )
  ((external-label :initarg :external-label)
   (depth :initarg :depth)
   ;; These slots hold the patterns we'll really use...
   (normal-pattern :initarg :normal-pattern)
   (depressed-pattern :initarg :depressed-pattern)
   (x-margin :initarg :x-margin)
   (y-margin :initarg :y-margin)
   ;;--- kludge because DRAW-TEXT* :ALIGN-X :CENTER is wrong
   (internal-label-offset :initform 0)
   ;; yet another slot for picture buttons - who knows what all the
   ;; above slots are required for - this mix-up with the generic
   ;; gadgets is a real mess - some day this needs to be cleaned up
   ;; (cim 10/4/96)
   (pixmap :initform nil)
   (raster-op :initform *default-picture-button-op* :initarg :raster-op)
   ;; New slots to support "graying-out" deactivate
   ;; "colored" buttons on mswindows.
   (original-pane-foreground :initform nil :accessor original-pane-foreground)
   (deactivated-pane-foreground :initarg :deactivated-pane-foreground
                                :initform nil
                                :accessor deactivated-pane-foreground)
   ;; Slot to hold "grayed-out" pattern for a pattern-label.
   (deactivated-label-pixmap :initarg :deactivated-label-pixmap
                             :initform nil
                             :accessor deactivated-label-pixmap)
   )
  (:default-initargs :label nil
    :text-style nil
    :show-as-default nil
    :external-label nil
    ;; changed default-margins to make
    ;; picture-buttons look better (cim 10/4/96)
    :x-margin 2 :y-margin 2))
;; Pixel dimensions of the button glyph area used in size computations.
(defvar *hbutton-width* 21)
(defvar *hbutton-height* 21)
;; Grays used when painting deactivated ("grayed-out") buttons.
(defvar +hpbutton-deactivate-light-gray+ (clim-utils::make-gray-color-1 0.8))
(defvar +hpbutton-deactivate-dark-gray+ (clim-utils::make-gray-color-1 0.3))
(defun guess-deactivated-color (orig-color)
  ;; Pick a gray for a deactivated gadget: dark gray against a light
  ;; gray original, light gray in every other case.
  (if (and (typep orig-color 'clim-utils::gray-color)
           (< 0.6 (color-rgb orig-color)))
      +hpbutton-deactivate-dark-gray+
    +hpbutton-deactivate-light-gray+))
(defun make-deactivated-label-pixmap (label deactivated-pane-foreground)
  ;; Build a "grayed-out" variant of a pattern LABEL by replacing the
  ;; pattern's first design (assumed to be the background) with either
  ;; DEACTIVATED-PANE-FOREGROUND or a guessed gray.  Non-pattern labels
  ;; (e.g. pixmaps) are returned unchanged.
  (cond ((acl-clim::isa-pattern label)
         (let ((orig-array (clim-utils::pattern-array label))
               (orig-designs (clim-utils::pattern-designs label)))
           (let ((num-designs (length orig-designs)))
             (cond ((= num-designs 0)
                    ;; What does it mean to have no designs?
                    ;; Just return the original label
                    label)
                   (t
                    (let ((orig-background (aref orig-designs 0))
                          (new-designs (make-array num-designs)))
                      ;; Copy the original designs...
                      (loop for iii below num-designs
                          do (setf (aref new-designs iii)
                               (aref orig-designs iii)))
                      ;; ...assume the first color is the background,
                      ;; and try to replace it...
                      (setf (aref new-designs 0)
                        (or deactivated-pane-foreground
                            (guess-deactivated-color orig-background)))
                      ;; ...and return the new pattern.
                      (make-pattern orig-array
                                    new-designs)))))))
        (t
         ;; Can't do anything with a pixmap,
         ;; So just use it.
         label)))
;; Create the native Windows BUTTON control mirroring SHEET.  Chooses
;; BS_OWNERDRAW when the button has a picture label or a non-default
;; foreground/background, prepares the normal and "grayed-out" pixmaps,
;; installs the pane font, and shows the window.
(defmethod realize-mirror ((port acl-clim::acl-port) (sheet hpbutton-pane))
  (multiple-value-bind (left top right bottom)
      (sheet-native-region* sheet)
    (fix-coordinates left top right bottom)
    (let* ((parent (sheet-mirror sheet))
           (parent2 (sheet-mirror (sheet-parent sheet)))
           (window nil)
           (buttonstyle nil)
           (value nil)
           (width (- right left))
           (height (- bottom top))
           gadget-id)
      (assert (eq parent parent2) () "parents don't match!")
      (setq gadget-id (silica::allocate-gadget-id sheet))
      (setf buttonstyle
        (if (push-button-show-as-default sheet)
            win:BS_DEFPUSHBUTTON
          win:BS_PUSHBUTTON))
      (multiple-value-bind (cwidth cheight)
          (compute-gadget-label-size sheet)
        ;; NOTE(review): left/top here look like provisional positions
        ;; (stacked by gadget id); final geometry presumably comes from
        ;; the layout pass -- confirm.
        (setq top (+ top (* gadget-id 25)))
        (setq left (+ left 50))
        (setq width (+ cwidth (* 2 *hbutton-width*)))
        (setq height (max cheight (* 1 *hbutton-height*))))
      (setq window
        (let ((label (slot-value sheet 'silica::label))
              (resources (acl-clim::port-default-resources port)))
          (when (or (acl-clim::isa-pixmap label)
                    (acl-clim::isa-pattern label))
            ;; Picture label: render a pattern label into a pixmap once
            ;; and keep it in the PIXMAP slot for DRAW-PICTURE-BUTTON.
            (setf (slot-value sheet 'silica::pixmap)
              (if (acl-clim::isa-pattern label)
                  (with-sheet-medium (medium sheet)
                    (with-output-to-pixmap
                        (stream medium
                                :width (pattern-width label)
                                :height (pattern-height label))
                      (draw-pattern* stream label 0 0)))
                label))
            ;; If we have pixmap-label, try to setup a "grayed-out" pixmap.
            ;; 1] Use the deactivated-label-pixmap if it is already defined.
            ;; 2] If not, try to make a "best-guess" pixmap from
            ;;    the current label-pixmap
            (let ((alt-pixmap (or (deactivated-label-pixmap sheet)
                                  (make-deactivated-label-pixmap
                                   label
                                   (deactivated-pane-foreground sheet)))))
              (when alt-pixmap
                (setf (deactivated-label-pixmap sheet)
                  (if (acl-clim::isa-pattern alt-pixmap)
                      (with-sheet-medium (medium sheet)
                        (with-output-to-pixmap
                            (stream medium
                                    :width (pattern-width alt-pixmap)
                                    :height (pattern-height alt-pixmap))
                          (draw-pattern* stream alt-pixmap 0 0)))
                    alt-pixmap))
                ))
            (setq buttonstyle win:BS_OWNERDRAW ;; pnc Aug97 for clim2bug740
                  label nil))
          ;; Non-default colors also force owner-draw.
          (unless (eq (pane-background sheet)
                      (getf resources :background))
            (setq buttonstyle win:BS_OWNERDRAW))
          (unless (eq (pane-foreground sheet)
                      (getf resources :foreground))
            (setq buttonstyle win:BS_OWNERDRAW))
          (acl-clim::hbutton-open parent gadget-id
                                  left top width height
                                  :buttonstyle buttonstyle
                                  :value value
                                  :label label)))
      (setf (sheet-native-transformation sheet)
        (sheet-native-transformation (sheet-parent sheet)))
      (setf (silica::gadget-id->window sheet gadget-id) window)
      (let ((text-style (pane-text-style sheet)))
        (when text-style
          (let ((font (text-style-mapping port text-style)))
            (acl-clim::frame-send-message
             (pane-frame sheet)
             window win:WM_SETFONT
             (acl-clim::acl-font-index font) 0))))
      ;; Remember the original foreground so activation can restore it.
      (setf (original-pane-foreground sheet)
        (or (pane-foreground sheet)
            (let ((resources (acl-clim::port-default-resources port)))
              (getf resources :foreground))))
      (when (sheet-enabled-p sheet)
        ;; It's too soon for this. Need to do this later,
        ;; after the layout has been processed, but where?
        (win:ShowWindow window win:SW_SHOW))
      window)))
(defmethod compose-space ((pane hpbutton-pane) &key width height)
  (declare (ignore width height))
  ;; Requirement: label size plus x/y margins, widened by the external
  ;; label's extent when one is present.
  (with-slots (external-label x-margin y-margin initial-space-requirement) pane
    (let* ((ext-label-width 0)
           (ext-label-height 0))
      (when external-label
        (let ((text-style (slot-value pane 'text-style)))
          (with-sheet-medium (medium pane)
            (multiple-value-bind (w h)
                (text-size medium external-label :text-style text-style)
              (setq ext-label-width (+ w (text-style-width text-style medium))
                    ext-label-height (+ h (floor
                                           (text-style-height
                                            text-style medium)
                                           2)))))))
      (multiple-value-bind (width height)
          (compute-gadget-label-size pane)
        (when (member (acl-clim::get-system-version)
                      ;;mm: Windows NT seems to look better too this way
                      '(:win31 :winnt))
          (setq width (floor (* width 4) 3)))
        (let ((w (+ x-margin ext-label-width width x-margin))
              (h (+ y-margin (max ext-label-height height) y-margin)))
          ;;mm: set minimum dimensions for buttons
          (make-space-requirement
           :width w :min-width w
           :height h :min-height h)))
      )))
(defmethod draw-picture-button ((pane hpbutton-pane) state hdc rect)
  ;; Handle the drawing part of owner-drawn buttons (BS_OWNERDRAW).
  ;; Draws the 3D edge (sunken while ODS_SELECTED), fills the face with
  ;; the pane background, then blits the picture label or draws the text
  ;; label centered; pressed content is offset by one pixel.
  (assert (acl-clim::valid-handle hdc))
  (let ((bg (acl-clim::color->wincolor (pane-background pane)))
        (fg (acl-clim::color->wincolor (pane-foreground pane))))
    (multiple-value-bind (bwidth bheight)
        (bounding-rectangle-size pane)
      (win:SetBkMode hdc win:OPAQUE)
      (win:SetBkColor hdc bg)
      (win:SetTextColor hdc fg)
      (win:SetROP2 hdc win:R2_COPYPEN)
      (let* ((dc-image
              (with-sheet-medium (m pane)
                (acl-clim::dc-image-for-ink
                 m (pane-background pane))))
             (brush (acl-clim::dc-image-brush dc-image)))
        (win:SelectObject hdc brush)
        (win:DrawEdge hdc
                      rect
                      (if (logtest state win:ODS_SELECTED)
                          win:BDR_SUNKEN
                        win:BDR_RAISED)
                      (+ win:BF_RECT win:BF_MIDDLE))
        (win:Rectangle hdc 1 1 (- bwidth 2) (- bheight 2))
        (let ((pixmap (slot-value pane 'pixmap))
              (label (gadget-label pane)))
          (cond (pixmap
                 ;; Deactivated buttons use the grayed-out pixmap when
                 ;; one is available.
                 (let ((pixmap2 (if (gadget-active-p pane)
                                    pixmap
                                  (or (deactivated-label-pixmap pane)
                                      (slot-value pane 'pixmap)))))
                   (let* ((op (slot-value pane 'raster-op))
                          (width (pixmap-width pixmap2))
                          (height (pixmap-height pixmap2))
                          (x (floor (- bwidth width) 2))
                          (y (floor (- bheight height) 2)))
                     (when (logtest state win:ODS_SELECTED)
                       (incf x)
                       (incf y))
                     (win:BitBlt hdc x y width height (acl-clim::pixmap-cdc pixmap2) 0 0
                                 (acl-clim::bop->winop op)))))
                (label
                 (acl-clim::adjust-gadget-colors pane hdc)
                 (with-sheet-medium (medium pane)
                   (let* ((port (port medium))
                          (text-style (medium-merged-text-style medium))
                          (font (text-style-mapping port text-style))
                          (index (acl-clim::acl-font-index font)))
                     (when (acl-clim::valid-handle index) (win:SelectObject hdc index))
                     (multiple-value-bind (cstr len)
                         (silica::xlat-newline-return label)
                       (multiple-value-bind (width height)
                           (text-size medium label :text-style text-style)
                         (let ((x (floor (- bwidth width) 2))
                               (y (floor (- bheight height) 2)))
                           (excl:with-native-string (cstr cstr)
                             (or (win:TextOut hdc x y cstr len)
                                 (acl-clim::check-last-error "TextOut" :action :warn))))))
                     )))))
        ))))
;; deallocate any pixmap associated with a picture button when it's
;; destroyed - this is the only note-sheet-degrafted method in the
;; aclpc directory - someone should check what other resources
;; (if any) need to be deallocated when controls are destroyed
;; (cim 10/11/96)
(defmethod note-sheet-degrafted :after ((pane hpbutton-pane))
  ;; Release the pixmap only when we created it ourselves from a
  ;; pattern label; a label given directly as a pixmap is the user's
  ;; responsibility to destroy.  (cim 10/14/96)
  (let ((pixmap (slot-value pane 'pixmap)))
    (when (and pixmap (acl-clim::isa-pattern (gadget-label pane)))
      (port-deallocate-pixmap (port pixmap) pixmap))))
(defmethod handle-event ((pane hpbutton-pane)
                         (event window-change-event))
  ;; Handle WM_COMMAND event.
  ;; SPR18779. This code runs as a result of pushing a button.
  ;; Turn on output recording in case the callback does any
  ;; output. Output recording is otherwise turned off by
  ;; clim-internals::invoke-with-input-editing at this point.
  ;; JPM 11/98.
  (with-output-recording-options (*standard-input* :record t)
    (activate-callback pane (gadget-client pane) (gadget-id pane))))
;; Arm/disarm on pointer crossing so clients get the usual
;; armed/disarmed callback protocol.
(defmethod handle-event ((pane hpbutton-pane)
                         (event pointer-enter-event))
  (armed-callback pane (gadget-client pane) (gadget-id pane)))
(defmethod handle-event ((pane hpbutton-pane)
                         (event pointer-exit-event))
  (disarmed-callback pane (gadget-client pane) (gadget-id pane)))
;; Keep the native control's text in sync with the gadget label.
(defmethod (setf gadget-label) :after (str (pane hpbutton-pane))
  (with-slots (mirror) pane
    (when mirror
      (excl:with-native-string (str str)
        (win:SetWindowText mirror str)))))
(defmethod (setf pane-background) :after (clr (pane hpbutton-pane))
  (declare (ignore clr))
  (with-slots (mirror) pane
    (when mirror
      ;;; Work-around to force button to refresh.
      (win:SetWindowText mirror (or (gadget-label pane) "")))))
(defmethod (setf pane-foreground) :after (clr (pane hpbutton-pane))
  (declare (ignore clr))
  (with-slots (mirror) pane
    (when mirror
      ;;; Work-around to force button to refresh.
      (win:SetWindowText mirror (or (gadget-label pane) "")))))
(defmethod reload-label-string ((gadget hpbutton-pane))
  ;; Push the current string label back down to the Windows control,
  ;; forcing the native button to redraw its text.
  (with-slots (mirror) gadget
    (let ((label (and mirror (gadget-label gadget))))
      (when (stringp label)
        (excl:with-native-string (str label)
          (win:SetWindowText mirror str))))))
(defmethod note-gadget-activated :after ((client t)
                                         (gadget hpbutton-pane))
  ;; Restore the foreground saved at mirror-realization time.
  ;; NOTE(review): the fallback consults the port's :background
  ;; resource here, while realize-mirror saves :foreground -- looks
  ;; like a typo, but confirm before changing.
  (when (or (pane-foreground gadget)
            (pane-background gadget))
    (setf (pane-foreground gadget)
      (or (original-pane-foreground gadget)
          (let ((port (port gadget)))
            (getf (acl-clim::port-default-resources port)
                  :background))
          clim:+black+)))
  ;; Refresh the label after activation.
  (reload-label-string gadget)
  )
(defmethod note-gadget-deactivated :after ((client t)
                                           (gadget hpbutton-pane))
  ;; "Gray out" the button by switching its foreground to the
  ;; deactivated color, computing and caching one when the user did
  ;; not supply it.  Uses GUESS-DEACTIVATED-COLOR instead of
  ;; duplicating its light/dark-gray heuristic inline.
  (when (or (pane-foreground gadget)
            (pane-background gadget))
    (let ((dpf (deactivated-pane-foreground gadget)))
      (when (null dpf)
        ;; If not specified, make a best guess at the
        ;; color to use for deactivated-pane-foreground.
        (setq dpf (guess-deactivated-color (original-pane-foreground gadget)))
        (setf (deactivated-pane-foreground gadget) dpf))
      (setf (pane-foreground gadget) dpf)))
  ;; Refresh the label after deactivation.
  (reload-label-string gadget)
  )
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; hbutton-pane
;; Toggle button (radio button or check box) mirrored onto a native
;; Windows BUTTON control; picture labels are blitted owner-drawn.
(defclass hbutton-pane (acl-gadget-id-mixin
                        mirrored-sheet-mixin
                        toggle-button
                        button-pane-mixin)
  ((pixmap :initform nil)
   (raster-op :initform *default-picture-button-op* :initarg :raster-op))
  (:default-initargs :label nil
    ;; We no longer want this as it overrides the the
    ;; system font returned by get-sheet-resources
    ;; in acl-medi.lisp (cim 10/12/96)
    :text-style nil
    ))
;; Create the native radio button / check box mirroring SHEET.
;; Picture labels force BS_OWNERDRAW (see DRAW-PICTURE-BUTTON).
(defmethod realize-mirror ((port acl-clim::acl-port) (sheet hbutton-pane))
  (multiple-value-bind (left top right bottom)
      (sheet-native-region* sheet)
    (fix-coordinates left top right bottom)
    (let* ((parent (sheet-mirror sheet))
           (parent2 (sheet-mirror (sheet-parent sheet)))
           (window nil)
           (buttonstyle nil)
           (value nil)
           (width (- right left))
           (height (- bottom top))
           gadget-id
           (button-label-justify (gadget-button-label-justify sheet)) ; bug12221
           )
      (assert (eq parent parent2) () "parents don't match!")
      (setq gadget-id (silica::allocate-gadget-id sheet))
      (setq value (slot-value sheet 'silica::value))
      ;; Radio button for :one-of, check box for :some-of.
      (setq buttonstyle
        (ecase (gadget-indicator-type sheet)
          (:one-of win:BS_RADIOBUTTON)
          (:some-of win:BS_CHECKBOX)))
      (multiple-value-bind (cwidth cheight)
          (compute-gadget-label-size sheet)
        (setq top (+ top (* gadget-id 25)))
        (setq left (+ left 50))
        (setq width (+ cwidth (* 2 *hbutton-width*)))
        (setq height (max cheight (* 1 *hbutton-height*))))
      (setq window
        (let ((label (slot-value sheet 'silica::label))
              (resources (acl-clim::port-default-resources port)))
          resources
          (when (or (acl-clim::isa-pixmap label)
                    (acl-clim::isa-pattern label))
            ;; Render a pattern label into a pixmap once, and keep it
            ;; for DRAW-PICTURE-BUTTON.
            (setf (slot-value sheet 'silica::pixmap)
              (if (acl-clim::isa-pattern label)
                  (with-sheet-medium (medium sheet)
                    (with-output-to-pixmap
                        (stream medium
                                :width (pattern-width label)
                                :height (pattern-height label))
                      (draw-pattern* stream label 0 0)))
                label))
            (setq buttonstyle win:BS_OWNERDRAW ;; pnc Aug97 for clim2bug740
                  label nil))
          ;; pnc Dec99 for spr20626
          ;; If this is set, the toggle button is drawn
          ;; as a OWNERDRAWn push-button.
          #+IGNORE (unless (eq (pane-background sheet)
                               (getf resources :background))
                     (setq buttonstyle win:BS_OWNERDRAW))
          #+IGNORE (unless (eq (pane-foreground sheet)
                               (getf resources :foreground))
                     (setq buttonstyle win:BS_OWNERDRAW))
          (acl-clim::hbutton-open parent gadget-id
                                  left top width height
                                  :buttonstyle buttonstyle
                                  :value value
                                  :label label
                                  :button-label-justify button-label-justify ; bug12221
                                  )))
      (setf (sheet-native-transformation sheet)
        (sheet-native-transformation (sheet-parent sheet)))
      (setf (silica::gadget-id->window sheet gadget-id) window)
      (let ((text-style (pane-text-style sheet)))
        (when text-style
          (let ((font (text-style-mapping port text-style)))
            (acl-clim::frame-send-message
             (pane-frame sheet)
             window win:WM_SETFONT
             (acl-clim::acl-font-index font) 0))))
      (when (sheet-enabled-p sheet)
        ;; It's too soon for this. Need to do this later,
        ;; after the layout has been processed, but where?
        (win:ShowWindow window win:SW_SHOW))
      window)))
(defmethod draw-picture-button ((pane hbutton-pane) state hdc rect)
  ;; Owner-draw path for toggle buttons with picture labels: draw the
  ;; 3D edge (sunken while ODS_SELECTED), fill the face with the pane
  ;; background, and blit the pixmap centered, offset one pixel while
  ;; pressed.
  (multiple-value-bind (bwidth bheight)
      (bounding-rectangle-size pane)
    (win:SetBkMode hdc win:OPAQUE)
    (win:SetBkColor hdc (acl-clim::color->wincolor (pane-background pane)))
    (win:SetTextColor hdc (acl-clim::color->wincolor (pane-foreground pane)))
    (win:SetROP2 hdc win:R2_COPYPEN)
    (let* ((dc-image
            (with-sheet-medium (m pane)
              (acl-clim::dc-image-for-ink
               m (pane-background pane))))
           (brush (acl-clim::dc-image-brush dc-image)))
      (win:SelectObject hdc brush)
      (win:DrawEdge hdc
                    rect
                    (if (logtest state win:ODS_SELECTED)
                        win:BDR_SUNKEN
                      win:BDR_RAISED)
                    (+ win:BF_RECT win:BF_MIDDLE))
      (let ((margin 1))
        (win:Rectangle hdc margin margin
                       (- bwidth margin margin) (- bheight margin margin)))
      (let* ((pixmap (slot-value pane 'pixmap)))
        (when pixmap
          (let* ((op (slot-value pane 'raster-op))
                 (width (pixmap-width pixmap))
                 (height (pixmap-height pixmap))
                 (x (floor (- bwidth width) 2))
                 (y (floor (- bheight height) 2)))
            (when (logtest state win:ODS_SELECTED)
              (incf x)
              (incf y))
            (win:BitBlt hdc x y width height (acl-clim::pixmap-cdc pixmap) 0 0
                        (acl-clim::bop->winop op))))))))
(defmethod compose-space ((pane hbutton-pane) &key width height)
  (declare (ignore width height))
  ;; Requirement: label extent plus one glyph width, i.e. half a glyph
  ;; width of margin on each side.
  (multiple-value-bind (width height)
      (compute-gadget-label-size pane)
    ;;--- Should either make radio buttons and check boxes different classes
    ;;--- or generalize this
    (when (member (acl-clim::get-system-version)
                  ;;mm: looks better in winnt
                  '(:win31 :winnt))
      (setq width (floor (* width 4) 3)))
    (let* ((button-width *hbutton-width*)
           (button-height *hbutton-height*)
           (w (+ width (* button-width 1)))
           (h (max height (* button-height 1))))
      ;;mm: set min dimensions too
      ;; We allow 1/2 button width on each side as margin
      (make-space-requirement :width w :min-width w
                              :height h :min-height h))
    ))
;; Highlighting is a no-op
;; (the native Windows control draws its own highlight state).
(defmethod highlight-button ((pane hbutton-pane) medium)
  (declare (ignore medium)))
(defmethod handle-event ((pane hbutton-pane)
                         (event window-change-event))
  ;; WM_COMMAND: a radio button (:one-of) always becomes set, while a
  ;; check box (:some-of) toggles its current value.
  (let ((new-value (if (eq (gadget-indicator-type pane) :one-of)
                       t
                     (not (gadget-value pane)))))
    (setf (gadget-value pane :invoke-callback t) new-value)))
(defmethod (setf gadget-indicator-type) :before (value (pane hbutton-pane))
  ;; The native control style (radio vs. checkbox) is fixed when the
  ;; mirror is created, so the indicator type cannot change afterwards.
  (declare (ignore value))
  (error "Cannot change the indicator-type of a checkbox at this time"))
;;; When an hbutton is set, update its checkmark appropriately.
(defmethod (setf gadget-value) :before (value (pane hbutton-pane)
                                        &key invoke-callback)
  (declare (ignore invoke-callback))
  (with-slots (mirror) pane
    (when mirror
      (acl-clim::frame-send-message (pane-frame pane)
                                    mirror win:BM_SETCHECK (if value 1 0) 0))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; option panes
;;;
;;; mswin option panes have been replaced by mswin-combo-box
;;; clim\db-list
(defclass acl-clim::winwidget-mixin () ())
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; Combo Box
;; Drop-down option pane mirrored onto a native Windows COMBOBOX.
(defclass mswin-combo-box-pane
    (acl-gadget-id-mixin
     mirrored-sheet-mixin
     option-pane
     sheet-permanently-enabled-mixin
     space-requirement-mixin
     basic-pane)
  ((x-margin :initform 0)
   (y-margin :initform 0))
  (:default-initargs :background +white+))
;; Dummy method: :VISIBLE-ITEMS is accepted but ignored here.
(defmethod initialize-instance :after ((sheet mswin-combo-box-pane)
                                       &key visible-items)
  (declare (ignore visible-items))
  nil)
;; Dummy method: absorbs the :FOREGROUND initarg for application panes.
(defmethod initialize-instance :after ((sheet application-pane)
                                       &key foreground)
  (declare (ignore foreground))
  nil)
(defmethod handle-event ((pane mswin-combo-box-pane)
                         (event window-change-event))
  ;; Handle WM_COMMAND event.
  ;; Propagate the native selection back into the gadget value.
  ;; NOTE(review): CB_GETCURSEL can return CB_ERR (-1) when nothing is
  ;; selected; ELT would then error -- confirm this cannot happen here.
  (let ((mirror (sheet-direct-mirror pane))
        (index 0))
    (with-slots (items value mode value-key) pane
      (when (and mirror items)
        (setf index (acl-clim::frame-send-message (pane-frame pane)
                                                  mirror win:CB_GETCURSEL 0 0))
        (setf (gadget-value pane :invoke-callback t)
          (funcall value-key (elt items index)))))))
;; Keep the native selection in sync with the gadget value.
(defmethod (setf gadget-value) :before
           (value (pane mswin-combo-box-pane) &key invoke-callback)
  (declare (ignore invoke-callback))
  (with-slots (mode items value-key test) pane
    (let ((hwnd (sheet-direct-mirror pane))
          (i (position value items
                       :key value-key :test test)))
      (when (and hwnd i)
        (acl-clim::frame-send-message (pane-frame pane)
                                      hwnd win:CB_SETCURSEL i 0)))))
;; The combo box is native; pointer events need no further distribution.
(defmethod handle-event :after ((pane mswin-combo-box-pane)
                                (event pointer-event))
  (deallocate-event event))
(defmethod compose-space ((pane mswin-combo-box-pane) &key width height)
  (declare (ignore width height))
  ;; Size of the collapsed combo box: widest item label plus room for
  ;; the drop-down arrow (27px) and some vertical padding (10px).
  ;; This specifies the regular size, not the dropped-down size.
  (multiple-value-bind (name-width name-height)
      (compute-set-gadget-dimensions pane)
    (make-space-requirement
     :width (+ name-width 27)
     :height (+ name-height 10))))
(defmethod acl-clim::command-event ((gadget mswin-combo-box-pane)
                                    port sheet wparam lparam)
  ;; Only act when the drop-down list closes (CBN_CLOSEUP); other
  ;; notification codes for the combo box are ignored.
  (let ((notifycode (acl-clim::hiword wparam)))
    (when (= notifycode win:CBN_CLOSEUP)
      ;; List box of a combo box has been closed.
      (call-next-method gadget port sheet wparam lparam))))
;; Upper bound (pixels) on the dropped-down height of a combo box.
(defvar *combo-box-maximum-height* 300)
;;; This next method controls the space allocated to a combo-box control.
;;; Height is the dropped-down height, i.e. the number of list
;;; items multiplied by the line height. We set an upper limit
;;; on height in case the height exceeds the height of the display.
;;; If there are more items than will fit within the selected
;;; height, a scroll bar should automatically appear.
(defmethod set-sheet-mirror-edges* ((port acl-clim::acl-port)
                                    (sheet mswin-combo-box-pane)
                                    left top right bottom)
  (fix-coordinates left top right bottom)
  (let* ((hwnd (sheet-mirror sheet))
         (height (min
                  (* (+ 2 (acl-clim::frame-send-message (pane-frame sheet)
                                                        hwnd win:CB_GETCOUNT 0 0))
                     ;; I'd have expected the wparam to be 0 here
                     ;; according to the docs but this doesn't work
                     ;; right (cim 9/25/96)
                     (acl-clim::frame-send-message (pane-frame sheet)
                                                   hwnd win:CB_GETITEMHEIGHT -1 0))
                  *combo-box-maximum-height*)))
    (win:SetWindowPos hwnd
                      (ct:null-handle win:hwnd) ; we really want win:HWND_TOP
                      left top
                      (- right left)
                      height
                      (logior win:SWP_NOACTIVATE
                              win:SWP_NOZORDER))))
;;; When items are set in an combo-pane the mirror must be
;;; made to update its appearance appropriately.
(defmethod (setf set-gadget-items) :after (items (pane mswin-combo-box-pane))
  ;; Rebuild the native combo box contents to match ITEMS.  The
  ;; insertion index is tracked with a counter: the original called
  ;; (POSITION ITEM ITEMS) per item, which was O(n) per element and
  ;; returned the FIRST occurrence's index for duplicate items,
  ;; mis-ordering them.
  (with-slots (name-key mirror) pane
    (when mirror
      (acl-clim::frame-send-message
       (pane-frame pane)
       mirror win:CB_RESETCONTENT 0 0)
      (let ((pos 0))
        (dolist (item items)
          (let ((str (acl-clim::nstringify (funcall name-key item))))
            (excl:with-native-string (str str)
              (acl-clim::frame-send-message
               (pane-frame pane)
               mirror win:CB_INSERTSTRING pos str)))
          (incf pos)))
      ;; make sure it updates
      (win:InvalidateRect mirror ct:hnull win:TRUE)
      (note-sheet-region-changed pane))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; widget button Menu bars
;;; Windoz will not give you a menu bar anywhere but the
;;; top level. Period. To implement this, we have to
;;; use "non-native" menu bars.
;; Non-native menu bar pane: Windows only supplies menu bars on
;; top-level windows, so CLIM builds its own out of buttons.
(defclass mswin-menu-bar-pane (menu-bar
                               sheet-permanently-enabled-mixin
                               space-requirement-mixin
                               sheet-single-child-mixin
                               basic-pane)
  ())
(defmethod compose-space ((pane mswin-menu-bar-pane) &key width height)
  ;; Default to 150x25 when no size is suggested, and never go below
  ;; the 40x25 minimum a menu bar needs.
  (let ((w (or width 150))
        (h (or height 25)))
    (make-space-requirement
     :width (max w 40)
     :min-width 40
     :height (max h 25)
     :min-height 25)))
;; The menu bar's single child fills the entire allocated area.
(defmethod allocate-space ((pane mswin-menu-bar-pane) width height)
  (let ((child (sheet-child pane)))
    (when child (move-and-resize-sheet child 0 0 width height))))
(defmethod initialize-instance :after ((object mswin-menu-bar-pane)
                                       &rest options
                                       &key command-table frame)
  ;; Build the menu bar's button hierarchy from COMMAND-TABLE and adopt
  ;; it as the single child.  OPTIONS only absorbs the remaining
  ;; initargs; declare it ignored to silence the unused-variable warning.
  (declare (ignore options))
  (when (and frame command-table)
    (let ((inferiors (compute-menu-bar-pane-1 frame command-table)))
      (sheet-adopt-child object inferiors))))
;; A button living in the non-native menu bar; NEXT-MENU is the
;; pull-down menu it opens.
(defclass mswin-menu-bar-button (hpbutton-pane)
  ((next-menu :initform nil :initarg :next-menu)))
(defmethod handle-event ((pane mswin-menu-bar-button)
                         (event pointer-enter-event))
  (with-slots (armed next-menu) pane
    (unless armed
      (setf armed t)
      (armed-callback pane (gadget-client pane) (gadget-id pane)))))
;; NOTE(review): unlike hpbutton-pane, leaving the button does not
;; invoke DISARMED-CALLBACK -- confirm whether that is intentional.
(defmethod handle-event ((pane mswin-menu-bar-button)
                         (event pointer-exit-event))
  (with-slots (armed next-menu) pane
    (when armed
      (setf armed nil))))
(defmethod handle-event ((pane mswin-menu-bar-button)
                         (event window-change-event))
  ;; Handle WM_COMMAND event.
  ;; Pop up the attached menu and queue the chosen command, if any.
  (with-slots (armed next-menu) pane
    (with-sheet-medium (medium pane)
      (declare (ignore medium))
      (when armed (setf armed :active))
      (let ((choice
             (menu-choose next-menu :associated-window pane)))
        (when choice
          (apply #'queue-command pane choice)))
      (setf armed t)
      )))
(defun queue-command (button command command-table)
  ;; Deliver COMMAND to BUTTON's frame by posting a presentation event
  ;; carrying the command to the frame's top-level sheet.
  (let* ((frame (pane-frame button))
         (event (allocate-event
                 'presentation-event
                 :frame frame
                 :sheet (frame-top-level-sheet frame)
                 :presentation-type `(command :command-table ,command-table)
                 :value command)))
    (distribute-event (port button) event)))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; scroll-bar
;; Not to be confused with a scroller-pane, which provides scroll bars
;; to an application pane. This is a naked scroll-bar gadget that
;; acts like a slider gadget.
;; Stand-alone scroll bar gadget (distinct from a scroller-pane) that
;; acts like a slider, mirrored onto a native Windows SCROLLBAR.
(defclass mswin-scroll-bar (acl-gadget-id-mixin
                            mirrored-sheet-mixin
                            scroll-bar
                            space-requirement-mixin
                            leaf-pane
                            ;;sheet-permanently-enabled-mixin basic-pane
                            )
  ())
(defmethod initialize-instance :after ((scroll-bar mswin-scroll-bar) &key &allow-other-keys)
  ;; Initialization is now taken care of by an initialize-instance after method
  ;; on scroll-bar in silica/gadgets.lisp. This dummy is only necessary as long
  ;; as CLIM hasn't been recompiled from scratch. (alemmens, 2004-12-2004)
  'ignore)
;; Native range: 0 up to the port's scroll granularity.
(defmethod native-gadget-range* ((scroll-bar mswin-scroll-bar))
  (values 0 (acl-clim::win-scroll-grain acl-clim::*acl-port*)))
;; Create the native SCROLLBAR control, then push sensible defaults for
;; range, slider size, value and line increment down to it.
(defmethod realize-mirror ((port acl-clim::acl-port) (sheet mswin-scroll-bar))
  (multiple-value-bind (left top right bottom)
      (sheet-native-region* sheet)
    (fix-coordinates left top right bottom)
    (let* ((parent (sheet-mirror sheet))
           (parent2 (sheet-mirror (sheet-parent sheet)))
           (window nil)
           (orientation (gadget-orientation sheet))
           (width (- right left))
           (height (- bottom top))
           (gadget-id (allocate-gadget-id sheet)))
      (assert (eq parent parent2) () "parents don't match!")
      (setq window
        (acl-clim::scrollbar-open parent left top width height orientation))
      (setf (sheet-native-transformation sheet)
        (sheet-native-transformation (sheet-parent sheet)))
      (setf (gadget-id->window sheet gadget-id) window)
      (win:ShowWindow window win:SW_SHOW)
      (setf (sheet-direct-mirror sheet) window) ; needed only to initialize
      ;; this from rfe4072
      ;; Make sure defaults are sensible at the time the WIN32 scroll bar is
      ;; created.
      (setf (gadget-min-value sheet)
        (or (gadget-min-value sheet) 0))
      (setf (gadget-max-value sheet)
        (or (gadget-max-value sheet) 1))
      (change-scroll-bar-values
       sheet
       ;; remaining from rfe4072
       :slider-size (or (scroll-bar-size sheet) 1)
       :value (or (gadget-value sheet) 0)
       :line-increment (or (scroll-bar-line-increment sheet) 1))
      window)))
(defmethod compose-space ((m mswin-scroll-bar) &key width height)
  (declare (ignore width height))
  ;; A scroll bar is the system thickness across its minor axis and
  ;; stretches freely along its major axis (minimum two thicknesses).
  (ecase (gadget-orientation m)
    (:vertical
     (let ((thickness (win-scroll-thick :y)))
       (make-space-requirement :width thickness
                               :min-height thickness
                               :height (* 2 thickness)
                               :max-height +fill+)))
    (:horizontal
     (let ((thickness (win-scroll-thick :x)))
       (make-space-requirement :height thickness
                               :min-width thickness
                               :width (* 2 thickness)
                               :max-width +fill+)))))
(defmethod change-scroll-bar-values ((sb mswin-scroll-bar)
                                     &key
                                     slider-size
                                     value
                                     line-increment
                                     (page-increment slider-size))
  ;; Push slider size and position down to the native control via
  ;; SetScrollInfo, converting from gadget coordinates to the port's
  ;; native 0..win-scroll-grain range.
  ;; I simplified this (and improved dealing with unusual gadget ranges)
  ;; by using convert-scroll-bar-xxx-out (alemmens, 2004-12-24).
  (declare (ignore page-increment line-increment))
  (let ((mirror (sheet-direct-mirror sb))
        (range (gadget-range sb)))
    (when mirror
      (unless slider-size (setq slider-size (scroll-bar-size sb)))
      (setq slider-size (min slider-size range)) ; sanity check
      (unless value (setq value (gadget-value sb)))
      (let* ((struct (ct:ccallocate win:scrollinfo))
             (page (convert-scroll-bar-size-out sb slider-size))
             (position (convert-scroll-bar-value-out sb value)))
        (ct:csets
         win:scrollinfo struct
         cbSize (ct:sizeof win:scrollinfo)
         fMask win:SIF_ALL
         nMin 0
         nMax (acl-clim::win-scroll-grain acl-clim::*acl-port*)
         nPage page
         nPos position)
        (win:SetScrollInfo mirror win:SB_CTL struct 1)))))
;; Keep the native control in sync when value or slider size change.
(defmethod (setf gadget-value) :before
           (nv (gadget mswin-scroll-bar) &key invoke-callback)
  (declare (ignore invoke-callback))
  (change-scroll-bar-values gadget :value nv))
(defmethod (setf scroll-bar-size) :before (nv (gadget mswin-scroll-bar))
  (change-scroll-bar-values gadget :slider-size nv))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; pull-down-menu
(defclass mswin-pull-down-menu-button (hpbutton-pane)
((next-menu :initform nil :initarg :next-menu)))
(defmethod handle-event ((pane mswin-pull-down-menu-button)
(event pointer-enter-event))
(when (port pane) ;the menu is sometimes disabled...
(let* ((pointer (port-pointer (port pane)))
(pointer-button-state (pointer-button-state pointer)))
(unless (= pointer-button-state 0)
(with-slots (armed) pane
(unless (eq armed :active)
(with-sheet-medium (medium pane)
(setq armed :active)
(highlight-button pane medium))
(armed-callback pane (gadget-client pane) (gadget-id pane))))))))
(defmethod handle-event ((pane mswin-pull-down-menu-button)
(event pointer-exit-event))
(when (port pane) ;the menu is often disabled...
(with-slots (armed) pane
(when armed
(setq armed nil)
(disarmed-callback pane (gadget-client pane) (gadget-id pane))))))
(defmethod isa-pull-down-menu ((object t)) nil)
(defmethod isa-pull-down-menu ((object pull-down-menu)) t)
;; While the pointer moves over a menu button that has a cascaded
;; sub-menu, pop that sub-menu up once the pointer gets within a small
;; sensitive region at the button's right edge.
(defmethod handle-event ((pane mswin-pull-down-menu-button)
			 (event pointer-motion-event))
  (with-slots (next-menu x-margin normal-pattern) pane
    (when next-menu
      (with-bounding-rectangle* (left top right bottom) (sheet-region pane)
	(declare (ignore top right bottom))
	(let* ((pattern-width (pattern-width normal-pattern))
	       ;; width in pixels of the hot zone near the right edge
	       (sensitive-region 16)
	       (x (pointer-event-x event)))
	  (when (and next-menu
		     (> x (- (+ left x-margin pattern-width)
			     sensitive-region)))
	    ;; NEXT-MENU is either a cascaded menu or a function of the
	    ;; button that produces one.
	    (if (isa-pull-down-menu next-menu)
		(choose-from-pull-down-menu next-menu pane :cascade-p t)
	      (funcall next-menu pane))))))))
;; We really shouldn't ever get one of these - the button must have been down
;; for us to get here.
(defmethod handle-event ((pane mswin-pull-down-menu-button)
			 (event pointer-button-press-event))
  ;; Defensive: arm the button anyway so the later release/command event
  ;; finds consistent state.
  (with-slots (armed) pane
    (setf armed :active))
  (armed-callback pane (gadget-client pane) (gadget-id pane))
  )
;; WM_COMMAND arrives as a window-change-event: if the button was armed,
;; fire its activate-callback and unwind the whole menu interaction by
;; throwing to the tag established in CHOOSE-FROM-PULL-DOWN-MENU.
(defmethod handle-event ((pane mswin-pull-down-menu-button)
			 (event window-change-event))
  ;; Handle WM_COMMAND event.
  (with-slots (armed) pane
    (when (eq armed :active)
      (setf armed t)
      (activate-callback pane (gadget-client pane) (gadget-id pane))
      ;;--- This modularity is a bit dubious.  Oh well.
      (throw 'exit-pull-down-menu (values)))))
;; Compare two native window handles for identity.
;;
;; Historical note: an earlier (now permanently disabled, the COND test
;; was the constant NIL) branch compared (ct:lhandle-value x/y) for the
;; days when handles were foreign lhandle objects; handles are plain
;; integers now, so EQUAL suffices and the dead branch has been removed.
(defun winhandle-equal (x y)
  (equal x y))
(defvar acl-clim::*generic-gadgets* nil)
;;; +++ needs work for integration: *generic-gadgets*
;;; clim\db-menu
;; Exit the menu interaction when the pointer truly leaves the menu.
;; Don't punt if we've never entered the menu, or if we are entering
;; one of the buttons within the menu (boundary kind :inferior).
;;
;; Fix: *generic-gadgets* was referenced as acl-clim:*generic-gadgets*
;; (single colon, i.e. an external-symbol reference) although it is
;; defined above as the internal symbol acl-clim::*generic-gadgets* and
;; accessed with "::" everywhere else in this file; reading the
;; single-colon form signals an error unless the symbol is exported.
(defmethod handle-event ((pane pull-down-menu) (event pointer-exit-event))
  (when (and acl-clim::*generic-gadgets*
	     (pull-down-menu-entered pane)
	     (not (eq (pointer-boundary-event-kind event) :inferior)))
    (throw 'exit-pull-down-menu (values))))
(defvar *subsidiary-pull-down-menu* nil)
;;; +++ needs work for integration: *generic-gadgets*
;;; clim\db-menu
;; Run a modal interaction loop for MENU (a pane in its own menu frame).
;; When BUTTON is supplied, first position the menu frame relative to it:
;; to the button's right when CASCADE-P (a cascaded sub-menu), otherwise
;; just below it.  Two positioning paths exist: one computed from CLIM
;; sheet regions (*generic-gadgets*) and one from native window edges.
;; Returns when some handler throws to EXIT-PULL-DOWN-MENU.
(defun choose-from-pull-down-menu (menu &optional button &key cascade-p)
  (let ((menu-frame (pane-frame menu))
	(event-queue (sheet-event-queue menu))
	(mirror (sheet-mirror menu)))
    (when (and acl-clim::*generic-gadgets* button)
      (with-bounding-rectangle* (bleft btop bright bbottom)
	(sheet-device-region button)
	(declare (ignore bright))
	(multiple-value-bind (fleft ftop fright fbottom)
	    (let ((tls (get-top-level-sheet button)))
	      (mirror-region* (port tls) tls))
	  (declare (ignore fright fbottom))
	  ;; The -16 / +4 / +23 pixel offsets are empirical tweaks to make
	  ;; the menu overlap its parent the way native menus do.
	  (if cascade-p
	      (let ((pattern-width (pattern-width (slot-value button 'normal-pattern)))
		    (button-x-margin (slot-value button 'x-margin)))
		(move-sheet (frame-top-level-sheet menu-frame)
			    (+ bleft fleft pattern-width button-x-margin -16)
			    (+ btop ftop)))
	    (move-sheet (frame-top-level-sheet menu-frame)
			(+ bleft fleft 4)
			(+ bbottom ftop 23))))))
    (when (and (not acl-clim::*generic-gadgets*) button)
      (let ()
	(multiple-value-bind (bleft btop bright bbottom)
	    (acl-clim::mirror-native-edges*
	     acl-clim::*acl-port* button)
	  (declare (ignore bright))
	  (if cascade-p
	      (let ((pattern-width (pattern-width (slot-value button 'normal-pattern)))
		    (button-x-margin (slot-value button 'x-margin)))
		(move-sheet (frame-top-level-sheet menu-frame)
			    (+ bleft pattern-width button-x-margin -16)
			    (+ btop)))
	    (move-sheet (frame-top-level-sheet menu-frame)
			(+ bleft 0)
			(+ bbottom 0))))))
    (enable-frame menu-frame)
    ;; Share the event queue with the application frame
    (setf (sheet-event-queue (frame-top-level-sheet (pane-frame menu)))
      (sheet-event-queue (frame-top-level-sheet *application-frame*)))
    ;; Ensure no surprise exit events
    (setf (pull-down-menu-entered menu) nil)
    ;; Wait for an event and then handle it
    ;; make sure that the pulldown has the focus
    (win:SetFocus mirror)
    (setf (acl-clim::acl-port-mirror-with-focus
	   acl-clim::*acl-port*) mirror)
    (unwind-protect
	(flet ((waiter ()
		 (not (queue-empty-p event-queue))))
	  (declare (dynamic-extent #'waiter))
	  ;; Nested (cascaded) invocations catch a dummy tag so that only
	  ;; the outermost loop unwinds the whole menu interaction.
	  (catch (if *subsidiary-pull-down-menu*
		     '|Not exit-pull-down-menu|
		   'exit-pull-down-menu)
	    (let ((*subsidiary-pull-down-menu* t))
	      (loop
		;; If focus moved to some other window, the menu is done.
		(unless (winhandle-equal mirror
					 (acl-clim::acl-port-mirror-with-focus
					  acl-clim::*acl-port*))
		  (throw 'exit-pull-down-menu (values)))
		(port-event-wait (port menu) #'waiter
				 :wait-reason "Pull-Down Menu" :timeout 2)
		(let ((event (queue-get event-queue)))
		  (when event
		    (handle-event (event-sheet event) event)))))))
      ;; Always take the menu frame down, however we exit.
      (disable-frame menu-frame))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; slider fixes
;; Bitmap patterns used to draw slider thumbs.  Each #2A row is a row of
;; pixels; 0 selects +background-ink+ and 1 selects +foreground-ink+.
(setq *default-horizontal-slider-pattern*
      (make-pattern #2A((0 0 0 0 0 0 0 0 0 0 0 0 0 0)
			(0 0 0 0 0 0 0 0 0 0 0 0 1 1)
			(0 0 1 0 1 0 1 0 1 0 1 0 1 1)
			(0 0 1 1 0 1 1 0 0 1 0 1 1 1)
			(0 0 1 0 1 0 1 0 1 0 1 0 1 1)
			(0 0 1 1 0 1 1 0 0 1 0 1 1 1)
			(0 0 1 0 1 0 1 0 1 0 1 0 1 1)
			(0 0 1 1 0 1 1 0 0 1 0 1 1 1)
			(0 0 1 0 1 0 1 0 1 0 1 0 1 1)
			(0 0 1 1 0 1 1 0 0 1 0 1 1 1)
			(0 0 1 0 1 0 1 0 1 0 1 0 1 1)
			(0 0 1 1 0 1 1 0 0 1 0 1 1 1)
			(0 0 1 0 1 0 1 0 1 0 1 0 1 1)
			(0 0 1 1 0 1 1 0 0 1 0 1 1 1)
			(0 0 1 0 1 0 1 0 1 0 1 0 1 1)
			(0 0 1 1 0 1 1 0 0 1 0 1 1 1)
			(0 0 1 0 1 0 1 0 1 0 1 0 1 1)
			(0 0 1 1 0 1 1 0 0 1 0 1 1 1)
			(0 0 1 0 1 0 1 0 1 0 1 0 1 1)
			(0 1 1 1 1 1 1 1 1 1 1 1 1 1)
			(0 1 1 1 1 1 1 1 1 1 1 1 1 0))
		    (list +background-ink+ +foreground-ink+)))
;; Gray used for the rail the slider thumb travels along.
(setq *slider-rail-ink* (make-gray-color .5))
(setq *default-vertical-slider-pattern*
      (make-pattern #2a((0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0)
			(0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1)
			(0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1)
			(0 0 0 1 0 1 0 1 0 1 0 1 0 1 0 1 0 1 0 1 1)
			(0 0 1 0 1 0 1 0 1 0 1 0 1 0 1 0 1 0 1 1 1)
			(0 0 0 1 0 1 0 1 0 1 0 1 0 1 0 1 0 1 0 1 1)
			(0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1)
			(0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1)
			(0 0 1 0 1 0 1 0 1 0 1 0 1 0 1 0 1 0 1 1 1)
			(0 0 0 1 0 1 0 1 0 1 0 1 0 1 0 1 0 1 0 1 1)
			(0 0 1 0 1 0 1 0 1 0 1 0 1 0 1 0 1 0 1 1 1)
			(0 0 0 1 0 1 0 1 0 1 0 1 0 1 0 1 0 1 0 1 1)
			(0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1)
			(0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0))
		    (list +background-ink+ +foreground-ink+)))
;;mm: Some new mixins
;; Hand out the next free gadget id for pane X (a simple counter into
;; the id->window map); errors once the fixed-size map is exhausted.
(defmethod allocate-gadget-id ((x acl-gadget-id-mixin))
  (with-slots (gadget-id gadget-id->window-map) x
    (let ((l (length gadget-id->window-map)))
      (if (< gadget-id l)
	  (prog1 gadget-id (incf gadget-id))
	(error "Too many gadgets in pane: ~S" x)))))
;; Look up the native window handle registered under gadget id ID.
(defmethod gadget-id->window ((x acl-gadget-id-mixin) id)
  (with-slots (gadget-id->window-map) x
    (aref gadget-id->window-map id)))
;; Register native window handle WINDOW under gadget id ID.
(defmethod (setf gadget-id->window) (window (x acl-gadget-id-mixin) id)
  (with-slots (gadget-id->window-map) x
    (setf (aref gadget-id->window-map id) window)))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; text editor
(in-package :acl-clim)
;; After the standard mirror-transformation update, force the native
;; transformation to track the sheet transformation for winwidget panes.
(defmethod update-mirror-transformation :around ((port acl-port)
						 (sheet winwidget-mixin))
  (call-next-method)
  (setf (sheet-native-transformation sheet) (sheet-transformation sheet)))
;;mm: allow :LABEL initarg
;; Text editor pane backed by the native Windows edit control; defaults
;; to a single 30-column line.
(defclass acl-text-editor-pane (silica::mswin-text-edit)
  ()
  (:default-initargs :ncolumns 30 :nlines 1))
;; Accept (and ignore) a :LABEL initarg so callers may pass one without
;; error; the native edit control has no label of its own.
(defmethod initialize-instance :after ((x acl-text-editor-pane)
				       &key label &allow-other-keys)
  (declare (ignore label))
  )
;; Text editor panes keep the default (arrow/I-beam) pointer rather than
;; any application-set pointer.
(defmethod acl-clim::sheet-wants-default-pointer
    ((object acl-clim::acl-text-editor-pane))
  t)
;; new code to deal with resources - ie background foreground and
;; text-stlye for various windows controls (cim 10/12/96)
;; There is a question here of when is it safe to delete the brush
;; used for painting the background. We assume in the code below that
;; by the time the next wm_ctlcolorxxx message arrives the previously
;; returned brush can be freed - is this a reasonable assumption?
;; (cim 10/11/96)
;; WM_CTLCOLORxxx helper: set the device context's text/background
;; colors from the pane and return (via *background-brush*) a brush for
;; painting the control background.  The previously returned brush is
;; deleted here on the assumption that Windows is done with it by the
;; time the next WM_CTLCOLORxxx message arrives (see note above).
(defmethod adjust-gadget-colors (pane hdc)
  (excl:without-interrupts		; due to global variable
    (when (acl-clim::valid-handle *background-brush*)
      (or (win:DeleteObject *background-brush*)
	  (error "DeleteObject")))
    (let* ((bg (color->wincolor (pane-background pane)))
	   (fg (color->wincolor (pane-foreground pane))))
      (win:SetBkColor hdc bg)
      (win:SetTextColor hdc fg)
      (setq *background-brush* (win:CreateSolidBrush bg)))))
;; Default resource lookup: every sheet inherits the port's defaults.
(defmethod get-sheet-resources ((port acl-port) sheet)
  (declare (ignore sheet))
  (port-default-resources port))
;; Lazily-computed caches filled in by the :around method below.
(defparameter *windows-system-text-style* nil)
(defparameter *gadget-default-resources* nil)
;; specializing the following method on acl-gadget-id-mixin causes
;; the text-style to be specified for those sheets that are mirrored
;; directly by windows controls - as opposed to for CLIM stream panes
;; which should probably fallback to using *default-text-style* if no
;; explicit text-style is given. (cim 10/14/96)
;; Prepend a default :text-style to the sheet resources, caching both
;; the chosen system-like text style and the augmented resource list.
(defmethod get-sheet-resources :around ((port acl-port)
					(sheet t))
  (or *windows-system-text-style*
      (setq *windows-system-text-style*
	#+ignore
	(make-device-font-text-style *acl-port* win:SYSTEM_FONT)
	#-ignore
	(make-text-style :sans-serif :roman :small)
	;; this should get the real system font but I device fonts don't
	;; seem to work as expected - for the moment though the above
	;; looks pretty good (cim 10/12/96)
	))
  (or *gadget-default-resources*
      (setq *gadget-default-resources*
	(let ((resources (call-next-method)))
	  `(:text-style ,*windows-system-text-style* ,@resources)))))
;; Map logical text-style sizes (:small, :large, ...) to point sizes via
;; the ACL port's size alist.
(defmethod standardize-text-style ((port basic-port) style
				   &optional (character-set
					      *standard-character-set*))
  (standardize-text-style-1 port style character-set
			    acl-clim::*acl-logical-size-alist*))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Outlined-pane
;; An outlined pane mirrored by a native child window drawn with the
;; WS_EX_CLIENTEDGE sunken border (thickness 2 on each side).
(defclass mswin-outlined-pane (silica::acl-gadget-id-mixin
			       mirrored-sheet-mixin
			       outlined-pane)
  ()
  (:default-initargs :thickness 2))
;; Create the native child window that draws an outlined pane's border
;; (WS_EX_CLIENTEDGE gives the 3D sunken edge).  Returns the new window
;; handle; signals an error if CreateWindowEx fails.
(defun outline-open (parent left top width height)
  (let ((winstyle
	 (logior win:WS_CHILD
		 win:WS_CLIPCHILDREN
		 win:WS_CLIPSIBLINGS))
	(exstyle win:WS_EX_CLIENTEDGE)
	(window nil))
    (setq window
      (excl:with-native-string (clim-class (clim-class *acl-port*))
	(excl:with-native-string (win-name (win-name *acl-port*))
	  (win:CreateWindowEx exstyle
			      clim-class
			      win-name
			      winstyle
			      left top width height
			      (or parent 0)
			      0		; menu
			      (hinst *acl-port*)
			      (symbol-name (gensym)) ))))
    ;; CreateWindowEx returns 0 (a null handle) on failure.
    (when (zerop window)
      (or (check-last-error "CreateWindowEx")
	  (error "CreateWindowEx: unknown error")))
    window))
;; Create and show the native window mirroring an outlined pane, record
;; it under a freshly allocated gadget id, and return the handle.
(defmethod realize-mirror ((port acl-port)
			   (sheet mswin-outlined-pane))
  (multiple-value-bind (left top right bottom)
      (sheet-native-region* sheet)
    (fix-coordinates left top right bottom)
    (let* ((parent (sheet-mirror sheet))
	   (window nil)
	   (width (- right left))
	   (height (- bottom top))
	   (gadget-id (silica::allocate-gadget-id sheet)))
      (setq window
	(outline-open parent left top width height))
      ;; Child shares its parent's native transformation.
      (setf (sheet-native-transformation sheet)
	(sheet-native-transformation (sheet-parent sheet)))
      (setf (silica::gadget-id->window sheet gadget-id) window)
      (win:ShowWindow window win:SW_SHOW)
      (setf (sheet-direct-mirror sheet) window)
      window)))
;; Nothing to draw in Lisp: Windows paints the WS_EX_CLIENTEDGE border.
(defmethod handle-repaint ((pane mswin-outlined-pane) region)
  (declare (ignore region))
  nil)
;; Space requirement = child's requirement plus the border thickness on
;; all four sides.
(defmethod compose-space ((pane mswin-outlined-pane) &key width height)
  (let ((thickness (slot-value pane 'silica::thickness))
	(child (sheet-child pane)))
    (space-requirement+
     (compose-space child :width width :height height)
     (make-space-requirement
      :width (* 2 thickness)
      :height (* 2 thickness)))))
;; Give the child everything inside the border.  The child is placed at
;; (0,0) because the native border lies outside the client area.
(defmethod allocate-space ((pane mswin-outlined-pane) width height)
  (let ((thickness (slot-value pane 'silica::thickness)))
    (move-and-resize-sheet
     (sheet-child pane)
     0 0
     (- width (* 2 thickness)) (- height (* 2 thickness)))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; Labelled-pane
;; Redraw a labelled gadget whenever its label string changes.
(defmethod (setf gadget-label) :after
	   ((new-label string) (gadget labelled-gadget-mixin))
  (handle-repaint gadget t)
  )
| {
"pile_set_name": "Github"
} |
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""The VectorDiffeomixture distribution class."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.contrib.distributions.python.ops import distribution_util
from tensorflow.contrib.distributions.python.ops.bijectors.affine_linear_operator import AffineLinearOperator
from tensorflow.contrib.distributions.python.ops.bijectors.softmax_centered import SoftmaxCentered
from tensorflow.contrib.linalg.python.ops import linear_operator_addition as linop_add_lib
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import tensor_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import check_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn_ops
from tensorflow.python.ops.distributions import categorical as categorical_lib
from tensorflow.python.ops.distributions import distribution as distribution_lib
from tensorflow.python.ops.distributions import normal as normal_lib
from tensorflow.python.ops.linalg import linear_operator_diag as linop_diag_lib
from tensorflow.python.ops.linalg import linear_operator_full_matrix as linop_full_lib
from tensorflow.python.ops.linalg import linear_operator_identity as linop_identity_lib
from tensorflow.python.ops.linalg import linear_operator_lower_triangular as linop_tril_lib
__all__ = [
"VectorDiffeomixture",
"quadrature_scheme_softmaxnormal_gauss_hermite",
"quadrature_scheme_softmaxnormal_quantiles",
]
def quadrature_scheme_softmaxnormal_gauss_hermite(
    normal_loc, normal_scale, quadrature_size,
    validate_args=False, name=None):
  """Use Gauss-Hermite quadrature to form quadrature on `K - 1` simplex.
  A `SoftmaxNormal` random variable `Y` may be generated via
  ```
  Y = SoftmaxCentered(X),
  X = Normal(normal_loc, normal_scale)
  ```
  Note: for a given `quadrature_size`, this method is generally less accurate
  than `quadrature_scheme_softmaxnormal_quantiles`.
  Args:
    normal_loc: `float`-like `Tensor` with shape `[b1, ..., bB, K-1]`, B>=0.
      The location parameter of the Normal used to construct the SoftmaxNormal.
    normal_scale: `float`-like `Tensor`. Broadcastable with `normal_loc`.
      The scale parameter of the Normal used to construct the SoftmaxNormal.
    quadrature_size: Python `int` scalar representing the number of quadrature
      points.
    validate_args: Python `bool`, default `False`. When `True` distribution
      parameters are checked for validity despite possibly degrading runtime
      performance. When `False` invalid inputs may silently render incorrect
      outputs.
    name: Python `str` name prefixed to Ops created by this class.
  Returns:
    grid: Shape `[b1, ..., bB, K, quadrature_size]` `Tensor` representing the
      convex combination of affine parameters for `K` components.
      `grid[..., :, n]` is the `n`-th grid point, living in the `K - 1` simplex.
    probs: Shape `[b1, ..., bB, K, quadrature_size]` `Tensor` representing the
      quadrature weight associated with each grid point.
  """
  with ops.name_scope(name, "quadrature_scheme_softmaxnormal_gauss_hermite",
                      [normal_loc, normal_scale]):
    normal_loc = ops.convert_to_tensor(normal_loc, name="normal_loc")
    dt = normal_loc.dtype.base_dtype
    normal_scale = ops.convert_to_tensor(
        normal_scale, dtype=dt, name="normal_scale")
    normal_scale = maybe_check_quadrature_param(
        normal_scale, "normal_scale", validate_args)
    # Standard Gauss-Hermite abscissae/weights; rescaled below to the
    # (loc, scale)-parameterized Normal via x = loc + sqrt(2)*scale*grid.
    grid, probs = np.polynomial.hermite.hermgauss(deg=quadrature_size)
    # NOTE(review): `dt` is already a DType, so `dt.dtype` looks redundant;
    # presumably `dt.as_numpy_dtype` was intended -- confirm against the TF
    # version in use before changing.
    grid = grid.astype(dt.dtype.as_numpy_dtype)
    probs = probs.astype(dt.dtype.as_numpy_dtype)
    # Normalize the Hermite weights so they sum to 1 (a probability vector).
    probs /= np.linalg.norm(probs, ord=1, keepdims=True)
    probs = ops.convert_to_tensor(probs, name="probs", dtype=dt)
    grid = softmax(
        -distribution_util.pad(
            (normal_loc[..., array_ops.newaxis] +
             np.sqrt(2.) * normal_scale[..., array_ops.newaxis] * grid),
            axis=-2,
            front=True),
        axis=-2) # shape: [B, components, deg]
    return grid, probs
def quadrature_scheme_softmaxnormal_quantiles(
    normal_loc, normal_scale, quadrature_size,
    validate_args=False, name=None):
  """Use SoftmaxNormal quantiles to form quadrature on `K - 1` simplex.
  A `SoftmaxNormal` random variable `Y` may be generated via
  ```
  Y = SoftmaxCentered(X),
  X = Normal(normal_loc, normal_scale)
  ```
  Args:
    normal_loc: `float`-like `Tensor` with shape `[b1, ..., bB, K-1]`, B>=0.
      The location parameter of the Normal used to construct the SoftmaxNormal.
    normal_scale: `float`-like `Tensor`. Broadcastable with `normal_loc`.
      The scale parameter of the Normal used to construct the SoftmaxNormal.
    quadrature_size: Python `int` scalar representing the number of quadrature
      points.
    validate_args: Python `bool`, default `False`. When `True` distribution
      parameters are checked for validity despite possibly degrading runtime
      performance. When `False` invalid inputs may silently render incorrect
      outputs.
    name: Python `str` name prefixed to Ops created by this class.
  Returns:
    grid: Shape `[b1, ..., bB, K, quadrature_size]` `Tensor` representing the
      convex combination of affine parameters for `K` components.
      `grid[..., :, n]` is the `n`-th grid point, living in the `K - 1` simplex.
    probs: Shape `[b1, ..., bB, K, quadrature_size]` `Tensor` representing the
      quadrature weight associated with each grid point.
  """
  with ops.name_scope(name, "softmax_normal_grid_and_probs",
                      [normal_loc, normal_scale]):
    normal_loc = ops.convert_to_tensor(normal_loc, name="normal_loc")
    dt = normal_loc.dtype.base_dtype
    normal_scale = ops.convert_to_tensor(
        normal_scale, dtype=dt, name="normal_scale")
    normal_scale = maybe_check_quadrature_param(
        normal_scale, "normal_scale", validate_args)
    dist = normal_lib.Normal(loc=normal_loc, scale=normal_scale)
    def _get_batch_ndims():
      """Helper to get dist.batch_shape.ndims, statically if possible."""
      ndims = dist.batch_shape.ndims
      if ndims is None:
        ndims = array_ops.shape(dist.batch_shape_tensor())[0]
      return ndims
    batch_ndims = _get_batch_ndims()
    def _get_final_shape(qs):
      """Helper to build `TensorShape` `[b1, ..., bB, K, qs]`."""
      bs = dist.batch_shape.with_rank_at_least(1)
      num_components = bs[-1].value
      if num_components is not None:
        num_components += 1
      tail = tensor_shape.TensorShape([num_components, qs])
      return bs[:-1].concatenate(tail)
    def _compute_quantiles():
      """Helper to build quantiles of the SoftmaxNormal."""
      # Omit {0, 1} since they might lead to Inf/NaN.
      zero = array_ops.zeros([], dtype=dist.dtype)
      edges = math_ops.linspace(zero, 1., quadrature_size + 3)[1:-1]
      # Expand edges so its broadcast across batch dims.
      edges = array_ops.reshape(edges, shape=array_ops.concat([
          [-1], array_ops.ones([batch_ndims], dtype=dtypes.int32)], axis=0))
      quantiles = dist.quantile(edges)
      quantiles = SoftmaxCentered().forward(quantiles)
      # Cyclically permute left by one.
      perm = array_ops.concat([
          math_ops.range(1, 1 + batch_ndims), [0]], axis=0)
      quantiles = array_ops.transpose(quantiles, perm)
      quantiles.set_shape(_get_final_shape(quadrature_size + 1))
      return quantiles
    quantiles = _compute_quantiles()
    # Compute grid as quantile midpoints.
    grid = (quantiles[..., :-1] + quantiles[..., 1:]) / 2.
    # Set shape hints.
    grid.set_shape(_get_final_shape(quadrature_size))
    # By construction probs is constant, i.e., `1 / quadrature_size`. This is
    # important, because non-constant probs leads to non-reparameterizable
    # samples.
    probs = array_ops.fill(
        dims=[quadrature_size],
        value=1. / math_ops.cast(quadrature_size, dist.dtype))
    return grid, probs
class VectorDiffeomixture(distribution_lib.Distribution):
"""VectorDiffeomixture distribution.
A vector diffeomixture (VDM) is a distribution parameterized by a convex
combination of `K` component `loc` vectors, `loc[k], k = 0,...,K-1`, and `K`
`scale` matrices `scale[k], k = 0,..., K-1`. It approximates the following
[compound distribution]
(https://en.wikipedia.org/wiki/Compound_probability_distribution)
```none
p(x) = int p(x | z) p(z) dz,
where z is in the K-simplex, and
p(x | z) := p(x | loc=sum_k z[k] loc[k], scale=sum_k z[k] scale[k])
```
The integral `int p(x | z) p(z) dz` is approximated with a quadrature scheme
adapted to the mixture density `p(z)`. The `N` quadrature points `z_{N, n}`
and weights `w_{N, n}` (which are non-negative and sum to 1) are chosen
such that
```q_N(x) := sum_{n=1}^N w_{n, N} p(x | z_{N, n}) --> p(x)```
as `N --> infinity`.
Since `q_N(x)` is in fact a mixture (of `N` points), we may sample from
`q_N` exactly. It is important to note that the VDM is *defined* as `q_N`
above, and *not* `p(x)`. Therefore, sampling and pdf may be implemented as
exact (up to floating point error) methods.
A common choice for the conditional `p(x | z)` is a multivariate Normal.
The implemented marginal `p(z)` is the `SoftmaxNormal`, which is a
`K-1` dimensional Normal transformed by a `SoftmaxCentered` bijector, making
it a density on the `K`-simplex. That is,
```
Z = SoftmaxCentered(X),
X = Normal(mix_loc / temperature, 1 / temperature)
```
The default quadrature scheme chooses `z_{N, n}` as `N` midpoints of
the quantiles of `p(z)` (generalized quantiles if `K > 2`).
See [Dillon and Langmore (2018)][1] for more details.
#### About `Vector` distributions in TensorFlow.
The `VectorDiffeomixture` is a non-standard distribution that has properties
particularly useful in [variational Bayesian
methods](https://en.wikipedia.org/wiki/Variational_Bayesian_methods).
Conditioned on a draw from the SoftmaxNormal, `X|z` is a vector whose
components are linear combinations of affine transformations, thus is itself
an affine transformation.
Note: The marginals `X_1|v, ..., X_d|v` are *not* generally identical to some
parameterization of `distribution`. This is due to the fact that the sum of
draws from `distribution` are not generally itself the same `distribution`.
#### About `Diffeomixture`s and reparameterization.
The `VectorDiffeomixture` is designed to be reparameterized, i.e., its
parameters are only used to transform samples from a distribution which has no
trainable parameters. This property is important because backprop stops at
sources of stochasticity. That is, as long as the parameters are used *after*
the underlying source of stochasticity, the computed gradient is accurate.
Reparametrization means that we can use gradient-descent (via backprop) to
optimize Monte-Carlo objectives. Such objectives are a finite-sample
approximation of an expectation and arise throughout scientific computing.
WARNING: If you backprop through a VectorDiffeomixture sample and the "base"
distribution is both: not `FULLY_REPARAMETERIZED` and a function of trainable
variables, then the gradient is not guaranteed correct!
#### Examples
```python
tfd = tf.contrib.distributions
# Create two batches of VectorDiffeomixtures, one with mix_loc=[0.],
# another with mix_loc=[1]. In both cases, `K=2` and the affine
# transformations involve:
# k=0: loc=zeros(dims) scale=LinearOperatorScaledIdentity
# k=1: loc=[2.]*dims scale=LinOpDiag
dims = 5
vdm = tfd.VectorDiffeomixture(
mix_loc=[[0.], [1]],
temperature=[1.],
distribution=tfd.Normal(loc=0., scale=1.),
loc=[
None, # Equivalent to `np.zeros(dims, dtype=np.float32)`.
np.float32([2.]*dims),
],
scale=[
tf.linalg.LinearOperatorScaledIdentity(
num_rows=dims,
multiplier=np.float32(1.1),
is_positive_definite=True),
tf.linalg.LinearOperatorDiag(
diag=np.linspace(2.5, 3.5, dims, dtype=np.float32),
is_positive_definite=True),
],
validate_args=True)
```
#### References
[1]: Joshua Dillon and Ian Langmore. Quadrature Compound: An approximating
family of distributions. _arXiv preprint arXiv:1801.03080_, 2018.
https://arxiv.org/abs/1801.03080
"""
  def __init__(self,
               mix_loc,
               temperature,
               distribution,
               loc=None,
               scale=None,
               quadrature_size=8,
               quadrature_fn=quadrature_scheme_softmaxnormal_quantiles,
               validate_args=False,
               allow_nan_stats=True,
               name="VectorDiffeomixture"):
    """Constructs the VectorDiffeomixture on `R^d`.
    The vector diffeomixture (VDM) approximates the compound distribution
    ```none
    p(x) = int p(x | z) p(z) dz,
    where z is in the K-simplex, and
    p(x | z) := p(x | loc=sum_k z[k] loc[k], scale=sum_k z[k] scale[k])
    ```
    Args:
      mix_loc: `float`-like `Tensor` with shape `[b1, ..., bB, K-1]`.
        In terms of samples, larger `mix_loc[..., k]` ==>
        `Z` is more likely to put more weight on its `kth` component.
      temperature: `float`-like `Tensor`. Broadcastable with `mix_loc`.
        In terms of samples, smaller `temperature` means one component is more
        likely to dominate.  I.e., smaller `temperature` makes the VDM look more
        like a standard mixture of `K` components.
      distribution: `tf.Distribution`-like instance. Distribution from which `d`
        iid samples are used as input to the selected affine transformation.
        Must be a scalar-batch, scalar-event distribution.  Typically
        `distribution.reparameterization_type = FULLY_REPARAMETERIZED` or it is
        a function of non-trainable parameters. WARNING: If you backprop through
        a VectorDiffeomixture sample and the `distribution` is not
        `FULLY_REPARAMETERIZED` yet is a function of trainable variables, then
        the gradient will be incorrect!
      loc: Length-`K` list of `float`-type `Tensor`s. The `k`-th element
        represents the `shift` used for the `k`-th affine transformation.  If
        the `k`-th item is `None`, `loc` is implicitly `0`.  When specified,
        must have shape `[B1, ..., Bb, d]` where `b >= 0` and `d` is the event
        size.
      scale: Length-`K` list of `LinearOperator`s. Each should be
        positive-definite and operate on a `d`-dimensional vector space. The
        `k`-th element represents the `scale` used for the `k`-th affine
        transformation. `LinearOperator`s must have shape `[B1, ..., Bb, d, d]`,
        `b >= 0`, i.e., characterizes `b`-batches of `d x d` matrices
      quadrature_size: Python `int` scalar representing number of
        quadrature points.  Larger `quadrature_size` means `q_N(x)` better
        approximates `p(x)`.
      quadrature_fn: Python callable taking `normal_loc`, `normal_scale`,
        `quadrature_size`, `validate_args` and returning `tuple(grid, probs)`
        representing the SoftmaxNormal grid and corresponding (normalized)
        weights.
        Default value: `quadrature_scheme_softmaxnormal_quantiles`.
      validate_args: Python `bool`, default `False`. When `True` distribution
        parameters are checked for validity despite possibly degrading runtime
        performance. When `False` invalid inputs may silently render incorrect
        outputs.
      allow_nan_stats: Python `bool`, default `True`. When `True`,
        statistics (e.g., mean, mode, variance) use the value "`NaN`" to
        indicate the result is undefined. When `False`, an exception is raised
        if one or more of the statistic's batch members are undefined.
      name: Python `str` name prefixed to Ops created by this class.
    Raises:
      ValueError: if `not scale or len(scale) < 2`.
      ValueError: if `len(loc) != len(scale)`
      ValueError: if `quadrature_grid_and_probs is not None` and
        `len(quadrature_grid_and_probs[0]) != len(quadrature_grid_and_probs[1])`
      ValueError: if `validate_args` and any not scale.is_positive_definite.
      TypeError: if any scale.dtype != scale[0].dtype.
      TypeError: if any loc.dtype != scale[0].dtype.
      NotImplementedError: if `len(scale) != 2`.
      ValueError: if `not distribution.is_scalar_batch`.
      ValueError: if `not distribution.is_scalar_event`.
    """
    parameters = locals()
    with ops.name_scope(name, values=[mix_loc, temperature]):
      if not scale or len(scale) < 2:
        raise ValueError("Must specify list (or list-like object) of scale "
                         "LinearOperators, one for each component with "
                         "num_component >= 2.")
      if loc is None:
        loc = [None]*len(scale)
      if len(loc) != len(scale):
        raise ValueError("loc/scale must be same-length lists "
                         "(or same-length list-like objects).")
      # All components share the dtype of the first scale operator.
      dtype = scale[0].dtype.base_dtype
      loc = [ops.convert_to_tensor(loc_, dtype=dtype, name="loc{}".format(k))
             if loc_ is not None else None
             for k, loc_ in enumerate(loc)]
      for k, scale_ in enumerate(scale):
        if validate_args and not scale_.is_positive_definite:
          raise ValueError("scale[{}].is_positive_definite = {} != True".format(
              k, scale_.is_positive_definite))
        if scale_.dtype.base_dtype != dtype:
          raise TypeError(
              "dtype mismatch; scale[{}].base_dtype=\"{}\" != \"{}\"".format(
                  k, scale_.dtype.base_dtype.name, dtype.name))
      # One affine bijector per mixture component ("endpoint" of the simplex).
      self._endpoint_affine = [
          AffineLinearOperator(shift=loc_,
                               scale=scale_,
                               event_ndims=1,
                               validate_args=validate_args,
                               name="endpoint_affine_{}".format(k))
          for k, (loc_, scale_) in enumerate(zip(loc, scale))]
      # TODO(jvdillon): Remove once we support k-mixtures.
      # We make this assertion here because otherwise `grid` would need to be a
      # vector not a scalar.
      if len(scale) != 2:
        raise NotImplementedError("Currently only bimixtures are supported; "
                                  "len(scale)={} is not 2.".format(len(scale)))
      mix_loc = ops.convert_to_tensor(
          mix_loc, dtype=dtype, name="mix_loc")
      temperature = ops.convert_to_tensor(
          temperature, dtype=dtype, name="temperature")
      # Quadrature grid/weights over the simplex; temperature rescales the
      # underlying Normal as documented in the class docstring.
      self._grid, probs = tuple(quadrature_fn(
          mix_loc / temperature,
          1. / temperature,
          quadrature_size,
          validate_args))
      # Note: by creating the logits as `log(prob)` we ensure that
      # `self.mixture_distribution.logits` is equivalent to
      # `math_ops.log(self.mixture_distribution.probs)`.
      self._mixture_distribution = categorical_lib.Categorical(
          logits=math_ops.log(probs),
          validate_args=validate_args,
          allow_nan_stats=allow_nan_stats)
      asserts = distribution_util.maybe_check_scalar_distribution(
          distribution, dtype, validate_args)
      if asserts:
        self._grid = control_flow_ops.with_dependencies(
            asserts, self._grid)
      self._distribution = distribution
      # One affine bijector per quadrature grid point (convex combination of
      # the endpoint locs/scales).
      self._interpolated_affine = [
          AffineLinearOperator(shift=loc_,
                               scale=scale_,
                               event_ndims=1,
                               validate_args=validate_args,
                               name="interpolated_affine_{}".format(k))
          for k, (loc_, scale_) in enumerate(zip(
              interpolate_loc(self._grid, loc),
              interpolate_scale(self._grid, scale)))]
      [
          self._batch_shape_,
          self._batch_shape_tensor_,
          self._event_shape_,
          self._event_shape_tensor_,
      ] = determine_batch_event_shapes(self._grid,
                                       self._endpoint_affine)
      super(VectorDiffeomixture, self).__init__(
          dtype=dtype,
          # We hard-code `FULLY_REPARAMETERIZED` because when
          # `validate_args=True` we verify that indeed
          # `distribution.reparameterization_type == FULLY_REPARAMETERIZED`. A
          # distribution which is a function of only non-trainable parameters
          # also implies we can use `FULLY_REPARAMETERIZED`. However, we cannot
          # easily test for that possibility thus we use `validate_args=False`
          # as a "back-door" to allow users a way to use non
          # `FULLY_REPARAMETERIZED` distribution. In such cases IT IS THE USERS
          # RESPONSIBILITY to verify that the base distribution is a function of
          # non-trainable parameters.
          reparameterization_type=distribution_lib.FULLY_REPARAMETERIZED,
          validate_args=validate_args,
          allow_nan_stats=allow_nan_stats,
          parameters=parameters,
          graph_parents=(
              distribution._graph_parents  # pylint: disable=protected-access
              + [loc_ for loc_ in loc if loc_ is not None]
              + [p for scale_ in scale for p in scale_.graph_parents]),
          name=name)
  @property
  def mixture_distribution(self):
    """Distribution used to select a convex combination of affine transforms.

    A `Categorical` over the quadrature grid points (see `__init__`).
    """
    return self._mixture_distribution
  @property
  def distribution(self):
    """Base scalar-event, scalar-batch distribution."""
    return self._distribution
  @property
  def grid(self):
    """Grid of mixing probabilities, one for each grid point."""
    return self._grid
  @property
  def endpoint_affine(self):
    """Affine transformation for each of `K` components."""
    return self._endpoint_affine
  @property
  def interpolated_affine(self):
    """Affine transformation for each convex combination of `K` components."""
    return self._interpolated_affine
  def _batch_shape_tensor(self):
    # Precomputed in __init__ by determine_batch_event_shapes.
    return self._batch_shape_tensor_
  def _batch_shape(self):
    # Static counterpart of _batch_shape_tensor.
    return self._batch_shape_
  def _event_shape_tensor(self):
    # Precomputed in __init__ by determine_batch_event_shapes.
    return self._event_shape_tensor_
  def _event_shape(self):
    # Static counterpart of _event_shape_tensor.
    return self._event_shape_
def _sample_n(self, n, seed=None):
x = self.distribution.sample(
sample_shape=concat_vectors(
[n],
self.batch_shape_tensor(),
self.event_shape_tensor()),
seed=seed) # shape: [n, B, e]
x = [aff.forward(x) for aff in self.endpoint_affine]
# Get ids as a [n, batch_size]-shaped matrix, unless batch_shape=[] then get
# ids as a [n]-shaped vector.
batch_size = self.batch_shape.num_elements()
if batch_size is None:
batch_size = array_ops.reduce_prod(self.batch_shape_tensor())
mix_batch_size = self.mixture_distribution.batch_shape.num_elements()
if mix_batch_size is None:
mix_batch_size = math_ops.reduce_prod(
self.mixture_distribution.batch_shape_tensor())
ids = self.mixture_distribution.sample(
sample_shape=concat_vectors(
[n],
distribution_util.pick_vector(
self.is_scalar_batch(),
np.int32([]),
[batch_size // mix_batch_size])),
seed=distribution_util.gen_new_seed(
seed, "vector_diffeomixture"))
# We need to flatten batch dims in case mixture_distribution has its own
# batch dims.
ids = array_ops.reshape(ids, shape=concat_vectors(
[n],
distribution_util.pick_vector(
self.is_scalar_batch(),
np.int32([]),
np.int32([-1]))))
# Stride `components * quadrature_size` for `batch_size` number of times.
stride = self.grid.shape.with_rank_at_least(
2)[-2:].num_elements()
if stride is None:
stride = array_ops.reduce_prod(
array_ops.shape(self.grid)[-2:])
offset = math_ops.range(start=0,
limit=batch_size * stride,
delta=stride,
dtype=ids.dtype)
weight = array_ops.gather(
array_ops.reshape(self.grid, shape=[-1]),
ids + offset)
# At this point, weight flattened all batch dims into one.
# We also need to append a singleton to broadcast with event dims.
if self.batch_shape.is_fully_defined():
new_shape = [-1] + self.batch_shape.as_list() + [1]
else:
new_shape = array_ops.concat(
([-1], self.batch_shape_tensor(), [1]), axis=0)
weight = array_ops.reshape(weight, shape=new_shape)
if len(x) != 2:
# We actually should have already triggered this exception. However as a
# policy we're putting this exception wherever we exploit the bimixture
# assumption.
raise NotImplementedError("Currently only bimixtures are supported; "
"len(scale)={} is not 2.".format(len(x)))
# Alternatively:
# x = weight * x[0] + (1. - weight) * x[1]
x = weight * (x[0] - x[1]) + x[1]
return x
  def _log_prob(self, x):
    """Computes mixture log-density via change-of-variables per component."""
    # By convention, we always put the grid points right-most.
    y = array_ops.stack(
        [aff.inverse(x) for aff in self.interpolated_affine],
        axis=-1)
    # Sum over the event dimension (axis=-2): the base distribution is
    # scalar-event, so the vector event factorizes into iid coordinates.
    log_prob = math_ops.reduce_sum(self.distribution.log_prob(y), axis=-2)
    # Because the affine transformation has a constant Jacobian, it is the case
    # that `affine.fldj(x) = -affine.ildj(x)`. This is not true in general.
    fldj = array_ops.stack(
        [aff.forward_log_det_jacobian(x) for aff in self.interpolated_affine],
        axis=-1)
    # Log-sum-exp over mixture components: logits + per-component log-prob
    # minus the forward log-det-Jacobian correction.
    return math_ops.reduce_logsumexp(
        self.mixture_distribution.logits - fldj + log_prob, axis=-1)
  def _mean(self):
    """Computes the mean as a probability-weighted sum of component means."""
    p = self._expand_mix_distribution_probs()
    m = self._expand_base_distribution_mean()
    mean = None
    for k, aff in enumerate(self.interpolated_affine):
      # aff.forward is going to do this:
      # y = array_ops.squeeze(aff.scale.matmul(m), axis=[-1])
      # if aff.shift is not None:
      #   y += aff.shift
      mean = add(mean, p[..., k] * aff.forward(m))
    return mean
  def _covariance(self):
    """Computes the covariance matrix via the law of total variance."""
    # Law of total variance:
    #
    # Cov[Z] = E[Cov[Z | V]] + Cov[E[Z | V]]
    #
    # where,
    #
    # E[Cov[Z | V]] = sum_i mix_prob[i] Scale[i]
    # Cov[E[Z | V]] = sum_i mix_prob[i] osquare(loc[i])
    #               - osquare(sum_i mix_prob[i] loc[i])
    #
    # osquare(x) = x.transpose @ x
    return add(
        self._mean_of_covariance_given_quadrature_component(diag_only=False),
        self._covariance_of_mean_given_quadrature_component(diag_only=False))
  def _variance(self):
    """Computes the marginal variances (diagonal of the covariance)."""
    # Equivalent to: tf.diag_part(self._covariance()),
    # but computed without materializing the full covariance matrix.
    return add(
        self._mean_of_covariance_given_quadrature_component(diag_only=True),
        self._covariance_of_mean_given_quadrature_component(diag_only=True))
  def _mean_of_covariance_given_quadrature_component(self, diag_only):
    """Computes E[Cov(Z|V)], the first term of the law of total variance.

    Args:
      diag_only: Python `bool`; if `True`, only the diagonal of the result
        is computed and returned (shape `[..., d]` instead of `[..., d, d]`).

    Returns:
      `Tensor` with the probability-weighted sum of component scales, or
      `None` when no component contributes (all accumulators empty).
    """
    p = self.mixture_distribution.probs
    # To compute E[Cov(Z|V)], we'll add matrices within three categories:
    # scaled-identity, diagonal, and full. Then we'll combine these at the end.
    scale_identity_multiplier = None
    diag = None
    full = None
    for k, aff in enumerate(self.interpolated_affine):
      s = aff.scale  # Just in case aff.scale has side-effects, we'll call once.
      if (s is None
          or isinstance(s, linop_identity_lib.LinearOperatorIdentity)):
        scale_identity_multiplier = add(scale_identity_multiplier,
                                        p[..., k, array_ops.newaxis])
      elif isinstance(s, linop_identity_lib.LinearOperatorScaledIdentity):
        scale_identity_multiplier = add(
            scale_identity_multiplier,
            (p[..., k, array_ops.newaxis] * math_ops.square(s.multiplier)))
      elif isinstance(s, linop_diag_lib.LinearOperatorDiag):
        diag = add(diag, (p[..., k, array_ops.newaxis] *
                          math_ops.square(s.diag_part())))
      else:
        # General case: form p[k] * S S^T densely.
        x = (p[..., k, array_ops.newaxis, array_ops.newaxis] *
             s.matmul(s.to_dense(), adjoint_arg=True))
        if diag_only:
          x = array_ops.matrix_diag_part(x)
        full = add(full, x)
    # We must now account for the fact that the base distribution might have a
    # non-unity variance. Recall that, since X ~ iid Law(X_0),
    # `Cov(SX+m) = S Cov(X) S.T = S S.T Diag(Var(X_0))`.
    # We can scale by `Var(X)` (vs `Cov(X)`) since X corresponds to `d` iid
    # samples from a scalar-event distribution.
    v = self.distribution.variance()
    if scale_identity_multiplier is not None:
      scale_identity_multiplier *= v
    if diag is not None:
      diag *= v[..., array_ops.newaxis]
    if full is not None:
      full *= v[..., array_ops.newaxis]
    if diag_only:
      # Apparently we don't need the full matrix, just the diagonal.
      r = add(diag, full)
      if r is None and scale_identity_multiplier is not None:
        ones = array_ops.ones(self.event_shape_tensor(), dtype=self.dtype)
        return scale_identity_multiplier[..., array_ops.newaxis] * ones
      return add(r, scale_identity_multiplier)
    # `None` indicates we don't know if the result is positive-definite.
    is_positive_definite = (True if all(aff.scale.is_positive_definite
                                        for aff in self.endpoint_affine)
                            else None)
    # Combine the three accumulators as LinearOperators so the sum is
    # performed by `add_operators`, then densify.
    to_add = []
    if diag is not None:
      to_add.append(linop_diag_lib.LinearOperatorDiag(
          diag=diag,
          is_positive_definite=is_positive_definite))
    if full is not None:
      to_add.append(linop_full_lib.LinearOperatorFullMatrix(
          matrix=full,
          is_positive_definite=is_positive_definite))
    if scale_identity_multiplier is not None:
      to_add.append(linop_identity_lib.LinearOperatorScaledIdentity(
          num_rows=self.event_shape_tensor()[0],
          multiplier=scale_identity_multiplier,
          is_positive_definite=is_positive_definite))
    return (linop_add_lib.add_operators(to_add)[0].to_dense()
            if to_add else None)
  def _covariance_of_mean_given_quadrature_component(self, diag_only):
    """Computes Cov(E[Z|V]), the second term of the law of total variance.

    Args:
      diag_only: Python `bool`; if `True`, compute only the diagonal (uses
        elementwise square instead of the vector outer-product).

    Returns:
      `Tensor` with the probability-weighted spread of component means
      around the overall mean, or `None` if nothing was accumulated.
    """
    square = math_ops.square if diag_only else vec_osquare
    p = self._expand_mix_distribution_probs()
    if not diag_only:
      p = p[..., array_ops.newaxis, :]  # Assuming event.ndims=1.
    m = self._expand_base_distribution_mean()
    cov_e_z_given_v = None
    e_z_given_v = self._mean()
    for k, aff in enumerate(self.interpolated_affine):
      y = aff.forward(m)
      cov_e_z_given_v = add(cov_e_z_given_v,
                            p[..., k] * square(y - e_z_given_v))
    return cov_e_z_given_v
  def _expand_base_distribution_mean(self):
    """Ensures `self.distribution.mean()` has `[batch, event]` shape."""
    single_draw_shape = concat_vectors(self.batch_shape_tensor(),
                                       self.event_shape_tensor())
    # Reshape the scalar mean to a rank matching [batch, event] (all ones),
    # then tile it out to the full [batch, event] shape.
    m = array_ops.reshape(
        self.distribution.mean(),  # A scalar.
        shape=array_ops.ones_like(single_draw_shape,
                                  dtype=dtypes.int32))
    m = array_ops.tile(m, multiples=single_draw_shape)
    # Attach the static shape information lost by reshape/tile.
    m.set_shape(self.batch_shape.concatenate(self.event_shape))
    return m
  def _expand_mix_distribution_probs(self):
    """Reshapes mixture probs to broadcast against `[batch, event, deg]`."""
    p = self.mixture_distribution.probs  # [B, deg]
    # Prefer the statically known number of grid points; fall back to the
    # dynamic shape when unavailable.
    deg = p.shape.with_rank_at_least(1)[-1].value
    if deg is None:
      deg = array_ops.shape(p)[-1]
    event_ndims = self.event_shape.ndims
    if event_ndims is None:
      event_ndims = array_ops.shape(self.event_shape_tensor())[0]
    # Insert singleton event dims between the batch dims and `deg` so the
    # probs broadcast against per-event quantities.
    expand_shape = array_ops.concat([
        self.mixture_distribution.batch_shape_tensor(),
        array_ops.ones([event_ndims], dtype=dtypes.int32),
        [deg],
    ], axis=0)
    return array_ops.reshape(p, shape=expand_shape)
def maybe_check_quadrature_param(param, name, validate_args):
  """Helper which checks validity of `loc` and `scale` init args.

  Statically verifies (or, when shapes are unknown and `validate_args` is
  `True`, dynamically asserts) that `param` is a (batch of) vector whose
  right-most dimension is 1 (only bimixtures are currently supported).
  Returns `param`, possibly with control dependencies attached.
  """
  with ops.name_scope(name="check_" + name, values=[param]):
    assertions = []
    if param.shape.ndims is not None:
      if param.shape.ndims == 0:
        raise ValueError("Mixing params must be a (batch of) vector; "
                         "{}.rank={} is not at least one.".format(
                             name, param.shape.ndims))
    elif validate_args:
      # Rank unknown statically: defer to a runtime assertion.
      assertions.append(check_ops.assert_rank_at_least(
          param, 1,
          message=("Mixing params must be a (batch of) vector; "
                   "{}.rank is not at least one.".format(
                       name))))
    # TODO(jvdillon): Remove once we support k-mixtures.
    # NOTE(review): `with_rank_at_least(1)[-1]` appears to yield a Dimension
    # object, which is never `None` even when its value is unknown — so the
    # `elif validate_args` branch below may be unreachable and an unknown
    # last dim may raise here. Confirm against the TensorShape API in use.
    if param.shape.with_rank_at_least(1)[-1] is not None:
      if param.shape[-1].value != 1:
        raise NotImplementedError("Currently only bimixtures are supported; "
                                  "{}.shape[-1]={} is not 1.".format(
                                      name, param.shape[-1].value))
    elif validate_args:
      assertions.append(check_ops.assert_equal(
          array_ops.shape(param)[-1], 1,
          message=("Currently only bimixtures are supported; "
                   "{}.shape[-1] is not 1.".format(name))))
    if assertions:
      return control_flow_ops.with_dependencies(assertions, param)
    return param
def determine_batch_event_shapes(grid, endpoint_affine):
  """Helper to infer batch_shape and event_shape.

  Broadcasts the batch shape of `grid` against the batch shapes of each
  endpoint affine transform (its `shift` and `scale`), and infers the event
  shape from the transforms' right-most dimensions.

  Returns:
    Tuple of `(batch_shape, batch_shape_tensor, event_shape,
    event_shape_tensor)` — static `TensorShape`s plus dynamic counterparts.
  """
  with ops.name_scope(name="determine_batch_event_shapes"):
    # grid  # shape: [B, k, q]
    # endpoint_affine  # len=k, shape: [B, d, d]
    batch_shape = grid.shape[:-2]
    batch_shape_tensor = array_ops.shape(grid)[:-2]
    event_shape = None
    event_shape_tensor = None
    # Folds a newly observed event shape into the running broadcast; reads
    # `event_shape`/`event_shape_tensor` from the enclosing scope.
    def _set_event_shape(shape, shape_tensor):
      if event_shape is None:
        return shape, shape_tensor
      return (array_ops.broadcast_static_shape(event_shape, shape),
              array_ops.broadcast_dynamic_shape(
                  event_shape_tensor, shape_tensor))
    for aff in endpoint_affine:
      if aff.shift is not None:
        batch_shape = array_ops.broadcast_static_shape(
            batch_shape, aff.shift.shape[:-1])
        batch_shape_tensor = array_ops.broadcast_dynamic_shape(
            batch_shape_tensor, array_ops.shape(aff.shift)[:-1])
        event_shape, event_shape_tensor = _set_event_shape(
            aff.shift.shape[-1:], array_ops.shape(aff.shift)[-1:])
      if aff.scale is not None:
        batch_shape = array_ops.broadcast_static_shape(
            batch_shape, aff.scale.batch_shape)
        batch_shape_tensor = array_ops.broadcast_dynamic_shape(
            batch_shape_tensor, aff.scale.batch_shape_tensor())
        event_shape, event_shape_tensor = _set_event_shape(
            tensor_shape.TensorShape([aff.scale.range_dimension]),
            aff.scale.range_dimension_tensor()[array_ops.newaxis])
    return batch_shape, batch_shape_tensor, event_shape, event_shape_tensor
def interpolate_loc(grid, loc):
  """Helper which interpolates between two locs.

  Args:
    grid: `Tensor` of interpolation weights with right-most dims `[k, deg]`.
    loc: Python length-2 list of (possibly `None`) location vectors; `None`
      is treated as an all-zeros loc.

  Returns:
    Python list of `deg` interpolated locs (or `[None] * deg` when both
    endpoint locs are `None`).

  Raises:
    NotImplementedError: if `len(loc) != 2` (only bimixtures are supported).
    ValueError: if the number of quadrature grid points is not statically
      known.
  """
  if len(loc) != 2:
    # Bug fix: the message previously said `len(scale)`; this check is on
    # `loc`.
    raise NotImplementedError("Currently only bimixtures are supported; "
                              "len(loc)={} is not 2.".format(len(loc)))
  deg = grid.shape.with_rank_at_least(1)[-1].value
  if deg is None:
    raise ValueError("Num quadrature grid points must be known prior "
                     "to graph execution.")
  with ops.name_scope("interpolate_loc", values=[grid, loc]):
    if loc is None or (loc[0] is None and loc[1] is None):
      return [None]*deg
    # shape: [B, 1, k, deg]
    w = grid[..., array_ops.newaxis, :, :]
    loc = [x[..., array_ops.newaxis]  # shape: [B, e, 1]
           if x is not None else None for x in loc]
    if loc[0] is None:
      x = w[..., 1, :] * loc[1]  # shape: [B, e, deg]
    elif loc[1] is None:
      x = w[..., 0, :] * loc[0]  # shape: [B, e, deg]
    else:
      # Written as w0 * (loc0 - loc1) + loc1 == w0 * loc0 + (1 - w0) * loc1.
      delta = loc[0] - loc[1]
      x = w[..., 0, :] * delta + loc[1]  # shape: [B, e, deg]
    return [x[..., k] for k in range(deg)]  # list(shape:[B, e])
def interpolate_scale(grid, scale):
  """Helper which interpolates between two scales.

  For each of the `deg` grid points, forms the weighted sum
  `grid[..., 0, q] * scale[0] + grid[..., 1, q] * scale[1]` as a single
  `LinearOperator` (via `linop_scale` and `add_operators`).

  Raises:
    NotImplementedError: if `len(scale) != 2` (only bimixtures supported).
    ValueError: if the number of grid points is not statically known.
  """
  if len(scale) != 2:
    raise NotImplementedError("Currently only bimixtures are supported; "
                              "len(scale)={} is not 2.".format(len(scale)))
  deg = grid.shape.with_rank_at_least(1)[-1].value
  if deg is None:
    raise ValueError("Num quadrature grid points must be known prior "
                     "to graph execution.")
  with ops.name_scope("interpolate_scale", values=[grid]):
    return [linop_add_lib.add_operators([
        linop_scale(grid[..., k, q], s)
        for k, s in enumerate(scale)
    ])[0] for q in range(deg)]
def linop_scale(w, op):
  """Returns a `LinearOperator` representing `w * op`.

  Dispatches on the concrete operator type to build the scaled operator
  directly (identity, scaled identity, diag, lower triangular); other
  operator types are not supported.
  """
  # We assume w > 0. (This assumption only relates to the is_* attributes.)
  with ops.name_scope("linop_scale", values=[w]):
    # TODO(b/35301104): LinearOperatorComposition doesn't combine operators, so
    # special case combinations here. Once it does, this function can be
    # replaced by:
    #     return linop_composition_lib.LinearOperatorComposition([
    #         scaled_identity(w), op])
    def scaled_identity(w):
      return linop_identity_lib.LinearOperatorScaledIdentity(
          num_rows=op.range_dimension_tensor(),
          multiplier=w,
          is_non_singular=op.is_non_singular,
          is_self_adjoint=op.is_self_adjoint,
          is_positive_definite=op.is_positive_definite)
    if isinstance(op, linop_identity_lib.LinearOperatorIdentity):
      return scaled_identity(w)
    if isinstance(op, linop_identity_lib.LinearOperatorScaledIdentity):
      return scaled_identity(w * op.multiplier)
    if isinstance(op, linop_diag_lib.LinearOperatorDiag):
      return linop_diag_lib.LinearOperatorDiag(
          diag=w[..., array_ops.newaxis] * op.diag_part(),
          is_non_singular=op.is_non_singular,
          is_self_adjoint=op.is_self_adjoint,
          is_positive_definite=op.is_positive_definite)
    if isinstance(op, linop_tril_lib.LinearOperatorLowerTriangular):
      return linop_tril_lib.LinearOperatorLowerTriangular(
          tril=w[..., array_ops.newaxis, array_ops.newaxis] * op.to_dense(),
          is_non_singular=op.is_non_singular,
          is_self_adjoint=op.is_self_adjoint,
          is_positive_definite=op.is_positive_definite)
    raise NotImplementedError(
        "Unsupported Linop type ({})".format(type(op).__name__))
def concat_vectors(*args):
  """Concatenates the given vectors, statically when every value is known.

  Returns a flat Python list when all inputs have statically known values;
  otherwise falls back to a dynamic `concat` op.
  """
  static_vals = [distribution_util.static_value(arg) for arg in args]
  if any(v is None for v in static_vals):
    # At least one input is only known at graph execution time.
    return array_ops.concat(args, axis=0)
  flattened = []
  for vec in static_vals:
    flattened.extend(vec)
  return flattened
def add(x, y):
  """Sums the two operands, treating `None` as an additive zero."""
  if x is not None and y is not None:
    return x + y
  return x if y is None else y
def vec_osquare(x):
  """Computes the outer-product of a (batch of) vector, i.e., x.T x."""
  column = x[..., :, array_ops.newaxis]
  row = x[..., array_ops.newaxis, :]
  return column * row
def softmax(x, axis, name=None):
  """Equivalent to tf.nn.softmax but works around b/70297725.

  Args:
    x: Floating-point `Tensor`.
    axis: Python `int` or scalar int `Tensor`; the axis to normalize over.
      Negative values count from the end.
    name: Python `str`, optional name for the op scope.

  Returns:
    `Tensor` with the same shape as `x`, softmax-normalized along `axis`.
  """
  with ops.name_scope(name, "softmax", [x, axis]):
    x = ops.convert_to_tensor(x, name="x")
    ndims = (x.shape.ndims if x.shape.ndims is not None
             else array_ops.rank(x, name="ndims"))
    axis = ops.convert_to_tensor(axis, dtype=dtypes.int32, name="axis")
    axis_ = tensor_util.constant_value(axis)
    if axis_ is not None:
      # Statically known axis: resolve negatives to a Python int here.
      # Bug fix: `np.int` is a deprecated alias of the builtin `int` and was
      # removed in NumPy 1.24; use `int` directly.
      axis = int(ndims + axis_ if axis_ < 0 else axis_)
    else:
      # Dynamic axis: normalize negative values at graph-execution time.
      axis = array_ops.where(axis < 0, ndims + axis, axis)
    return nn_ops.softmax(x, axis=axis)
| {
"pile_set_name": "Github"
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.