text (string, lengths 2–100k) | meta (dict)
---|---|
op {
name: "TensorListGetItem"
input_arg {
name: "input_handle"
type: DT_VARIANT
}
input_arg {
name: "index"
type: DT_INT32
}
input_arg {
name: "element_shape"
type: DT_INT32
}
output_arg {
name: "item"
type_attr: "element_dtype"
}
attr {
name: "element_dtype"
type: "type"
}
}
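# Illustrative note (not part of the original op registration): this OpDef is the raw
# registration behind the TensorList item accessor. In the Python API it is reachable
# through tf.raw_ops with exactly the argument names declared above. A rough sketch,
# assuming TensorFlow 2.x and a float32 list:
#
#   import tensorflow as tf
#   h = tf.raw_ops.TensorListFromTensor(tensor=tf.constant([1.0, 2.0]), element_shape=[])
#   item = tf.raw_ops.TensorListGetItem(
#       input_handle=h, index=0, element_shape=[], element_dtype=tf.float32)  # -> 1.0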
| {
"pile_set_name": "Github"
} |
// Boost.Geometry (aka GGL, Generic Geometry Library)
// Copyright (c) 2007-2012 Barend Gehrels, Amsterdam, the Netherlands.
// Copyright (c) 2008-2012 Bruno Lalande, Paris, France.
// Copyright (c) 2009-2012 Mateusz Loskot, London, UK.
// Parts of Boost.Geometry are redesigned from Geodan's Geographic Library
// (geolib/GGL), copyright (c) 1995-2010 Geodan, Amsterdam, the Netherlands.
// Use, modification and distribution is subject to the Boost Software License,
// Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
#ifndef BOOST_GEOMETRY_GEOMETRIES_BOX_HPP
#define BOOST_GEOMETRY_GEOMETRIES_BOX_HPP
#include <cstddef>
#include <boost/concept/assert.hpp>
#include <boost/config.hpp>
#include <boost/geometry/algorithms/convert.hpp>
#include <boost/geometry/geometries/concepts/point_concept.hpp>
#if defined(BOOST_GEOMETRY_ENABLE_ACCESS_DEBUGGING)
#include <boost/geometry/core/assert.hpp>
#endif
namespace boost { namespace geometry
{
namespace model
{
/*!
\brief Class box: defines a box made of two describing points
\ingroup geometries
\details Box is always described by a min_corner() and a max_corner() point. If another
rectangle is used, use linear_ring or polygon.
\note Boxes are for selections and for calculating the envelope of geometries. Not all algorithms
are implemented for box. Boxes are also used in Spatial Indexes.
\tparam Point point type. The box takes a point type as template parameter.
The point type can be any point type.
It can be 2D but can also be 3D or more dimensional.
The box can also take a latlong point type as template parameter.
\qbk{[include reference/geometries/box.qbk]}
\qbk{before.synopsis, [heading Model of]}
\qbk{before.synopsis, [link geometry.reference.concepts.concept_box Box Concept]}
*/
template<typename Point>
class box
{
BOOST_CONCEPT_ASSERT( (concepts::Point<Point>) );
public:
#if !defined(BOOST_GEOMETRY_ENABLE_ACCESS_DEBUGGING)
#if !defined(BOOST_NO_CXX11_DEFAULTED_FUNCTIONS)
/// \constructor_default_no_init
box() = default;
#else
/// \constructor_default_no_init
inline box()
{}
#endif
#else // defined(BOOST_GEOMETRY_ENABLE_ACCESS_DEBUGGING)
inline box()
{
m_created = 1;
}
~box()
{
m_created = 0;
}
#endif
/*!
\brief Constructor taking the minimum corner point and the maximum corner point
*/
inline box(Point const& min_corner, Point const& max_corner)
{
geometry::convert(min_corner, m_min_corner);
geometry::convert(max_corner, m_max_corner);
#if defined(BOOST_GEOMETRY_ENABLE_ACCESS_DEBUGGING)
m_created = 1;
#endif
}
inline Point const& min_corner() const
{
#if defined(BOOST_GEOMETRY_ENABLE_ACCESS_DEBUGGING)
BOOST_GEOMETRY_ASSERT(m_created == 1);
#endif
return m_min_corner;
}
inline Point const& max_corner() const
{
#if defined(BOOST_GEOMETRY_ENABLE_ACCESS_DEBUGGING)
BOOST_GEOMETRY_ASSERT(m_created == 1);
#endif
return m_max_corner;
}
inline Point& min_corner()
{
#if defined(BOOST_GEOMETRY_ENABLE_ACCESS_DEBUGGING)
BOOST_GEOMETRY_ASSERT(m_created == 1);
#endif
return m_min_corner;
}
inline Point& max_corner()
{
#if defined(BOOST_GEOMETRY_ENABLE_ACCESS_DEBUGGING)
BOOST_GEOMETRY_ASSERT(m_created == 1);
#endif
return m_max_corner;
}
private:
Point m_min_corner;
Point m_max_corner;
#if defined(BOOST_GEOMETRY_ENABLE_ACCESS_DEBUGGING)
int m_created;
#endif
};
} // namespace model
// Traits specializations for box above
#ifndef DOXYGEN_NO_TRAITS_SPECIALIZATIONS
namespace traits
{
template <typename Point>
struct tag<model::box<Point> >
{
typedef box_tag type;
};
template <typename Point>
struct point_type<model::box<Point> >
{
typedef Point type;
};
template <typename Point, std::size_t Dimension>
struct indexed_access<model::box<Point>, min_corner, Dimension>
{
typedef typename geometry::coordinate_type<Point>::type coordinate_type;
static inline coordinate_type get(model::box<Point> const& b)
{
return geometry::get<Dimension>(b.min_corner());
}
static inline void set(model::box<Point>& b, coordinate_type const& value)
{
geometry::set<Dimension>(b.min_corner(), value);
}
};
template <typename Point, std::size_t Dimension>
struct indexed_access<model::box<Point>, max_corner, Dimension>
{
typedef typename geometry::coordinate_type<Point>::type coordinate_type;
static inline coordinate_type get(model::box<Point> const& b)
{
return geometry::get<Dimension>(b.max_corner());
}
static inline void set(model::box<Point>& b, coordinate_type const& value)
{
geometry::set<Dimension>(b.max_corner(), value);
}
};
} // namespace traits
#endif // DOXYGEN_NO_TRAITS_SPECIALIZATIONS
}} // namespace boost::geometry
#endif // BOOST_GEOMETRY_GEOMETRIES_BOX_HPP
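// Illustrative usage sketch (not part of the original header). It assumes the usual
// Boost.Geometry point header <boost/geometry/geometries/point_xy.hpp> is available,
// and shows corner access both through the member functions and through the
// indexed_access traits specializations defined above:
//
//   namespace bg = boost::geometry;
//   using point = bg::model::d2::point_xy<double>;
//
//   bg::model::box<point> b(point(0.0, 0.0), point(2.0, 3.0));
//
//   double min_x = bg::get<0>(b.min_corner());      // member access: 0.0
//   double max_y = bg::get<bg::max_corner, 1>(b);   // indexed access via traits: 3.0
//   bg::set<bg::min_corner, 0>(b, -1.0);            // move the min corner's x to -1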
| {
"pile_set_name": "Github"
} |
package runtime
import (
"bufio"
"fmt"
"os"
"path/filepath"
"strconv"
"strings"
"time"
"golang.org/x/net/context"
"google.golang.org/grpc"
v1 "k8s.io/api/core/v1"
criapi "k8s.io/cri-api/pkg/apis/runtime/v1alpha2"
"k8s.io/klog"
"k8s.io/kubectl/pkg/util/qos"
"tkestack.io/gpu-manager/pkg/services/watchdog"
"tkestack.io/gpu-manager/pkg/types"
"tkestack.io/gpu-manager/pkg/utils"
"tkestack.io/gpu-manager/pkg/utils/cgroup"
)
type ContainerRuntimeInterface interface {
// GetPidsInContainers returns the pids of the processes running in the given container
GetPidsInContainers(containerID string) ([]int, error)
// InspectContainer returns the container status for the given container ID
InspectContainer(containerID string) (*criapi.ContainerStatus, error)
// RuntimeName returns the container runtime name
RuntimeName() string
}
type containerRuntimeManager struct {
cgroupDriver string
runtimeName string
requestTimeout time.Duration
client criapi.RuntimeServiceClient
}
var _ ContainerRuntimeInterface = (*containerRuntimeManager)(nil)
var (
containerRoot = cgroup.NewCgroupName([]string{}, "kubepods")
)
func NewContainerRuntimeManager(cgroupDriver, endpoint string, requestTimeout time.Duration) (*containerRuntimeManager, error) {
dialOptions := []grpc.DialOption{grpc.WithInsecure(), grpc.WithDialer(utils.UnixDial), grpc.WithBlock(), grpc.WithTimeout(time.Second * 5)}
conn, err := grpc.Dial(endpoint, dialOptions...)
if err != nil {
return nil, err
}
client := criapi.NewRuntimeServiceClient(conn)
m := &containerRuntimeManager{
cgroupDriver: cgroupDriver,
client: client,
requestTimeout: requestTimeout,
}
ctx, cancel := context.WithTimeout(context.Background(), m.requestTimeout)
defer cancel()
resp, err := client.Version(ctx, &criapi.VersionRequest{Version: "0.1.0"})
if err != nil {
return nil, err
}
klog.V(2).Infof("Container runtime is %s", resp.RuntimeName)
m.runtimeName = resp.RuntimeName
return m, nil
}
func (m *containerRuntimeManager) GetPidsInContainers(containerID string) ([]int, error) {
req := &criapi.ContainerStatusRequest{
ContainerId: containerID,
}
ctx, cancel := context.WithTimeout(context.Background(), m.requestTimeout)
defer cancel()
resp, err := m.client.ContainerStatus(ctx, req)
if err != nil {
klog.Errorf("can't get container %s status, %v", containerID, err)
return nil, err
}
ns := resp.Status.Labels[types.PodNamespaceLabelKey]
podName := resp.Status.Labels[types.PodNameLabelKey]
pod, err := watchdog.GetPod(ns, podName)
if err != nil {
klog.Errorf("can't get pod %s/%s, %v", ns, podName, err)
return nil, err
}
cgroupPath, err := m.getCgroupName(pod, containerID)
if err != nil {
klog.Errorf("can't get cgroup parent, %v", err)
return nil, err
}
pids := make([]int, 0)
baseDir := filepath.Clean(filepath.Join(types.CGROUP_BASE, cgroupPath))
filepath.Walk(baseDir, func(path string, info os.FileInfo, err error) error {
	// Guard against walk errors: info can be nil when a path could not be visited.
	if err != nil || info == nil {
		return nil
	}
	if info.IsDir() || info.Name() != types.CGROUP_PROCS {
		return nil
	}
p, err := readProcsFile(path)
if err == nil {
pids = append(pids, p...)
}
return nil
})
return pids, nil
}
func readProcsFile(file string) ([]int, error) {
f, err := os.Open(file)
if err != nil {
klog.Errorf("can't read %s, %v", file, err)
return nil, nil
}
defer f.Close()
scanner := bufio.NewScanner(f)
pids := make([]int, 0)
for scanner.Scan() {
line := scanner.Text()
if pid, err := strconv.Atoi(line); err == nil {
pids = append(pids, pid)
}
}
klog.V(4).Infof("Read from %s, pids: %v", file, pids)
return pids, nil
}
func (m *containerRuntimeManager) getCgroupName(pod *v1.Pod, containerID string) (string, error) {
podQos := qos.GetPodQOS(pod)
var parentContainer cgroup.CgroupName
switch podQos {
case v1.PodQOSGuaranteed:
parentContainer = cgroup.NewCgroupName(containerRoot)
case v1.PodQOSBurstable:
parentContainer = cgroup.NewCgroupName(containerRoot, strings.ToLower(string(v1.PodQOSBurstable)))
case v1.PodQOSBestEffort:
parentContainer = cgroup.NewCgroupName(containerRoot, strings.ToLower(string(v1.PodQOSBestEffort)))
}
podContainer := types.PodCgroupNamePrefix + string(pod.UID)
cgroupName := cgroup.NewCgroupName(parentContainer, podContainer)
switch m.cgroupDriver {
case "systemd":
return fmt.Sprintf("%s/%s-%s.scope", cgroupName.ToSystemd(), m.runtimeName, containerID), nil
case "cgroupfs":
return fmt.Sprintf("%s/%s", cgroupName.ToCgroupfs(), containerID), nil
default:
}
return "", fmt.Errorf("unsupported cgroup driver")
}
func (m *containerRuntimeManager) InspectContainer(containerID string) (*criapi.ContainerStatus, error) {
req := &criapi.ContainerStatusRequest{
ContainerId: containerID,
}
ctx, cancel := context.WithTimeout(context.Background(), m.requestTimeout)
defer cancel()
resp, err := m.client.ContainerStatus(ctx, req)
if err != nil {
return nil, err
}
return resp.Status, nil
}
func (m *containerRuntimeManager) RuntimeName() string { return m.runtimeName }
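
// exampleUsage is an illustrative sketch, not part of the original file: it shows how a
// caller might wire up the manager and list the pids of a container. The cgroup driver
// and CRI endpoint below are assumptions; use whatever the kubelet on the node is
// configured with, and pass a real container ID reported by the runtime.
func exampleUsage() {
	m, err := NewContainerRuntimeManager("systemd", "/run/containerd/containerd.sock", 10*time.Second)
	if err != nil {
		klog.Fatalf("create container runtime manager: %v", err)
	}
	klog.Infof("talking to container runtime %q", m.RuntimeName())

	// "CONTAINER_ID" is a placeholder, not a real ID.
	pids, err := m.GetPidsInContainers("CONTAINER_ID")
	if err != nil {
		klog.Errorf("list pids of container: %v", err)
		return
	}
	klog.Infof("container pids: %v", pids)
}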
| {
"pile_set_name": "Github"
} |
/**
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
*/
package com.microsoft.azure.management.network.v2020_05_01;
import java.util.Collection;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.microsoft.rest.ExpandableStringEnum;
/**
* Defines values for RoutingState.
*/
public final class RoutingState extends ExpandableStringEnum<RoutingState> {
/** Static value None for RoutingState. */
public static final RoutingState NONE = fromString("None");
/** Static value Provisioned for RoutingState. */
public static final RoutingState PROVISIONED = fromString("Provisioned");
/** Static value Provisioning for RoutingState. */
public static final RoutingState PROVISIONING = fromString("Provisioning");
/** Static value Failed for RoutingState. */
public static final RoutingState FAILED = fromString("Failed");
/**
* Creates or finds a RoutingState from its string representation.
* @param name a name to look for
* @return the corresponding RoutingState
*/
@JsonCreator
public static RoutingState fromString(String name) {
return fromString(name, RoutingState.class);
}
/**
* @return known RoutingState values
*/
public static Collection<RoutingState> values() {
return values(RoutingState.class);
}
}
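// Illustrative usage (not part of the generated file): like the other ExpandableStringEnum
// types in this SDK, fromString returns a cached canonical instance, so values obtained
// from the service can be compared against the constants above with equals:
//
//   RoutingState state = RoutingState.fromString("Provisioned");
//   if (RoutingState.PROVISIONED.equals(state)) {
//       System.out.println("known routing states: " + RoutingState.values());
//   }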
| {
"pile_set_name": "Github"
} |
<?php
return [
'title' => 'Entretien d\'actifs',
'asset_name' => 'Désignation de l\'item',
'is_warranty' => 'Garantie',
'dl_csv' => 'Télécharger en CSV'
];
| {
"pile_set_name": "Github"
} |
255 250 250 snow
248 248 255 ghost white
248 248 255 GhostWhite
245 245 245 white smoke
245 245 245 WhiteSmoke
220 220 220 gainsboro
255 250 240 floral white
255 250 240 FloralWhite
253 245 230 old lace
253 245 230 OldLace
250 240 230 linen
250 235 215 antique white
250 235 215 AntiqueWhite
255 239 213 papaya whip
255 239 213 PapayaWhip
255 235 205 blanched almond
255 235 205 BlanchedAlmond
255 228 196 bisque
255 218 185 peach puff
255 218 185 PeachPuff
255 222 173 navajo white
255 222 173 NavajoWhite
255 228 181 moccasin
255 248 220 cornsilk
255 255 240 ivory
255 250 205 lemon chiffon
255 250 205 LemonChiffon
255 245 238 seashell
240 255 240 honeydew
245 255 250 mint cream
245 255 250 MintCream
240 255 255 azure
240 248 255 alice blue
240 248 255 AliceBlue
230 230 250 lavender
255 240 245 lavender blush
255 240 245 LavenderBlush
255 228 225 misty rose
255 228 225 MistyRose
255 255 255 white
0 0 0 black
47 79 79 dark slate gray
47 79 79 DarkSlateGray
47 79 79 dark slate grey
47 79 79 DarkSlateGrey
105 105 105 dim gray
105 105 105 DimGray
105 105 105 dim grey
105 105 105 DimGrey
112 128 144 slate gray
112 128 144 SlateGray
112 128 144 slate grey
112 128 144 SlateGrey
119 136 153 light slate gray
119 136 153 LightSlateGray
119 136 153 light slate grey
119 136 153 LightSlateGrey
169 169 169 darkGray
169 169 169 dark gray
169 169 169 darkGrey
169 169 169 dark grey
190 190 190 gray
190 190 190 grey
211 211 211 light grey
211 211 211 LightGrey
211 211 211 light gray
211 211 211 LightGray
25 25 112 midnight blue
25 25 112 MidnightBlue
0 0 128 navy
0 0 128 navy blue
0 0 128 NavyBlue
100 149 237 cornflower blue
100 149 237 CornflowerBlue
72 61 139 dark slate blue
72 61 139 DarkSlateBlue
106 90 205 slate blue
106 90 205 SlateBlue
123 104 238 medium slate blue
123 104 238 MediumSlateBlue
132 112 255 light slate blue
132 112 255 LightSlateBlue
0 0 205 medium blue
0 0 205 MediumBlue
65 105 225 royal blue
65 105 225 RoyalBlue
0 0 255 blue
0 0 139 dark blue
0 0 139 darkBlue
30 144 255 dodger blue
30 144 255 DodgerBlue
0 191 255 deep sky blue
0 191 255 DeepSkyBlue
135 206 235 sky blue
135 206 235 SkyBlue
135 206 250 light sky blue
135 206 250 LightSkyBlue
70 130 180 steel blue
70 130 180 SteelBlue
176 196 222 light steel blue
176 196 222 LightSteelBlue
173 216 230 light blue
173 216 230 LightBlue
176 224 230 powder blue
176 224 230 PowderBlue
175 238 238 pale turquoise
175 238 238 PaleTurquoise
0 206 209 dark turquoise
0 206 209 DarkTurquoise
72 209 204 medium turquoise
72 209 204 MediumTurquoise
64 224 208 turquoise
0 255 255 cyan
0 139 139 dark cyan
0 139 139 darkCyan
224 255 255 light cyan
224 255 255 LightCyan
95 158 160 cadet blue
95 158 160 CadetBlue
102 205 170 medium aquamarine
102 205 170 MediumAquamarine
127 255 212 aquamarine
0 100 0 dark green
0 100 0 DarkGreen
144 238 144 light green
144 238 144 lightGreen
85 107 47 dark olive green
85 107 47 DarkOliveGreen
143 188 143 dark sea green
143 188 143 DarkSeaGreen
46 139 87 sea green
46 139 87 SeaGreen
60 179 113 medium sea green
60 179 113 MediumSeaGreen
32 178 170 light sea green
32 178 170 LightSeaGreen
152 251 152 pale green
152 251 152 PaleGreen
0 255 127 spring green
0 255 127 SpringGreen
124 252 0 lawn green
124 252 0 LawnGreen
0 255 0 green
127 255 0 chartreuse
0 250 154 medium spring green
0 250 154 MediumSpringGreen
173 255 47 green yellow
173 255 47 GreenYellow
50 205 50 lime green
50 205 50 LimeGreen
154 205 50 yellow green
154 205 50 YellowGreen
34 139 34 forest green
34 139 34 ForestGreen
107 142 35 olive drab
107 142 35 OliveDrab
189 183 107 dark khaki
189 183 107 DarkKhaki
240 230 140 khaki
238 232 170 pale goldenrod
238 232 170 PaleGoldenrod
250 250 210 light goldenrod yellow
250 250 210 LightGoldenrodYellow
255 255 224 light yellow
255 255 224 LightYellow
255 255 0 yellow
255 215 0 gold
238 221 130 light goldenrod
238 221 130 LightGoldenrod
218 165 32 goldenrod
184 134 11 dark goldenrod
184 134 11 DarkGoldenrod
188 143 143 rosy brown
188 143 143 RosyBrown
205 92 92 indian red
205 92 92 IndianRed
139 69 19 saddle brown
139 69 19 SaddleBrown
160 82 45 sienna
205 133 63 peru
222 184 135 burlywood
245 245 220 beige
245 222 179 wheat
244 164 96 sandy brown
244 164 96 SandyBrown
210 180 140 tan
210 105 30 chocolate
178 34 34 firebrick
165 42 42 brown
233 150 122 dark salmon
233 150 122 DarkSalmon
250 128 114 salmon
255 160 122 light salmon
255 160 122 LightSalmon
255 165 0 orange
255 140 0 dark orange
255 140 0 DarkOrange
255 127 80 coral
240 128 128 light coral
240 128 128 LightCoral
255 99 71 tomato
255 69 0 orange red
255 69 0 OrangeRed
255 0 0 red
139 0 0 dark red
139 0 0 darkRed
255 105 180 hot pink
255 105 180 HotPink
255 20 147 deep pink
255 20 147 DeepPink
255 192 203 pink
255 182 193 light pink
255 182 193 LightPink
219 112 147 pale violet red
219 112 147 PaleVioletRed
176 48 96 maroon
199 21 133 medium violet red
199 21 133 MediumVioletRed
208 32 144 violet red
208 32 144 VioletRed
255 0 255 magenta
139 0 139 dark magenta
139 0 139 darkMagenta
238 130 238 violet
221 160 221 plum
218 112 214 orchid
186 85 211 medium orchid
186 85 211 MediumOrchid
153 50 204 dark orchid
153 50 204 DarkOrchid
148 0 211 dark violet
148 0 211 DarkViolet
138 43 226 blue violet
138 43 226 BlueViolet
160 32 240 purple
147 112 219 medium purple
147 112 219 MediumPurple
216 191 216 thistle
255 250 250 snow1
238 233 233 snow2
205 201 201 snow3
139 137 137 snow4
255 245 238 seashell1
238 229 222 seashell2
205 197 191 seashell3
139 134 130 seashell4
255 239 219 AntiqueWhite1
238 223 204 AntiqueWhite2
205 192 176 AntiqueWhite3
139 131 120 AntiqueWhite4
255 228 196 bisque1
238 213 183 bisque2
205 183 158 bisque3
139 125 107 bisque4
255 218 185 PeachPuff1
238 203 173 PeachPuff2
205 175 149 PeachPuff3
139 119 101 PeachPuff4
255 222 173 NavajoWhite1
238 207 161 NavajoWhite2
205 179 139 NavajoWhite3
139 121 94 NavajoWhite4
255 250 205 LemonChiffon1
238 233 191 LemonChiffon2
205 201 165 LemonChiffon3
139 137 112 LemonChiffon4
255 248 220 cornsilk1
238 232 205 cornsilk2
205 200 177 cornsilk3
139 136 120 cornsilk4
255 255 240 ivory1
238 238 224 ivory2
205 205 193 ivory3
139 139 131 ivory4
240 255 240 honeydew1
224 238 224 honeydew2
193 205 193 honeydew3
131 139 131 honeydew4
255 240 245 LavenderBlush1
238 224 229 LavenderBlush2
205 193 197 LavenderBlush3
139 131 134 LavenderBlush4
255 228 225 MistyRose1
238 213 210 MistyRose2
205 183 181 MistyRose3
139 125 123 MistyRose4
240 255 255 azure1
224 238 238 azure2
193 205 205 azure3
131 139 139 azure4
131 111 255 SlateBlue1
122 103 238 SlateBlue2
105 89 205 SlateBlue3
71 60 139 SlateBlue4
72 118 255 RoyalBlue1
67 110 238 RoyalBlue2
58 95 205 RoyalBlue3
39 64 139 RoyalBlue4
0 0 255 blue1
0 0 238 blue2
0 0 205 blue3
0 0 139 blue4
30 144 255 DodgerBlue1
28 134 238 DodgerBlue2
24 116 205 DodgerBlue3
16 78 139 DodgerBlue4
99 184 255 SteelBlue1
92 172 238 SteelBlue2
79 148 205 SteelBlue3
54 100 139 SteelBlue4
0 191 255 DeepSkyBlue1
0 178 238 DeepSkyBlue2
0 154 205 DeepSkyBlue3
0 104 139 DeepSkyBlue4
135 206 255 SkyBlue1
126 192 238 SkyBlue2
108 166 205 SkyBlue3
74 112 139 SkyBlue4
176 226 255 LightSkyBlue1
164 211 238 LightSkyBlue2
141 182 205 LightSkyBlue3
96 123 139 LightSkyBlue4
198 226 255 SlateGray1
185 211 238 SlateGray2
159 182 205 SlateGray3
108 123 139 SlateGray4
202 225 255 LightSteelBlue1
188 210 238 LightSteelBlue2
162 181 205 LightSteelBlue3
110 123 139 LightSteelBlue4
191 239 255 LightBlue1
178 223 238 LightBlue2
154 192 205 LightBlue3
104 131 139 LightBlue4
224 255 255 LightCyan1
209 238 238 LightCyan2
180 205 205 LightCyan3
122 139 139 LightCyan4
187 255 255 PaleTurquoise1
174 238 238 PaleTurquoise2
150 205 205 PaleTurquoise3
102 139 139 PaleTurquoise4
152 245 255 CadetBlue1
142 229 238 CadetBlue2
122 197 205 CadetBlue3
83 134 139 CadetBlue4
0 245 255 turquoise1
0 229 238 turquoise2
0 197 205 turquoise3
0 134 139 turquoise4
0 255 255 cyan1
0 238 238 cyan2
0 205 205 cyan3
0 139 139 cyan4
151 255 255 DarkSlateGray1
141 238 238 DarkSlateGray2
121 205 205 DarkSlateGray3
82 139 139 DarkSlateGray4
127 255 212 aquamarine1
118 238 198 aquamarine2
102 205 170 aquamarine3
69 139 116 aquamarine4
193 255 193 DarkSeaGreen1
180 238 180 DarkSeaGreen2
155 205 155 DarkSeaGreen3
105 139 105 DarkSeaGreen4
84 255 159 SeaGreen1
78 238 148 SeaGreen2
67 205 128 SeaGreen3
46 139 87 SeaGreen4
154 255 154 PaleGreen1
144 238 144 PaleGreen2
124 205 124 PaleGreen3
84 139 84 PaleGreen4
0 255 127 SpringGreen1
0 238 118 SpringGreen2
0 205 102 SpringGreen3
0 139 69 SpringGreen4
0 255 0 green1
0 238 0 green2
0 205 0 green3
0 139 0 green4
127 255 0 chartreuse1
118 238 0 chartreuse2
102 205 0 chartreuse3
69 139 0 chartreuse4
192 255 62 OliveDrab1
179 238 58 OliveDrab2
154 205 50 OliveDrab3
105 139 34 OliveDrab4
202 255 112 DarkOliveGreen1
188 238 104 DarkOliveGreen2
162 205 90 DarkOliveGreen3
110 139 61 DarkOliveGreen4
255 246 143 khaki1
238 230 133 khaki2
205 198 115 khaki3
139 134 78 khaki4
255 236 139 LightGoldenrod1
238 220 130 LightGoldenrod2
205 190 112 LightGoldenrod3
139 129 76 LightGoldenrod4
255 255 224 LightYellow1
238 238 209 LightYellow2
205 205 180 LightYellow3
139 139 122 LightYellow4
255 255 0 yellow1
238 238 0 yellow2
205 205 0 yellow3
139 139 0 yellow4
255 215 0 gold1
238 201 0 gold2
205 173 0 gold3
139 117 0 gold4
255 193 37 goldenrod1
238 180 34 goldenrod2
205 155 29 goldenrod3
139 105 20 goldenrod4
255 185 15 DarkGoldenrod1
238 173 14 DarkGoldenrod2
205 149 12 DarkGoldenrod3
139 101 8 DarkGoldenrod4
255 193 193 RosyBrown1
238 180 180 RosyBrown2
205 155 155 RosyBrown3
139 105 105 RosyBrown4
255 106 106 IndianRed1
238 99 99 IndianRed2
205 85 85 IndianRed3
139 58 58 IndianRed4
255 130 71 sienna1
238 121 66 sienna2
205 104 57 sienna3
139 71 38 sienna4
255 211 155 burlywood1
238 197 145 burlywood2
205 170 125 burlywood3
139 115 85 burlywood4
255 231 186 wheat1
238 216 174 wheat2
205 186 150 wheat3
139 126 102 wheat4
255 165 79 tan1
238 154 73 tan2
205 133 63 tan3
139 90 43 tan4
255 127 36 chocolate1
238 118 33 chocolate2
205 102 29 chocolate3
139 69 19 chocolate4
255 48 48 firebrick1
238 44 44 firebrick2
205 38 38 firebrick3
139 26 26 firebrick4
255 64 64 brown1
238 59 59 brown2
205 51 51 brown3
139 35 35 brown4
255 140 105 salmon1
238 130 98 salmon2
205 112 84 salmon3
139 76 57 salmon4
255 160 122 LightSalmon1
238 149 114 LightSalmon2
205 129 98 LightSalmon3
139 87 66 LightSalmon4
255 165 0 orange1
238 154 0 orange2
205 133 0 orange3
139 90 0 orange4
255 127 0 DarkOrange1
238 118 0 DarkOrange2
205 102 0 DarkOrange3
139 69 0 DarkOrange4
255 114 86 coral1
238 106 80 coral2
205 91 69 coral3
139 62 47 coral4
255 99 71 tomato1
238 92 66 tomato2
205 79 57 tomato3
139 54 38 tomato4
255 69 0 OrangeRed1
238 64 0 OrangeRed2
205 55 0 OrangeRed3
139 37 0 OrangeRed4
255 0 0 red1
238 0 0 red2
205 0 0 red3
139 0 0 red4
255 20 147 DeepPink1
238 18 137 DeepPink2
205 16 118 DeepPink3
139 10 80 DeepPink4
255 110 180 HotPink1
238 106 167 HotPink2
205 96 144 HotPink3
139 58 98 HotPink4
255 181 197 pink1
238 169 184 pink2
205 145 158 pink3
139 99 108 pink4
255 174 185 LightPink1
238 162 173 LightPink2
205 140 149 LightPink3
139 95 101 LightPink4
255 130 171 PaleVioletRed1
238 121 159 PaleVioletRed2
205 104 137 PaleVioletRed3
139 71 93 PaleVioletRed4
255 52 179 maroon1
238 48 167 maroon2
205 41 144 maroon3
139 28 98 maroon4
255 62 150 VioletRed1
238 58 140 VioletRed2
205 50 120 VioletRed3
139 34 82 VioletRed4
255 0 255 magenta1
238 0 238 magenta2
205 0 205 magenta3
139 0 139 magenta4
255 131 250 orchid1
238 122 233 orchid2
205 105 201 orchid3
139 71 137 orchid4
255 187 255 plum1
238 174 238 plum2
205 150 205 plum3
139 102 139 plum4
224 102 255 MediumOrchid1
209 95 238 MediumOrchid2
180 82 205 MediumOrchid3
122 55 139 MediumOrchid4
191 62 255 DarkOrchid1
178 58 238 DarkOrchid2
154 50 205 DarkOrchid3
104 34 139 DarkOrchid4
155 48 255 purple1
145 44 238 purple2
125 38 205 purple3
85 26 139 purple4
171 130 255 MediumPurple1
159 121 238 MediumPurple2
137 104 205 MediumPurple3
93 71 139 MediumPurple4
255 225 255 thistle1
238 210 238 thistle2
205 181 205 thistle3
139 123 139 thistle4
0 0 0 gray0
0 0 0 grey0
3 3 3 gray1
3 3 3 grey1
5 5 5 gray2
5 5 5 grey2
8 8 8 gray3
8 8 8 grey3
10 10 10 gray4
10 10 10 grey4
13 13 13 gray5
13 13 13 grey5
15 15 15 gray6
15 15 15 grey6
18 18 18 gray7
18 18 18 grey7
20 20 20 gray8
20 20 20 grey8
23 23 23 gray9
23 23 23 grey9
26 26 26 gray10
26 26 26 grey10
28 28 28 gray11
28 28 28 grey11
31 31 31 gray12
31 31 31 grey12
33 33 33 gray13
33 33 33 grey13
36 36 36 gray14
36 36 36 grey14
38 38 38 gray15
38 38 38 grey15
41 41 41 gray16
41 41 41 grey16
43 43 43 gray17
43 43 43 grey17
46 46 46 gray18
46 46 46 grey18
48 48 48 gray19
48 48 48 grey19
51 51 51 gray20
51 51 51 grey20
54 54 54 gray21
54 54 54 grey21
56 56 56 gray22
56 56 56 grey22
59 59 59 gray23
59 59 59 grey23
61 61 61 gray24
61 61 61 grey24
64 64 64 gray25
64 64 64 grey25
66 66 66 gray26
66 66 66 grey26
69 69 69 gray27
69 69 69 grey27
71 71 71 gray28
71 71 71 grey28
74 74 74 gray29
74 74 74 grey29
77 77 77 gray30
77 77 77 grey30
79 79 79 gray31
79 79 79 grey31
82 82 82 gray32
82 82 82 grey32
84 84 84 gray33
84 84 84 grey33
87 87 87 gray34
87 87 87 grey34
89 89 89 gray35
89 89 89 grey35
92 92 92 gray36
92 92 92 grey36
94 94 94 gray37
94 94 94 grey37
97 97 97 gray38
97 97 97 grey38
99 99 99 gray39
99 99 99 grey39
102 102 102 gray40
102 102 102 grey40
105 105 105 gray41
105 105 105 grey41
107 107 107 gray42
107 107 107 grey42
110 110 110 gray43
110 110 110 grey43
112 112 112 gray44
112 112 112 grey44
115 115 115 gray45
115 115 115 grey45
117 117 117 gray46
117 117 117 grey46
120 120 120 gray47
120 120 120 grey47
122 122 122 gray48
122 122 122 grey48
125 125 125 gray49
125 125 125 grey49
127 127 127 gray50
127 127 127 grey50
130 130 130 gray51
130 130 130 grey51
133 133 133 gray52
133 133 133 grey52
135 135 135 gray53
135 135 135 grey53
138 138 138 gray54
138 138 138 grey54
140 140 140 gray55
140 140 140 grey55
143 143 143 gray56
143 143 143 grey56
145 145 145 gray57
145 145 145 grey57
148 148 148 gray58
148 148 148 grey58
150 150 150 gray59
150 150 150 grey59
153 153 153 gray60
153 153 153 grey60
156 156 156 gray61
156 156 156 grey61
158 158 158 gray62
158 158 158 grey62
161 161 161 gray63
161 161 161 grey63
163 163 163 gray64
163 163 163 grey64
166 166 166 gray65
166 166 166 grey65
168 168 168 gray66
168 168 168 grey66
171 171 171 gray67
171 171 171 grey67
173 173 173 gray68
173 173 173 grey68
176 176 176 gray69
176 176 176 grey69
179 179 179 gray70
179 179 179 grey70
181 181 181 gray71
181 181 181 grey71
184 184 184 gray72
184 184 184 grey72
186 186 186 gray73
186 186 186 grey73
189 189 189 gray74
189 189 189 grey74
191 191 191 gray75
191 191 191 grey75
194 194 194 gray76
194 194 194 grey76
196 196 196 gray77
196 196 196 grey77
199 199 199 gray78
199 199 199 grey78
201 201 201 gray79
201 201 201 grey79
204 204 204 gray80
204 204 204 grey80
207 207 207 gray81
207 207 207 grey81
209 209 209 gray82
209 209 209 grey82
212 212 212 gray83
212 212 212 grey83
214 214 214 gray84
214 214 214 grey84
217 217 217 gray85
217 217 217 grey85
219 219 219 gray86
219 219 219 grey86
222 222 222 gray87
222 222 222 grey87
224 224 224 gray88
224 224 224 grey88
227 227 227 gray89
227 227 227 grey89
229 229 229 gray90
229 229 229 grey90
232 232 232 gray91
232 232 232 grey91
235 235 235 gray92
235 235 235 grey92
237 237 237 gray93
237 237 237 grey93
240 240 240 gray94
240 240 240 grey94
242 242 242 gray95
242 242 242 grey95
245 245 245 gray96
245 245 245 grey96
247 247 247 gray97
247 247 247 grey97
250 250 250 gray98
250 250 250 grey98
252 252 252 gray99
252 252 252 grey99
255 255 255 gray100
255 255 255 grey100
| {
"pile_set_name": "Github"
} |
{
"profiles": {
"SelfHost": {
"commandName": "Project",
"environmentVariables": {
"ASPNETCORE_ENVIRONMENT": "Development"
},
"applicationUrl": "http://localhost:5000"
}
}
}
| {
"pile_set_name": "Github"
} |
%YAML 1.1
%TAG !u! tag:unity3d.com,2011:
--- !u!29 &1
OcclusionCullingSettings:
m_ObjectHideFlags: 0
serializedVersion: 2
m_OcclusionBakeSettings:
smallestOccluder: 5
smallestHole: 0.25
backfaceThreshold: 100
m_SceneGUID: 00000000000000000000000000000000
m_OcclusionCullingData: {fileID: 0}
--- !u!104 &2
RenderSettings:
m_ObjectHideFlags: 0
serializedVersion: 8
m_Fog: 0
m_FogColor: {r: 0.5, g: 0.5, b: 0.5, a: 1}
m_FogMode: 3
m_FogDensity: 0.01
m_LinearFogStart: 0
m_LinearFogEnd: 300
m_AmbientSkyColor: {r: 0.212, g: 0.227, b: 0.259, a: 1}
m_AmbientEquatorColor: {r: 0.114, g: 0.125, b: 0.133, a: 1}
m_AmbientGroundColor: {r: 0.047, g: 0.043, b: 0.035, a: 1}
m_AmbientIntensity: 1
m_AmbientMode: 3
m_SubtractiveShadowColor: {r: 0.42, g: 0.478, b: 0.627, a: 1}
m_SkyboxMaterial: {fileID: 0}
m_HaloStrength: 0.5
m_FlareStrength: 1
m_FlareFadeSpeed: 3
m_HaloTexture: {fileID: 0}
m_SpotCookie: {fileID: 10001, guid: 0000000000000000e000000000000000, type: 0}
m_DefaultReflectionMode: 0
m_DefaultReflectionResolution: 128
m_ReflectionBounces: 1
m_ReflectionIntensity: 1
m_CustomReflection: {fileID: 0}
m_Sun: {fileID: 0}
m_IndirectSpecularColor: {r: 0, g: 0, b: 0, a: 1}
--- !u!157 &3
LightmapSettings:
m_ObjectHideFlags: 0
serializedVersion: 11
m_GIWorkflowMode: 1
m_GISettings:
serializedVersion: 2
m_BounceScale: 1
m_IndirectOutputScale: 1
m_AlbedoBoost: 1
m_TemporalCoherenceThreshold: 1
m_EnvironmentLightingMode: 0
m_EnableBakedLightmaps: 0
m_EnableRealtimeLightmaps: 0
m_LightmapEditorSettings:
serializedVersion: 9
m_Resolution: 2
m_BakeResolution: 40
m_TextureWidth: 1024
m_TextureHeight: 1024
m_AO: 0
m_AOMaxDistance: 1
m_CompAOExponent: 1
m_CompAOExponentDirect: 0
m_Padding: 2
m_LightmapParameters: {fileID: 0}
m_LightmapsBakeMode: 1
m_TextureCompression: 1
m_FinalGather: 0
m_FinalGatherFiltering: 1
m_FinalGatherRayCount: 256
m_ReflectionCompression: 2
m_MixedBakeMode: 1
m_BakeBackend: 0
m_PVRSampling: 1
m_PVRDirectSampleCount: 32
m_PVRSampleCount: 500
m_PVRBounces: 2
m_PVRFilterTypeDirect: 0
m_PVRFilterTypeIndirect: 0
m_PVRFilterTypeAO: 0
m_PVRFilteringMode: 0
m_PVRCulling: 1
m_PVRFilteringGaussRadiusDirect: 1
m_PVRFilteringGaussRadiusIndirect: 5
m_PVRFilteringGaussRadiusAO: 2
m_PVRFilteringAtrousPositionSigmaDirect: 0.5
m_PVRFilteringAtrousPositionSigmaIndirect: 2
m_PVRFilteringAtrousPositionSigmaAO: 1
m_ShowResolutionOverlay: 1
m_LightingDataAsset: {fileID: 0}
m_UseShadowmask: 0
--- !u!196 &4
NavMeshSettings:
serializedVersion: 2
m_ObjectHideFlags: 0
m_BuildSettings:
serializedVersion: 2
agentTypeID: 0
agentRadius: 0.5
agentHeight: 2
agentSlope: 45
agentClimb: 0.4
ledgeDropHeight: 0
maxJumpAcrossDistance: 0
minRegionArea: 2
manualCellSize: 0
cellSize: 0.16666667
manualTileSize: 0
tileSize: 256
accuratePlacement: 0
debug:
m_Flags: 0
m_NavMeshData: {fileID: 0}
--- !u!1 &79401336
GameObject:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
serializedVersion: 5
m_Component:
- component: {fileID: 79401337}
- component: {fileID: 79401338}
m_Layer: 0
m_Name: Area Light
m_TagString: Untagged
m_Icon: {fileID: 0}
m_NavMeshLayer: 0
m_StaticEditorFlags: 0
m_IsActive: 1
--- !u!4 &79401337
Transform:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 79401336}
m_LocalRotation: {x: 0, y: 0, z: 0, w: 1}
m_LocalPosition: {x: 0, y: 0, z: 0}
m_LocalScale: {x: 1, y: 1, z: 1}
m_Children: []
m_Father: {fileID: 103870420}
m_RootOrder: 0
m_LocalEulerAnglesHint: {x: 90, y: 0, z: 0}
--- !u!108 &79401338
Light:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 79401336}
m_Enabled: 1
serializedVersion: 8
m_Type: 3
m_Color: {r: 1, g: 1, b: 1, a: 1}
m_Intensity: 1
m_Range: 10
m_SpotAngle: 30
m_CookieSize: 10
m_Shadows:
m_Type: 0
m_Resolution: -1
m_CustomResolution: -1
m_Strength: 1
m_Bias: 0.05
m_NormalBias: 0.4
m_NearPlane: 0.2
m_Cookie: {fileID: 0}
m_DrawHalo: 0
m_Flare: {fileID: 0}
m_RenderMode: 0
m_CullingMask:
serializedVersion: 2
m_Bits: 4294967295
m_Lightmapping: 2
m_AreaSize: {x: 1, y: 1}
m_BounceIntensity: 1
m_ColorTemperature: 6570
m_UseColorTemperature: 0
m_ShadowRadius: 0
m_ShadowAngle: 0
--- !u!1 &103870415
GameObject:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
serializedVersion: 5
m_Component:
- component: {fileID: 103870420}
- component: {fileID: 103870419}
- component: {fileID: 103870417}
- component: {fileID: 103870416}
- component: {fileID: 103870421}
- component: {fileID: 103870422}
m_Layer: 0
m_Name: Main Camera
m_TagString: MainCamera
m_Icon: {fileID: 0}
m_NavMeshLayer: 0
m_StaticEditorFlags: 0
m_IsActive: 1
--- !u!81 &103870416
AudioListener:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 103870415}
m_Enabled: 1
--- !u!124 &103870417
Behaviour:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 103870415}
m_Enabled: 1
--- !u!20 &103870419
Camera:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 103870415}
m_Enabled: 1
serializedVersion: 2
m_ClearFlags: 1
m_BackGroundColor: {r: 0, g: 0.077205874, b: 0.19852942, a: 0}
m_NormalizedViewPortRect:
serializedVersion: 2
x: 0
y: 0
width: 1
height: 1
near clip plane: 0.3
far clip plane: 1000
field of view: 60
orthographic: 1
orthographic size: 5
m_Depth: -1
m_CullingMask:
serializedVersion: 2
m_Bits: 4294967295
m_RenderingPath: -1
m_TargetTexture: {fileID: 0}
m_TargetDisplay: 0
m_TargetEye: 3
m_HDR: 0
m_AllowMSAA: 1
m_AllowDynamicResolution: 0
m_ForceIntoRT: 0
m_OcclusionCulling: 1
m_StereoConvergence: 10
m_StereoSeparation: 0.022
--- !u!4 &103870420
Transform:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 103870415}
m_LocalRotation: {x: 0, y: 0, z: 0, w: 1}
m_LocalPosition: {x: 0, y: 0, z: -10}
m_LocalScale: {x: 1, y: 1, z: 1}
m_Children:
- {fileID: 79401337}
m_Father: {fileID: 0}
m_RootOrder: 0
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
--- !u!114 &103870421
MonoBehaviour:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 103870415}
m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: 11500000, guid: 2eee3e399dbaf1a42b6470623255ce78, type: 3}
m_Name:
m_EditorClassIdentifier:
TextHeader: {fileID: 1184638579}
Image: {fileID: 570292425}
Audio: {fileID: 103870422}
--- !u!82 &103870422
AudioSource:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 103870415}
m_Enabled: 1
serializedVersion: 4
OutputAudioMixerGroup: {fileID: 0}
m_audioClip: {fileID: 8300000, guid: 9d07a960b9553fb4eb76eb484b76d049, type: 3}
m_PlayOnAwake: 0
m_Volume: 1
m_Pitch: 1
Loop: 0
Mute: 0
Spatialize: 0
SpatializePostEffects: 0
Priority: 128
DopplerLevel: 1
MinDistance: 1
MaxDistance: 500
Pan2D: 0
rolloffMode: 0
BypassEffects: 0
BypassListenerEffects: 0
BypassReverbZones: 0
rolloffCustomCurve:
serializedVersion: 2
m_Curve:
- serializedVersion: 2
time: 0
value: 1
inSlope: 0
outSlope: 0
tangentMode: 0
- serializedVersion: 2
time: 1
value: 0
inSlope: 0
outSlope: 0
tangentMode: 0
m_PreInfinity: 2
m_PostInfinity: 2
m_RotationOrder: 4
panLevelCustomCurve:
serializedVersion: 2
m_Curve:
- serializedVersion: 2
time: 0
value: 0
inSlope: 0
outSlope: 0
tangentMode: 0
m_PreInfinity: 2
m_PostInfinity: 2
m_RotationOrder: 0
spreadCustomCurve:
serializedVersion: 2
m_Curve:
- serializedVersion: 2
time: 0
value: 0
inSlope: 0
outSlope: 0
tangentMode: 0
m_PreInfinity: 2
m_PostInfinity: 2
m_RotationOrder: 4
reverbZoneMixCustomCurve:
serializedVersion: 2
m_Curve:
- serializedVersion: 2
time: 0
value: 1
inSlope: 0
outSlope: 0
tangentMode: 0
m_PreInfinity: 2
m_PostInfinity: 2
m_RotationOrder: 0
--- !u!1 &275538404
GameObject:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
serializedVersion: 5
m_Component:
- component: {fileID: 275538405}
- component: {fileID: 275538408}
- component: {fileID: 275538407}
- component: {fileID: 275538406}
m_Layer: 0
m_Name: Back Button
m_TagString: Untagged
m_Icon: {fileID: 0}
m_NavMeshLayer: 0
m_StaticEditorFlags: 0
m_IsActive: 1
--- !u!224 &275538405
RectTransform:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 275538404}
m_LocalRotation: {x: -0, y: -0, z: -0, w: 1}
m_LocalPosition: {x: 0, y: 0, z: 0}
m_LocalScale: {x: 1, y: 1, z: 1}
m_Children:
- {fileID: 1369008950}
m_Father: {fileID: 444643682}
m_RootOrder: 2
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
m_AnchorMin: {x: 0, y: 0}
m_AnchorMax: {x: 0, y: 0}
m_AnchoredPosition: {x: 5, y: 5}
m_SizeDelta: {x: 140, y: 30}
m_Pivot: {x: 0, y: 0}
--- !u!114 &275538406
MonoBehaviour:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 275538404}
m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: 1392445389, guid: f5f67c52d1564df4a8936ccd202a3bd8, type: 3}
m_Name:
m_EditorClassIdentifier:
m_Navigation:
m_Mode: 3
m_SelectOnUp: {fileID: 0}
m_SelectOnDown: {fileID: 0}
m_SelectOnLeft: {fileID: 0}
m_SelectOnRight: {fileID: 0}
m_Transition: 1
m_Colors:
m_NormalColor: {r: 1, g: 1, b: 1, a: 1}
m_HighlightedColor: {r: 0.9607843, g: 0.9607843, b: 0.9607843, a: 1}
m_PressedColor: {r: 0.78431374, g: 0.78431374, b: 0.78431374, a: 1}
m_DisabledColor: {r: 0.78431374, g: 0.78431374, b: 0.78431374, a: 0.5019608}
m_ColorMultiplier: 1
m_FadeDuration: 0.1
m_SpriteState:
m_HighlightedSprite: {fileID: 0}
m_PressedSprite: {fileID: 0}
m_DisabledSprite: {fileID: 0}
m_AnimationTriggers:
m_NormalTrigger: Normal
m_HighlightedTrigger: Highlighted
m_PressedTrigger: Pressed
m_DisabledTrigger: Disabled
m_Interactable: 1
m_TargetGraphic: {fileID: 275538407}
m_OnClick:
m_PersistentCalls:
m_Calls:
- m_Target: {fileID: 103870421}
m_MethodName: ClickBack
m_Mode: 1
m_Arguments:
m_ObjectArgument: {fileID: 0}
m_ObjectArgumentAssemblyTypeName: UnityEngine.Object, UnityEngine
m_IntArgument: 0
m_FloatArgument: 0
m_StringArgument:
m_BoolArgument: 0
m_CallState: 2
m_TypeName: UnityEngine.UI.Button+ButtonClickedEvent, UnityEngine.UI, Version=1.0.0.0,
Culture=neutral, PublicKeyToken=null
--- !u!114 &275538407
MonoBehaviour:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 275538404}
m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: -765806418, guid: f5f67c52d1564df4a8936ccd202a3bd8, type: 3}
m_Name:
m_EditorClassIdentifier:
m_Material: {fileID: 0}
m_Color: {r: 1, g: 1, b: 1, a: 1}
m_RaycastTarget: 1
m_OnCullStateChanged:
m_PersistentCalls:
m_Calls: []
m_TypeName: UnityEngine.UI.MaskableGraphic+CullStateChangedEvent, UnityEngine.UI,
Version=1.0.0.0, Culture=neutral, PublicKeyToken=null
m_Sprite: {fileID: 10905, guid: 0000000000000000f000000000000000, type: 0}
m_Type: 1
m_PreserveAspect: 0
m_FillCenter: 1
m_FillMethod: 4
m_FillAmount: 1
m_FillClockwise: 1
m_FillOrigin: 0
--- !u!222 &275538408
CanvasRenderer:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 275538404}
--- !u!1 &444643678
GameObject:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
serializedVersion: 5
m_Component:
- component: {fileID: 444643682}
- component: {fileID: 444643681}
- component: {fileID: 444643680}
- component: {fileID: 444643679}
m_Layer: 0
m_Name: Canvas
m_TagString: Untagged
m_Icon: {fileID: 0}
m_NavMeshLayer: 0
m_StaticEditorFlags: 0
m_IsActive: 1
--- !u!114 &444643679
MonoBehaviour:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 444643678}
m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: 1301386320, guid: f5f67c52d1564df4a8936ccd202a3bd8, type: 3}
m_Name:
m_EditorClassIdentifier:
m_IgnoreReversedGraphics: 1
m_BlockingObjects: 0
m_BlockingMask:
serializedVersion: 2
m_Bits: 4294967295
--- !u!114 &444643680
MonoBehaviour:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 444643678}
m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: 1980459831, guid: f5f67c52d1564df4a8936ccd202a3bd8, type: 3}
m_Name:
m_EditorClassIdentifier:
m_UiScaleMode: 1
m_ReferencePixelsPerUnit: 100
m_ScaleFactor: 0.7
m_ReferenceResolution: {x: 640, y: 480}
m_ScreenMatchMode: 0
m_MatchWidthOrHeight: 0.5
m_PhysicalUnit: 3
m_FallbackScreenDPI: 128
m_DefaultSpriteDPI: 128
m_DynamicPixelsPerUnit: 1
--- !u!223 &444643681
Canvas:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 444643678}
m_Enabled: 1
serializedVersion: 3
m_RenderMode: 0
m_Camera: {fileID: 0}
m_PlaneDistance: 100
m_PixelPerfect: 0
m_ReceivesEvents: 1
m_OverrideSorting: 0
m_OverridePixelPerfect: 0
m_SortingBucketNormalizedSize: 0
m_AdditionalShaderChannelsFlag: 0
m_SortingLayerID: 0
m_SortingOrder: 0
m_TargetDisplay: 0
--- !u!224 &444643682
RectTransform:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 444643678}
m_LocalRotation: {x: 0, y: 0, z: 0, w: 1}
m_LocalPosition: {x: 0, y: 0, z: 0}
m_LocalScale: {x: 0, y: 0, z: 0}
m_Children:
- {fileID: 570292424}
- {fileID: 1184638578}
- {fileID: 275538405}
m_Father: {fileID: 0}
m_RootOrder: 2
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
m_AnchorMin: {x: 0, y: 0}
m_AnchorMax: {x: 0, y: 0}
m_AnchoredPosition: {x: 0, y: 0}
m_SizeDelta: {x: 0, y: 0}
m_Pivot: {x: 0, y: 0}
--- !u!1 &570292423
GameObject:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
serializedVersion: 5
m_Component:
- component: {fileID: 570292424}
- component: {fileID: 570292426}
- component: {fileID: 570292425}
m_Layer: 0
m_Name: RawImage
m_TagString: Untagged
m_Icon: {fileID: 0}
m_NavMeshLayer: 0
m_StaticEditorFlags: 0
m_IsActive: 1
--- !u!224 &570292424
RectTransform:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 570292423}
m_LocalRotation: {x: 0, y: 0, z: 0, w: 1}
m_LocalPosition: {x: 0, y: 0, z: 0}
m_LocalScale: {x: 1, y: 1, z: 1}
m_Children: []
m_Father: {fileID: 444643682}
m_RootOrder: 0
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
m_AnchorMin: {x: 0.5, y: 0.5}
m_AnchorMax: {x: 0.5, y: 0.5}
m_AnchoredPosition: {x: 0, y: 0}
m_SizeDelta: {x: 400, y: 400}
m_Pivot: {x: 0.5, y: 0.5}
--- !u!114 &570292425
MonoBehaviour:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 570292423}
m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: -98529514, guid: f5f67c52d1564df4a8936ccd202a3bd8, type: 3}
m_Name:
m_EditorClassIdentifier:
m_Material: {fileID: 0}
m_Color: {r: 1, g: 1, b: 1, a: 1}
m_RaycastTarget: 1
m_OnCullStateChanged:
m_PersistentCalls:
m_Calls: []
m_TypeName: UnityEngine.UI.MaskableGraphic+CullStateChangedEvent, UnityEngine.UI,
Version=1.0.0.0, Culture=neutral, PublicKeyToken=null
m_Texture: {fileID: 0}
m_UVRect:
serializedVersion: 2
x: 0
y: 0
width: 1
height: 1
--- !u!222 &570292426
CanvasRenderer:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 570292423}
--- !u!1 &978919301
GameObject:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
serializedVersion: 5
m_Component:
- component: {fileID: 978919304}
- component: {fileID: 978919303}
- component: {fileID: 978919302}
m_Layer: 0
m_Name: EventSystem
m_TagString: Untagged
m_Icon: {fileID: 0}
m_NavMeshLayer: 0
m_StaticEditorFlags: 0
m_IsActive: 1
--- !u!114 &978919302
MonoBehaviour:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 978919301}
m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: 1077351063, guid: f5f67c52d1564df4a8936ccd202a3bd8, type: 3}
m_Name:
m_EditorClassIdentifier:
m_HorizontalAxis: Horizontal
m_VerticalAxis: Vertical
m_SubmitButton: Submit
m_CancelButton: Cancel
m_InputActionsPerSecond: 10
m_RepeatDelay: 0.5
m_ForceModuleActive: 0
--- !u!114 &978919303
MonoBehaviour:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 978919301}
m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: -619905303, guid: f5f67c52d1564df4a8936ccd202a3bd8, type: 3}
m_Name:
m_EditorClassIdentifier:
m_FirstSelected: {fileID: 0}
m_sendNavigationEvents: 1
m_DragThreshold: 5
--- !u!4 &978919304
Transform:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 978919301}
m_LocalRotation: {x: 0, y: 0, z: 0, w: 1}
m_LocalPosition: {x: 0, y: 0, z: 0}
m_LocalScale: {x: 1, y: 1, z: 1}
m_Children: []
m_Father: {fileID: 0}
m_RootOrder: 1
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
--- !u!1 &1184638577
GameObject:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
serializedVersion: 5
m_Component:
- component: {fileID: 1184638578}
- component: {fileID: 1184638580}
- component: {fileID: 1184638579}
m_Layer: 0
m_Name: Text
m_TagString: Untagged
m_Icon: {fileID: 0}
m_NavMeshLayer: 0
m_StaticEditorFlags: 0
m_IsActive: 1
--- !u!224 &1184638578
RectTransform:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 1184638577}
m_LocalRotation: {x: 0, y: 0, z: 0, w: 1}
m_LocalPosition: {x: 0, y: 0, z: 0}
m_LocalScale: {x: 1, y: 1, z: 1}
m_Children: []
m_Father: {fileID: 444643682}
m_RootOrder: 1
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
m_AnchorMin: {x: 0, y: 1}
m_AnchorMax: {x: 1, y: 1}
m_AnchoredPosition: {x: 0, y: 0}
m_SizeDelta: {x: 0, y: 130}
m_Pivot: {x: 0.5, y: 1}
--- !u!114 &1184638579
MonoBehaviour:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 1184638577}
m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: 708705254, guid: f5f67c52d1564df4a8936ccd202a3bd8, type: 3}
m_Name:
m_EditorClassIdentifier:
m_Material: {fileID: 0}
m_Color: {r: 1, g: 1, b: 1, a: 1}
m_RaycastTarget: 1
m_OnCullStateChanged:
m_PersistentCalls:
m_Calls: []
m_TypeName: UnityEngine.UI.MaskableGraphic+CullStateChangedEvent, UnityEngine.UI,
Version=1.0.0.0, Culture=neutral, PublicKeyToken=null
m_FontData:
m_Font: {fileID: 10102, guid: 0000000000000000e000000000000000, type: 0}
m_FontSize: 14
m_FontStyle: 0
m_BestFit: 1
m_MinSize: 10
m_MaxSize: 32
m_Alignment: 4
m_AlignByGeometry: 0
m_RichText: 1
m_HorizontalOverflow: 0
m_VerticalOverflow: 0
m_LineSpacing: 1
m_Text:
--- !u!222 &1184638580
CanvasRenderer:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 1184638577}
--- !u!1 &1369008949
GameObject:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
serializedVersion: 5
m_Component:
- component: {fileID: 1369008950}
- component: {fileID: 1369008952}
- component: {fileID: 1369008951}
m_Layer: 0
m_Name: Text
m_TagString: Untagged
m_Icon: {fileID: 0}
m_NavMeshLayer: 0
m_StaticEditorFlags: 0
m_IsActive: 1
--- !u!224 &1369008950
RectTransform:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 1369008949}
m_LocalRotation: {x: 0, y: 0, z: 0, w: 1}
m_LocalPosition: {x: 0, y: 0, z: 0}
m_LocalScale: {x: 1, y: 1, z: 1}
m_Children: []
m_Father: {fileID: 275538405}
m_RootOrder: 0
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
m_AnchorMin: {x: 0, y: 0}
m_AnchorMax: {x: 1, y: 1}
m_AnchoredPosition: {x: 0, y: 0}
m_SizeDelta: {x: 0, y: 0}
m_Pivot: {x: 0.5, y: 0.5}
--- !u!114 &1369008951
MonoBehaviour:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 1369008949}
m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: 708705254, guid: f5f67c52d1564df4a8936ccd202a3bd8, type: 3}
m_Name:
m_EditorClassIdentifier:
m_Material: {fileID: 0}
m_Color: {r: 0.19607843, g: 0.19607843, b: 0.19607843, a: 1}
m_RaycastTarget: 1
m_OnCullStateChanged:
m_PersistentCalls:
m_Calls: []
m_TypeName: UnityEngine.UI.MaskableGraphic+CullStateChangedEvent, UnityEngine.UI,
Version=1.0.0.0, Culture=neutral, PublicKeyToken=null
m_FontData:
m_Font: {fileID: 10102, guid: 0000000000000000e000000000000000, type: 0}
m_FontSize: 14
m_FontStyle: 0
m_BestFit: 0
m_MinSize: 10
m_MaxSize: 40
m_Alignment: 4
m_AlignByGeometry: 0
m_RichText: 1
m_HorizontalOverflow: 0
m_VerticalOverflow: 0
m_LineSpacing: 1
m_Text: Back
--- !u!222 &1369008952
CanvasRenderer:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 1369008949}
| {
"pile_set_name": "Github"
} |
+++
Talk_date = ""
Talk_start_time = ""
Talk_end_time = ""
Title = "Metatalk: An ignite about what I’ve learned giving ignites"
Type = "talk"
Speakers = ["jason-yee"]
+++
Creating and delivering a good talk is difficult. Like many other crafts, it requires a combination of skill and style, both of which are honed through practice. In this ignite, I’ll share what I’ve learned about writing and delivering compelling ignite talks at dozens of DevOpsDays around the world. I’ll also touch on why ignites are so valuable to DevOpsDays and on lessons we can apply to DevOps. | {
"pile_set_name": "Github"
} |
.sect .text; .sect .rom; .sect .data; .sect .bss
.sect .text
.define _vhangup
vhangup = 76
.align 1
_vhangup:
.data2 0x0000
chmk $vhangup
ret
| {
"pile_set_name": "Github"
} |
CREATE TABLE IF NOT EXISTS `character_zonemap_hexgroup` (
`id` BIGINT(20) UNSIGNED NOT NULL DEFAULT '0',
`zoneMap` SMALLINT(5) UNSIGNED NOT NULL DEFAULT '0',
`hexGroup` SMALLINT(5) UNSIGNED NOT NULL DEFAULT '0',
PRIMARY KEY (`id`, `zoneMap`, `hexGroup`),
CONSTRAINT `FK__character_zonemap_hexgroup_id__character_id` FOREIGN KEY (`id`) REFERENCES `character` (`id`) ON DELETE CASCADE
);
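-- Illustrative usage (not part of the original schema): sample statements showing how rows
-- in this table are written and read. The ids below are made-up values, and the INSERT only
-- succeeds if a matching row already exists in `character` because of the foreign key above.
-- INSERT INTO `character_zonemap_hexgroup` (`id`, `zoneMap`, `hexGroup`) VALUES (1, 51, 204);
-- SELECT `hexGroup` FROM `character_zonemap_hexgroup` WHERE `id` = 1 AND `zoneMap` = 51;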
| {
"pile_set_name": "Github"
} |
/*
* See the NOTICE file distributed with this work for additional
* information regarding copyright ownership.
*
* This is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this software; if not, write to the Free
* Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
* 02110-1301 USA, or see the FSF site: http://www.fsf.org.
*/
package org.xwiki.like.test.ui;
import java.util.Arrays;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Order;
import org.junit.jupiter.api.Test;
import org.xwiki.like.test.po.LikeButton;
import org.xwiki.model.reference.DocumentReference;
import org.xwiki.test.docker.junit5.TestReference;
import org.xwiki.test.docker.junit5.UITest;
import org.xwiki.test.ui.TestUtils;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
@UITest(
properties = {
// Required for filters preferences
"xwikiDbHbmCommonExtraMappings=notification-filter-preferences.hbm.xml"
},
extraJARs = {
// It's currently not possible to install a JAR contributing a Hibernate mapping file as an Extension. Thus
// we need to provide the JAR inside WEB-INF/lib. See https://jira.xwiki.org/browse/XWIKI-8271
"org.xwiki.platform:xwiki-platform-notifications-filters-default",
// It's currently not possible to install a JAR contributing a Hibernate mapping file as an Extension. Thus
// we need to provide the JAR inside WEB-INF/lib. See https://jira.xwiki.org/browse/XWIKI-8271
"org.xwiki.platform:xwiki-platform-eventstream-store-hibernate",
// The Solr store is not ready yet to be installed as extension
"org.xwiki.platform:xwiki-platform-eventstream-store-solr"
}, resolveExtraJARs = true)
public class LikeIT
{
private static final String USER1 = "LikeUser1";
private static final String USER2 = "LikeUser2";
private static final DocumentReference LIKE_CONFIGURATION_REFERENCE =
new DocumentReference("xwiki", Arrays.asList("XWiki", "Like"), "LikeConfiguration");
private static final String LIKE_CONFIGURATION_CLASSNAME = "XWiki.Like.LikeConfigurationClass";
@BeforeEach
public void setup(TestUtils testUtils)
{
testUtils.createUser(USER1, USER1, null);
testUtils.createUser(USER2, USER2, null);
}
private void updateLikeConfiguration(TestUtils testUtils, Object... properties)
{
testUtils.updateObject(LIKE_CONFIGURATION_REFERENCE, LIKE_CONFIGURATION_CLASSNAME, 0, properties);
}
/**
* Check that guest user can only see the button if the configuration is set to force displaying it and
* can never interact with it.
*/
@Test
@Order(1)
void guestUser(TestUtils testUtils, TestReference testReference)
{
testUtils.loginAsSuperAdmin();
testUtils.createPage(testReference, "some content");
updateLikeConfiguration(testUtils, "alwaysDisplayButton", 0);
testUtils.forceGuestUser();
testUtils.gotoPage(testReference);
LikeButton likeButton = new LikeButton();
assertFalse(likeButton.isDisplayed());
testUtils.loginAsSuperAdmin();
updateLikeConfiguration(testUtils, "alwaysDisplayButton", 1);
testUtils.forceGuestUser();
testUtils.gotoPage(testReference);
likeButton = new LikeButton();
assertTrue(likeButton.isDisplayed());
assertFalse(likeButton.canBeClicked());
}
@Test
@Order(2)
void likeUnlikeDefaultConfiguration(TestUtils testUtils, TestReference testReference) throws Exception
{
testUtils.login(USER1, USER1);
testUtils.createPage(testReference, "some content");
LikeButton likeButton = new LikeButton();
assertTrue(likeButton.isDisplayed());
assertTrue(likeButton.canBeClicked());
assertEquals(0, likeButton.getLikeNumber());
likeButton.clickToLike();
assertEquals(1, likeButton.getLikeNumber());
testUtils.login(USER2, USER2);
testUtils.gotoPage(testReference);
likeButton = new LikeButton();
assertTrue(likeButton.isDisplayed());
assertEquals(1, likeButton.getLikeNumber());
likeButton.clickToLike();
assertEquals(2, likeButton.getLikeNumber());
testUtils.login(USER1, USER1);
testUtils.gotoPage(testReference);
likeButton = new LikeButton();
assertTrue(likeButton.isDisplayed());
assertEquals(2, likeButton.getLikeNumber());
likeButton.clickToUnlike();
assertEquals(1, likeButton.getLikeNumber());
// Check that the value remains after reload
testUtils.gotoPage(testReference);
likeButton = new LikeButton();
assertTrue(likeButton.isDisplayed());
assertEquals(1, likeButton.getLikeNumber());
}
}
| {
"pile_set_name": "Github"
} |
/*
* The MIT License
*
* Copyright (c) 2014 Red Hat, Inc.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package org.jenkinsci.test.acceptance.po;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import org.jvnet.hudson.annotation_indexer.Indexed;
/**
* Plugin page object at <tt>$JENKINS_URL/plugin/$value/</tt>.
*
* @author ogondza
*/
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.TYPE)
@Indexed
public @interface PluginPageObject {
String value();
}
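// Illustrative usage (not part of the original file): a page object for a hypothetical plugin
// with short name "my-plugin" would be annotated like this so the annotation indexer can map
// the plugin short name to its page object (base class and constructor omitted for brevity):
//
//   @PluginPageObject("my-plugin")
//   public class MyPluginPage /* extends the harness's page object base class */ {
//       // navigation helpers for $JENKINS_URL/plugin/my-plugin/ go here
//   }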
| {
"pile_set_name": "Github"
} |
<?php
/**
* Alerts view page.
*
* PHP version 5
* LICENSE: This source file is subject to LGPL license
* that is available through the world-wide-web at the following URI:
* http://www.gnu.org/copyleft/lesser.html
* @author Ushahidi Team <[email protected]>
* @package Ushahidi - http://source.ushahididev.com
* @module API Controller
* @copyright Ushahidi - http://www.ushahidi.com
* @license http://www.gnu.org/copyleft/lesser.html GNU Lesser General Public License (LGPL)
*/
?>
<div class="bg">
<h2>
<?php admin::manage_subtabs("alerts"); ?>
</h2>
<!-- tabs -->
<div class="tabs">
<!-- tabset -->
<ul class="tabset">
<li><a href="<?php echo url::site()."admin/manage/alerts/"; ?>" <?php if ($type == '0' OR empty($type) ) echo "class=\"active\""; ?>><?php echo Kohana::lang('ui_main.show_all');?></a></li>
<li><a href="<?php echo url::site()."admin/manage/alerts/index/"; ?>?type=1" <?php if ($type == '1') echo "class=\"active\""; ?>><?php echo Kohana::lang('ui_main.sms');?></a></li>
<li><a href="<?php echo url::site()."admin/manage/alerts/index/"; ?>?type=2" <?php if ($type == '2') echo "class=\"active\""; ?>><?php echo Kohana::lang('ui_main.email');?></a></li>
</ul>
<!-- tab -->
<div class="tab">
<?php print form::open(NULL,array('method'=>'get', 'id' => 'alertSearch', 'name' => 'alertSearch')); ?>
<input type="hidden" name="action" id="action" value="s"/>
<input type="hidden" name="type" value="<?php echo $type; ?>"/>
<ul>
<li>
<a href="#" onclick="alertAction('d','<?php echo strtoupper(Kohana::lang('ui_main.delete')); ?>', '');">
<?php echo strtoupper(Kohana::lang('ui_main.delete'));?></a>
</li>
<li style="float:right;">
<?php print form::input('ak', $keyword, ' class="text" style="float:left;height:20px;"'); ?>
<a href="#" onclick="javascript:alertSearch.submit();">
<?php echo Kohana::lang('ui_main.search');?></a>
</li>
</ul>
<?php print form::close(); ?>
</div>
</div>
<?php if ($form_error): ?>
<!-- red-box -->
<div class="red-box">
<h3><?php echo Kohana::lang('ui_main.error');?></h3>
<ul>
<?php
foreach ($errors as $error_item => $error_description)
{
// print "<li>" . $error_description . "</li>";
print (!$error_description) ? '' : "<li>" . $error_description . "</li>";
}
?>
</ul>
</div>
<?php endif; ?>
<?php if ($form_saved): ?>
<!-- green-box -->
<div class="green-box">
<h3><?php echo Kohana::lang('ui_main.alert_has_been');?> <?php echo $form_action; ?>!</h3>
</div>
<?php endif; ?>
<!-- report-table -->
<div class="report-form">
<?php print form::open(NULL,array('id' => 'alertMain', 'name' => 'alertMain')); ?>
<input type="hidden" name="action" id="action" value="">
<input type="hidden" name="alert_id[]" id="alert_single" value="">
<div class="table-holder">
<table class="table">
<thead>
<tr>
<th class="col-1"><input id="checkallalerts" type="checkbox" class="check-box" onclick="CheckAll( this.id, 'alert_id[]' )" /></th>
<th class="col-2"><?php echo Kohana::lang('ui_admin.alerts');?></th>
<th class="col-3"><?php echo Kohana::lang('ui_main.sent');?></th>
<th class="col-4"><?php echo Kohana::lang('ui_main.actions');?></th>
</tr>
</thead>
<tfoot>
<tr class="foot">
<td colspan="4"><?php echo $pagination; ?></td>
</tr>
</tfoot>
<tbody>
<?php if ($total_items == 0): ?>
<tr>
<td colspan="4" class="col">
<h3><?php echo Kohana::lang('ui_main.no_results');?></h3>
</td>
</tr>
<?php endif; ?>
<?php
foreach ($alerts as $alert)
{?>
<tr>
<td class="col-1"><input name="alert_id[]" id="alert" value="<?php echo $alert->id; ?>" type="checkbox" class="check-box"/></td>
<td class="col-2">
<div class="post">
<h4><?php echo $alert->alert_recipient; ?></h4>
</div>
<ul class="info">
<li class="none-separator">
<?php echo Kohana::lang('ui_main.location');?>:
<strong><?php echo $alert->alert_lat.','.$alert->alert_lon; ?></strong>
</li>
<li class="none-separator">
<?php echo Kohana::lang('ui_main.radius');?>:
<strong><?php echo $alert->alert_radius; ?></strong>
</li>
</ul>
</td>
<td><?php echo $alert->alert_sent->count(); ?></td>
<td class="col-4">
<ul>
<li class="none-separator"><a href="javascript:alertAction('d','DELETE','<?php echo(rawurlencode($alert->id)); ?>')" class="del"><?php echo Kohana::lang('ui_main.delete');?></a></li>
</ul>
</td>
</tr>
<?php
}
?>
</tbody>
</table>
</div>
<?php print form::close(); ?>
</div>
</div>
| {
"pile_set_name": "Github"
} |
/*
* Copyright (C) 2019 Red Hat, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package sflow
import (
"reflect"
"testing"
)
func TestSplit(t *testing.T) {
sm := &SFMetric{
Last: 200,
IfMetric: IfMetric{
IfInOctets: 200,
IfInUcastPkts: 200,
IfInMulticastPkts: 200,
IfInBroadcastPkts: 200,
IfInDiscards: 200,
IfInErrors: 200,
IfInUnknownProtos: 200,
IfOutOctets: 200,
IfOutUcastPkts: 200,
IfOutMulticastPkts: 200,
IfOutBroadcastPkts: 200,
IfOutDiscards: 200,
IfOutErrors: 200,
},
OvsMetric: OvsMetric{
OvsDpNHit: 200,
OvsDpNMissed: 200,
OvsDpNLost: 200,
OvsDpNMaskHit: 200,
OvsDpNFlows: 200,
OvsDpNMasks: 200,
OvsAppFdOpen: 200,
OvsAppFdMax: 200,
OvsAppConnOpen: 200,
OvsAppConnMax: 200,
OvsAppMemUsed: 200,
OvsAppMemMax: 200,
},
VlanMetric: VlanMetric{
VlanOctets: 200,
VlanUcastPkts: 200,
VlanMulticastPkts: 200,
VlanBroadcastPkts: 200,
VlanDiscards: 200,
},
EthMetric: EthMetric{
EthAlignmentErrors: 200,
EthFCSErrors: 200,
EthSingleCollisionFrames: 200,
EthMultipleCollisionFrames: 200,
EthSQETestErrors: 200,
EthDeferredTransmissions: 200,
EthLateCollisions: 200,
EthExcessiveCollisions: 200,
EthInternalMacReceiveErrors: 200,
EthInternalMacTransmitErrors: 200,
EthCarrierSenseErrors: 200,
EthFrameTooLongs: 200,
EthSymbolErrors: 200,
},
}
sm1, sm2 := sm.Split(100)
expected := &SFMetric{
Start: 0,
Last: 100,
IfMetric: IfMetric{
IfInOctets: 100,
IfInUcastPkts: 100,
IfInMulticastPkts: 100,
IfInBroadcastPkts: 100,
IfInDiscards: 100,
IfInErrors: 100,
IfInUnknownProtos: 100,
IfOutOctets: 100,
IfOutUcastPkts: 100,
IfOutMulticastPkts: 100,
IfOutBroadcastPkts: 100,
IfOutDiscards: 100,
IfOutErrors: 100,
},
OvsMetric: OvsMetric{
OvsDpNHit: 100,
OvsDpNMissed: 100,
OvsDpNLost: 100,
OvsDpNMaskHit: 100,
OvsDpNFlows: 100,
OvsDpNMasks: 100,
OvsAppFdOpen: 100,
OvsAppFdMax: 100,
OvsAppConnOpen: 100,
OvsAppConnMax: 100,
OvsAppMemUsed: 100,
OvsAppMemMax: 100,
},
VlanMetric: VlanMetric{
VlanOctets: 100,
VlanUcastPkts: 100,
VlanMulticastPkts: 100,
VlanBroadcastPkts: 100,
VlanDiscards: 100,
},
EthMetric: EthMetric{
EthAlignmentErrors: 100,
EthFCSErrors: 100,
EthSingleCollisionFrames: 100,
EthMultipleCollisionFrames: 100,
EthSQETestErrors: 100,
EthDeferredTransmissions: 100,
EthLateCollisions: 100,
EthExcessiveCollisions: 100,
EthInternalMacReceiveErrors: 100,
EthInternalMacTransmitErrors: 100,
EthCarrierSenseErrors: 100,
EthFrameTooLongs: 100,
EthSymbolErrors: 100,
},
}
if !reflect.DeepEqual(expected, sm1) {
t.Errorf("Slice 1 error, expected %+v, got %+v", expected, sm1)
}
expected = &SFMetric{
Start: 100,
Last: 200,
IfMetric: IfMetric{
IfInOctets: 100,
IfInUcastPkts: 100,
IfInMulticastPkts: 100,
IfInBroadcastPkts: 100,
IfInDiscards: 100,
IfInErrors: 100,
IfInUnknownProtos: 100,
IfOutOctets: 100,
IfOutUcastPkts: 100,
IfOutMulticastPkts: 100,
IfOutBroadcastPkts: 100,
IfOutDiscards: 100,
IfOutErrors: 100,
},
OvsMetric: OvsMetric{
OvsDpNHit: 100,
OvsDpNMissed: 100,
OvsDpNLost: 100,
OvsDpNMaskHit: 100,
OvsDpNFlows: 100,
OvsDpNMasks: 100,
OvsAppFdOpen: 100,
OvsAppFdMax: 100,
OvsAppConnOpen: 100,
OvsAppConnMax: 100,
OvsAppMemUsed: 100,
OvsAppMemMax: 100,
},
VlanMetric: VlanMetric{
VlanOctets: 100,
VlanUcastPkts: 100,
VlanMulticastPkts: 100,
VlanBroadcastPkts: 100,
VlanDiscards: 100,
},
EthMetric: EthMetric{
EthAlignmentErrors: 100,
EthFCSErrors: 100,
EthSingleCollisionFrames: 100,
EthMultipleCollisionFrames: 100,
EthSQETestErrors: 100,
EthDeferredTransmissions: 100,
EthLateCollisions: 100,
EthExcessiveCollisions: 100,
EthInternalMacReceiveErrors: 100,
EthInternalMacTransmitErrors: 100,
EthCarrierSenseErrors: 100,
EthFrameTooLongs: 100,
EthSymbolErrors: 100,
},
}
if !reflect.DeepEqual(expected, sm2) {
t.Errorf("Slice 2 error, expected %+v, got %+v", expected, sm2)
}
}
| {
"pile_set_name": "Github"
} |
__all__ = ('urlpatterns',)
urlpatterns = []
| {
"pile_set_name": "Github"
} |
while true; do
ls -l
sleep 5
done
| {
"pile_set_name": "Github"
} |
/*
* Copyright (C) 2013-2018 Apple Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
* OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "config.h"
#include "DFGOSRExitBase.h"
#if ENABLE(DFG_JIT)
#include "CodeBlock.h"
#include "DFGBasicBlock.h"
#include "DFGNode.h"
#include "InlineCallFrame.h"
#include "JSCInlines.h"
namespace JSC { namespace DFG {
void OSRExitBase::considerAddingAsFrequentExitSiteSlow(CodeBlock* profiledCodeBlock, ExitingJITType jitType)
{
CodeBlock* sourceProfiledCodeBlock =
baselineCodeBlockForOriginAndBaselineCodeBlock(
m_codeOriginForExitProfile, profiledCodeBlock);
if (sourceProfiledCodeBlock) {
ExitingInlineKind inlineKind;
if (m_codeOriginForExitProfile.inlineCallFrame())
inlineKind = ExitFromInlined;
else
inlineKind = ExitFromNotInlined;
FrequentExitSite site;
if (m_wasHoisted)
site = FrequentExitSite(HoistingFailed, jitType, inlineKind);
else
site = FrequentExitSite(m_codeOriginForExitProfile.bytecodeIndex(), m_kind, jitType, inlineKind);
ExitProfile::add(sourceProfiledCodeBlock, site);
}
}
} } // namespace JSC::DFG
#endif // ENABLE(DFG_JIT)
| {
"pile_set_name": "Github"
} |
#!/usr/bin/env python3
# Copyright (c) 2014-2019 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test mining RPCs
- getmininginfo
- getblocktemplate proposal mode
- submitblock"""
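# For reference, the three RPCs listed above are exercised below with calls of
# the following shape (taken from this file):
#   node.getmininginfo()
#   node.getblocktemplate({'rules': ['segwit']})  # or with 'mode': 'proposal' and block 'data'
#   node.submitblock(hexdata=b2x(block.serialize()))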
import copy
from decimal import Decimal
from test_framework.blocktools import (
create_coinbase,
TIME_GENESIS_BLOCK,
)
from test_framework.messages import (
CBlock,
CBlockHeader,
BLOCK_HEADER_SIZE
)
from test_framework.mininode import (
P2PDataStore,
)
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
assert_raises_rpc_error,
bytes_to_hex_str as b2x,
connect_nodes_bi,
)
from test_framework.script import CScriptNum
def assert_template(node, block, expect, rehash=True):
if rehash:
block.hashMerkleRoot = block.calc_merkle_root()
rsp = node.getblocktemplate(template_request={'data': b2x(block.serialize()), 'mode': 'proposal', 'rules': ['segwit']})
assert_equal(rsp, expect)
class MiningTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 2
self.setup_clean_chain = True
def mine_chain(self):
self.log.info('Create some old blocks')
for t in range(TIME_GENESIS_BLOCK, TIME_GENESIS_BLOCK + 200 * 600, 600):
self.nodes[0].setmocktime(t)
self.nodes[0].generate(1)
mining_info = self.nodes[0].getmininginfo()
assert_equal(mining_info['blocks'], 200)
assert_equal(mining_info['currentblocktx'], 0)
assert_equal(mining_info['currentblockweight'], 4000)
self.restart_node(0)
connect_nodes_bi(self.nodes, 0, 1)
def run_test(self):
self.mine_chain()
node = self.nodes[0]
def assert_submitblock(block, result_str_1, result_str_2=None):
block.solve()
result_str_2 = result_str_2 or 'duplicate-invalid'
assert_equal(result_str_1, node.submitblock(hexdata=b2x(block.serialize())))
assert_equal(result_str_2, node.submitblock(hexdata=b2x(block.serialize())))
self.log.info('getmininginfo')
mining_info = node.getmininginfo()
assert_equal(mining_info['blocks'], 200)
assert_equal(mining_info['chain'], 'regtest')
assert 'currentblocktx' not in mining_info
assert 'currentblockweight' not in mining_info
assert_equal(mining_info['difficulty'], Decimal('4.656542373906925E-10'))
assert_equal(mining_info['networkhashps'], Decimal('0.003333333333333334'))
assert_equal(mining_info['pooledtx'], 0)
# Mine a block to leave initial block download
node.generatetoaddress(1, node.get_deterministic_priv_key().address)
tmpl = node.getblocktemplate({'rules': ['segwit']})
self.log.info("getblocktemplate: Test capability advertised")
assert 'proposal' in tmpl['capabilities']
assert 'coinbasetxn' not in tmpl
next_height = int(tmpl["height"])
coinbase_tx = create_coinbase(height=next_height)
# sequence numbers must not be max for nLockTime to have effect
coinbase_tx.vin[0].nSequence = 2 ** 32 - 2
coinbase_tx.rehash()
# round-trip the encoded bip34 block height commitment
assert_equal(CScriptNum.decode(coinbase_tx.vin[0].scriptSig), next_height)
# round-trip negative and multi-byte CScriptNums to catch python regression
assert_equal(CScriptNum.decode(CScriptNum.encode(CScriptNum(1500))), 1500)
assert_equal(CScriptNum.decode(CScriptNum.encode(CScriptNum(-1500))), -1500)
assert_equal(CScriptNum.decode(CScriptNum.encode(CScriptNum(-1))), -1)
block = CBlock()
block.nVersion = tmpl["version"]
block.hashPrevBlock = int(tmpl["previousblockhash"], 16)
block.nTime = tmpl["curtime"]
block.nBits = int(tmpl["bits"], 16)
block.nNonce = 0
block.vtx = [coinbase_tx]
self.log.info("getblocktemplate: segwit rule must be set")
assert_raises_rpc_error(-8, "getblocktemplate must be called with the segwit rule set", node.getblocktemplate)
self.log.info("getblocktemplate: Test valid block")
assert_template(node, block, None)
self.log.info("submitblock: Test block decode failure")
assert_raises_rpc_error(-22, "Block decode failed", node.submitblock, b2x(block.serialize()[:-15]))
self.log.info("getblocktemplate: Test bad input hash for coinbase transaction")
bad_block = copy.deepcopy(block)
bad_block.vtx[0].vin[0].prevout.hash += 1
bad_block.vtx[0].rehash()
assert_template(node, bad_block, 'bad-cb-missing')
self.log.info("submitblock: Test invalid coinbase transaction")
assert_raises_rpc_error(-22, "Block does not start with a coinbase", node.submitblock, b2x(bad_block.serialize()))
self.log.info("getblocktemplate: Test truncated final transaction")
assert_raises_rpc_error(-22, "Block decode failed", node.getblocktemplate, {'data': b2x(block.serialize()[:-1]), 'mode': 'proposal', 'rules': ['segwit']})
self.log.info("getblocktemplate: Test duplicate transaction")
bad_block = copy.deepcopy(block)
bad_block.vtx.append(bad_block.vtx[0])
assert_template(node, bad_block, 'bad-txns-duplicate')
assert_submitblock(bad_block, 'bad-txns-duplicate', 'bad-txns-duplicate')
self.log.info("getblocktemplate: Test invalid transaction")
bad_block = copy.deepcopy(block)
bad_tx = copy.deepcopy(bad_block.vtx[0])
bad_tx.vin[0].prevout.hash = 255
bad_tx.rehash()
bad_block.vtx.append(bad_tx)
assert_template(node, bad_block, 'bad-txns-inputs-missingorspent')
assert_submitblock(bad_block, 'bad-txns-inputs-missingorspent')
self.log.info("getblocktemplate: Test nonfinal transaction")
bad_block = copy.deepcopy(block)
bad_block.vtx[0].nLockTime = 2 ** 32 - 1
bad_block.vtx[0].rehash()
assert_template(node, bad_block, 'bad-txns-nonfinal')
assert_submitblock(bad_block, 'bad-txns-nonfinal')
self.log.info("getblocktemplate: Test bad tx count")
# The tx count is immediately after the block header
bad_block_sn = bytearray(block.serialize())
assert_equal(bad_block_sn[BLOCK_HEADER_SIZE], 1)
bad_block_sn[BLOCK_HEADER_SIZE] += 1
assert_raises_rpc_error(-22, "Block decode failed", node.getblocktemplate, {'data': b2x(bad_block_sn), 'mode': 'proposal', 'rules': ['segwit']})
self.log.info("getblocktemplate: Test bad bits")
bad_block = copy.deepcopy(block)
bad_block.nBits = 469762303 # impossible in the real world
assert_template(node, bad_block, 'bad-diffbits')
self.log.info("getblocktemplate: Test bad merkle root")
bad_block = copy.deepcopy(block)
bad_block.hashMerkleRoot += 1
assert_template(node, bad_block, 'bad-txnmrklroot', False)
assert_submitblock(bad_block, 'bad-txnmrklroot', 'bad-txnmrklroot')
self.log.info("getblocktemplate: Test bad timestamps")
bad_block = copy.deepcopy(block)
bad_block.nTime = 2 ** 31 - 1
assert_template(node, bad_block, 'time-too-new')
assert_submitblock(bad_block, 'time-too-new', 'time-too-new')
bad_block.nTime = 0
assert_template(node, bad_block, 'time-too-old')
assert_submitblock(bad_block, 'time-too-old', 'time-too-old')
self.log.info("getblocktemplate: Test not best block")
bad_block = copy.deepcopy(block)
bad_block.hashPrevBlock = 123
assert_template(node, bad_block, 'inconclusive-not-best-prevblk')
assert_submitblock(bad_block, 'prev-blk-not-found', 'prev-blk-not-found')
self.log.info('submitheader tests')
assert_raises_rpc_error(-22, 'Block header decode failed', lambda: node.submitheader(hexdata='xx' * BLOCK_HEADER_SIZE))
assert_raises_rpc_error(-22, 'Block header decode failed', lambda: node.submitheader(hexdata='ff' * (BLOCK_HEADER_SIZE-2)))
assert_raises_rpc_error(-25, 'Must submit previous header', lambda: node.submitheader(hexdata=b2x(super(CBlock, bad_block).serialize())))
block.nTime += 1
block.solve()
def chain_tip(b_hash, *, status='headers-only', branchlen=1):
return {'hash': b_hash, 'height': 202, 'branchlen': branchlen, 'status': status}
assert chain_tip(block.hash) not in node.getchaintips()
node.submitheader(hexdata=b2x(block.serialize()))
assert chain_tip(block.hash) in node.getchaintips()
node.submitheader(hexdata=b2x(CBlockHeader(block).serialize())) # Noop
assert chain_tip(block.hash) in node.getchaintips()
bad_block_root = copy.deepcopy(block)
bad_block_root.hashMerkleRoot += 2
bad_block_root.solve()
assert chain_tip(bad_block_root.hash) not in node.getchaintips()
node.submitheader(hexdata=b2x(CBlockHeader(bad_block_root).serialize()))
assert chain_tip(bad_block_root.hash) in node.getchaintips()
# Should still reject invalid blocks, even if we have the header:
assert_equal(node.submitblock(hexdata=b2x(bad_block_root.serialize())), 'bad-txnmrklroot')
assert_equal(node.submitblock(hexdata=b2x(bad_block_root.serialize())), 'bad-txnmrklroot')
assert chain_tip(bad_block_root.hash) in node.getchaintips()
# We know the header for this invalid block, so should just return early without error:
node.submitheader(hexdata=b2x(CBlockHeader(bad_block_root).serialize()))
assert chain_tip(bad_block_root.hash) in node.getchaintips()
bad_block_lock = copy.deepcopy(block)
bad_block_lock.vtx[0].nLockTime = 2**32 - 1
bad_block_lock.vtx[0].rehash()
bad_block_lock.hashMerkleRoot = bad_block_lock.calc_merkle_root()
bad_block_lock.solve()
assert_equal(node.submitblock(hexdata=b2x(bad_block_lock.serialize())), 'bad-txns-nonfinal')
assert_equal(node.submitblock(hexdata=b2x(bad_block_lock.serialize())), 'duplicate-invalid')
# Build a "good" block on top of the submitted bad block
bad_block2 = copy.deepcopy(block)
bad_block2.hashPrevBlock = bad_block_lock.sha256
bad_block2.solve()
assert_raises_rpc_error(-25, 'bad-prevblk', lambda: node.submitheader(hexdata=b2x(CBlockHeader(bad_block2).serialize())))
# Should reject invalid header right away
bad_block_time = copy.deepcopy(block)
bad_block_time.nTime = 1
bad_block_time.solve()
assert_raises_rpc_error(-25, 'time-too-old', lambda: node.submitheader(hexdata=b2x(CBlockHeader(bad_block_time).serialize())))
# Should ask for the block from a p2p node, if they announce the header as well:
node.add_p2p_connection(P2PDataStore())
node.p2p.wait_for_getheaders(timeout=5) # Drop the first getheaders
node.p2p.send_blocks_and_test(blocks=[block], node=node)
# Must be active now:
assert chain_tip(block.hash, status='active', branchlen=0) in node.getchaintips()
# Building a few blocks should give the same results
node.generatetoaddress(10, node.get_deterministic_priv_key().address)
assert_raises_rpc_error(-25, 'time-too-old', lambda: node.submitheader(hexdata=b2x(CBlockHeader(bad_block_time).serialize())))
assert_raises_rpc_error(-25, 'bad-prevblk', lambda: node.submitheader(hexdata=b2x(CBlockHeader(bad_block2).serialize())))
node.submitheader(hexdata=b2x(CBlockHeader(block).serialize()))
node.submitheader(hexdata=b2x(CBlockHeader(bad_block_root).serialize()))
assert_equal(node.submitblock(hexdata=b2x(block.serialize())), 'duplicate') # valid
if __name__ == '__main__':
MiningTest().main()
| {
"pile_set_name": "Github"
} |
package one.mixin.android.ui.landing
import android.os.Bundle
import android.view.LayoutInflater
import android.view.View
import android.view.ViewGroup
import androidx.fragment.app.viewModels
import dagger.hilt.android.AndroidEntryPoint
import kotlinx.android.synthetic.main.fragment_time.*
import kotlinx.coroutines.Job
import one.mixin.android.Constants
import one.mixin.android.R
import one.mixin.android.extension.defaultSharedPreferences
import one.mixin.android.extension.putBoolean
import one.mixin.android.extension.shaking
import one.mixin.android.ui.common.BaseFragment
import one.mixin.android.ui.home.MainActivity
import one.mixin.android.util.ErrorHandler
@AndroidEntryPoint
class TimeFragment : BaseFragment() {
companion object {
const val TAG: String = "TimeFragment"
fun newInstance() = TimeFragment()
}
override fun onCreateView(inflater: LayoutInflater, container: ViewGroup?, savedInstanceState: Bundle?): View? =
inflater.inflate(R.layout.fragment_time, container, false)
private val loadingViewModel by viewModels<LoadingViewModel>()
override fun onResume() {
super.onResume()
checkTime()
}
override fun onViewCreated(view: View, savedInstanceState: Bundle?) {
super.onViewCreated(view, savedInstanceState)
continue_tv.setOnClickListener {
checkTime()
}
}
private var currentJob: Job? = null
private fun checkTime() {
if (currentJob == null || currentJob?.isActive == false) {
everybody_pb.visibility = View.VISIBLE
continue_tv.visibility = View.INVISIBLE
currentJob = loadingViewModel.pingServer(
{
if (isAdded) {
everybody_pb.visibility = View.INVISIBLE
continue_tv.visibility = View.VISIBLE
defaultSharedPreferences.putBoolean(Constants.Account.PREF_WRONG_TIME, false)
MainActivity.show(requireContext())
activity?.finish()
}
},
{ exception ->
if (isAdded) {
everybody_pb.visibility = View.INVISIBLE
continue_tv.visibility = View.VISIBLE
if (exception == null) {
info.shaking()
} else {
ErrorHandler.handleError(exception)
}
}
}
)
}
}
}
| {
"pile_set_name": "Github"
} |
/// Macro used to extract a parameter from a `QueryMap`.
///
/// Will return from the caller with a `BrokerError::Input` if
/// the parameter is missing and has no default.
///
/// ```
/// let foo = try_get_input_param!(params, "foo");
/// let foo = try_get_input_param!(params, "foo", "default");
/// ```
#[macro_export]
macro_rules! try_get_input_param {
( $params:expr , $key:tt ) => {
$params.remove($key).ok_or_else(|| {
crate::error::BrokerError::Input(concat!("missing request parameter ", $key).to_owned())
})?
};
( $params:expr , $key:tt , $default:expr ) => {
$params.remove($key).unwrap_or($default)
};
}
/// Macro used to extract a parameter from a `QueryMap`.
///
/// Will return from the caller with a `BrokerError::ProviderInput` if
/// the parameter is missing.
///
/// ```
/// let foo = try_get_provider_param!(params, "foo");
/// ```
#[macro_export]
macro_rules! try_get_provider_param {
( $params:expr , $key:tt ) => {
$params.remove($key).ok_or_else(|| {
crate::error::BrokerError::ProviderInput(
concat!("missing request parameter ", $key).to_owned(),
)
})?
};
}
/// Macro used to extract a typed field from a JSON Value.
///
/// Will return from the caller with a `BrokerError` if the field is missing or its value is an
/// incompatible type. `descr` is used to format the error message.
///
/// ```
/// let foo = try_get_token_field!(value, "foo", "example document");
/// ```
macro_rules! try_get_token_field {
( $input:expr, $key:tt, $conv:expr, $descr:expr ) => {
$input.get($key).and_then($conv).ok_or_else(|| {
crate::error::BrokerError::ProviderInput(format!("{} missing from {}", $key, $descr))
})?
};
( $input:expr, $key:tt, $descr:expr ) => {
try_get_token_field!($input, $key, serde_json::Value::as_str, $descr)
};
}
/// Macro used to verify a token payload field.
///
/// Will return from the caller with a `BrokerError` if the check fails. The `$key` and `$descr`
/// parameters are used in the error description.
///
/// ```
/// check_token_field!(foo == "bar", "foo", "example document");
/// ```
macro_rules! check_token_field {
( $check:expr, $key:expr, $descr:expr ) => {
if !$check {
return Err(crate::error::BrokerError::ProviderInput(format!(
"{} has incorrect value in {}",
$key, $descr
)));
}
};
}
/// Implements `Serialize` for a type that is `Display`.
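///
/// Illustrative use (the `Token` type is hypothetical and only needs to
/// implement `Display`):
///
/// ```
/// serde_display!(Token);
/// ```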
#[macro_export]
macro_rules! serde_display {
( $type:ty ) => {
impl serde::Serialize for $type {
fn serialize<S: serde::Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
serializer.collect_str(self)
}
}
};
}
/// Implements `Deserialize` for a type that is `FromStr`.
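///
/// Illustrative use (the `Token` type is hypothetical and only needs to
/// implement `FromStr`):
///
/// ```
/// serde_from_str!(Token);
/// ```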
#[macro_export]
macro_rules! serde_from_str {
( $type:ty ) => {
impl<'de> serde::Deserialize<'de> for $type {
fn deserialize<D: serde::Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {
<String as serde::Deserialize>::deserialize(deserializer)?
.parse()
.map_err(serde::de::Error::custom)
}
}
};
}
| {
"pile_set_name": "Github"
} |
"""
AppVeyor will at least have a few Pythons around, so there's no point in implementing a bootstrapper in PowerShell.
This is a port of https://github.com/pypa/python-packaging-user-guide/blob/master/source/code/install.ps1
with various fixes and improvements that just weren't feasible to implement in PowerShell.
"""
from __future__ import print_function
from os import environ
from os.path import exists
from subprocess import check_call
try:
from urllib.request import urlretrieve
except ImportError:
from urllib import urlretrieve
BASE_URL = "https://www.python.org/ftp/python/"
GET_PIP_URL = "https://bootstrap.pypa.io/get-pip.py"
GET_PIP_PATH = r"C:\get-pip.py"
URLS = {
("2.7", "64"): BASE_URL + "2.7.13/python-2.7.13.amd64.msi",
("2.7", "32"): BASE_URL + "2.7.13/python-2.7.13.msi",
("3.4", "64"): BASE_URL + "3.4.4/python-3.4.4.amd64.msi",
("3.4", "32"): BASE_URL + "3.4.4/python-3.4.4.msi",
("3.5", "64"): BASE_URL + "3.5.4/python-3.5.4-amd64.exe",
("3.5", "32"): BASE_URL + "3.5.4/python-3.5.4.exe",
("3.6", "64"): BASE_URL + "3.6.2/python-3.6.2-amd64.exe",
("3.6", "32"): BASE_URL + "3.6.2/python-3.6.2.exe",
}
INSTALL_CMD = {
    # Commands are allowed to fail only if they are not the last command, e.g. uninstall (/x) is allowed to fail.
"2.7": [
["msiexec.exe", "/L*+!", "install.log", "/qn", "/x", "{path}"],
[
"msiexec.exe",
"/L*+!",
"install.log",
"/qn",
"/i",
"{path}",
"TARGETDIR={home}",
],
],
"3.4": [
["msiexec.exe", "/L*+!", "install.log", "/qn", "/x", "{path}"],
[
"msiexec.exe",
"/L*+!",
"install.log",
"/qn",
"/i",
"{path}",
"TARGETDIR={home}",
],
],
"3.5": [["{path}", "/quiet", "TargetDir={home}"]],
"3.6": [["{path}", "/quiet", "TargetDir={home}"]],
}
def download_file(url, path):
print("Downloading: {} (into {})".format(url, path))
progress = [0, 0]
def report(count, size, total):
progress[0] = count * size
if progress[0] - progress[1] > 1000000:
progress[1] = progress[0]
print("Downloaded {:,}/{:,} ...".format(progress[1], total))
dest, _ = urlretrieve(url, path, reporthook=report)
return dest
def install_python(version, arch, home):
print("Installing Python", version, "for", arch, "bit architecture to", home)
if exists(home):
return
path = download_python(version, arch)
print("Installing", path, "to", home)
success = False
for cmd in INSTALL_CMD[version]:
cmd = [part.format(home=home, path=path) for part in cmd]
print("Running:", " ".join(cmd))
try:
check_call(cmd)
except Exception as exc:
print("Failed command", cmd, "with:", exc)
if exists("install.log"):
with open("install.log") as fh:
print(fh.read())
else:
success = True
if success:
print("Installation complete!")
else:
print("Installation failed")
def download_python(version, arch):
for _ in range(3):
try:
return download_file(URLS[version, arch], "installer.exe")
except Exception as exc:
print("Failed to download:", exc)
print("Retrying ...")
def install_pip(home):
pip_path = home + "/Scripts/pip.exe"
python_path = home + "/python.exe"
if exists(pip_path):
print("pip already installed.")
else:
print("Installing pip...")
download_file(GET_PIP_URL, GET_PIP_PATH)
print("Executing:", python_path, GET_PIP_PATH)
check_call([python_path, GET_PIP_PATH])
def install_packages(home, *packages):
cmd = [home + "/Scripts/pip.exe", "install"]
cmd.extend(packages)
check_call(cmd)
if __name__ == "__main__":
install_python(
environ["PYTHON_VERSION"], environ["PYTHON_ARCH"], environ["PYTHON_HOME"]
)
install_pip(environ["PYTHON_HOME"])
install_packages(
environ["PYTHON_HOME"],
"setuptools>=18.0.1",
"wheel",
"tox",
"virtualenv>=13.1.0",
)
| {
"pile_set_name": "Github"
} |
/*
* Copyright (c) 1997, 2018 Oracle and/or its affiliates. All rights reserved.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License v. 2.0, which is available at
* http://www.eclipse.org/legal/epl-2.0.
*
* This Source Code may also be made available under the following Secondary
* Licenses when the conditions for such availability set forth in the
* Eclipse Public License v. 2.0 are satisfied: GNU General Public License,
* version 2 with the GNU Classpath Exception, which is available at
* https://www.gnu.org/software/classpath/license.html.
*
* SPDX-License-Identifier: EPL-2.0 OR GPL-2.0 WITH Classpath-exception-2.0
*/
package org.glassfish.ejb.deployment.descriptor;
import java.util.logging.Level;
import com.sun.enterprise.deployment.util.DOLUtils;
/**
* A dummy implementation of the EjbDescriptor
*
*/
public class DummyEjbDescriptor extends EjbDescriptor
{
public DummyEjbDescriptor() {
}
@Override
public void setTransactionType(String transactionType) {
DOLUtils.getDefaultLogger().log(Level.WARNING, "enterprise.deployment_dummy_set_trans_type", new Object[] {getName()});
}
@Override
public String getContainerFactoryQualifier() {
throw new UnsupportedOperationException();
}
@Override
public String getType() {
return "Dummy";
}
@Override
public void setType(String type) {
DOLUtils.getDefaultLogger().log(Level.WARNING, "enterprise.deployment_dummy_set_type", new Object[] {getName()});
}
@Override
public String getEjbTypeForDisplay() {
return "Dummy";
}
}
| {
"pile_set_name": "Github"
} |
# dbtester
[](https://travis-ci.com/etcd-io/dbtester) [](https://godoc.org/github.com/etcd-io/dbtester)
Distributed database benchmark tester: etcd, Zookeeper, Consul, zetcd, cetcd
It includes github.com/golang/freetype, which is based in part on the work of the FreeType Team.
<br><br><hr>
##### Performance Analysis
- Latest test results can be found at https://github.com/etcd-io/dbtester/tree/master/test-results
- Exploring Performance of etcd, Zookeeper and Consul Consistent Key-value Datastores (February 17, 2017)
- https://coreos.com/blog/performance-of-etcd.html
<br><br><hr>
##### Project

- Database Agent
- https://github.com/etcd-io/dbtester/tree/master/agent
- Database Client
- https://github.com/etcd-io/dbtester/tree/master/control
- System Metrics
- https://github.com/gyuho/linux-inspect
- Test Data Analysis
- https://github.com/etcd-io/dbtester/tree/master/analyze
- https://github.com/gyuho/dataframe
- https://github.com/gonum/plot
For etcd, we recommend [etcd benchmark tool](https://github.com/coreos/etcd/tree/master/tools/benchmark).
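A typical invocation looks like the following; the endpoints, key/value sizes and totals are illustrative only, and flags can differ between etcd releases (check `benchmark --help`):
```
benchmark put \
  --endpoints=${ETCD_1}:2379,${ETCD_2}:2379,${ETCD_3}:2379 \
  --conns=100 --clients=1000 \
  --key-size=256 --val-size=1024 \
  --total=1000000
```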
All logs and results can be found at https://github.com/etcd-io/dbtester/tree/master/test-results or https://console.cloud.google.com/storage/browser/dbtester-results/?authuser=0&project=etcd-development.
<br><br><hr>
##### Noticeable Warnings: Zookeeper
Snapshot, when writing 1-million entries (256-byte key, 1KB value), with 500 concurrent clients
```
# snapshot warnings
cd 2017Q1-00-etcd-zookeeper-consul/02-write-1M-keys-best-throughput
grep -r -i fsync-ing\ the zookeeper-r3.4.9-java8-* | less
2017-02-10 18:55:38,997 [myid:3] - WARN [SyncThread:3:SyncRequestProcessor@148] - Too busy to snap, skipping
2017-02-10 18:55:38,998 [myid:3] - INFO [SyncThread:3:FileTxnLog@203] - Creating new log file: log.1000c0c51
2017-02-10 18:55:40,855 [myid:3] - INFO [SyncThread:3:FileTxnLog@203] - Creating new log file: log.1000cd2e6
2017-02-10 18:55:40,855 [myid:3] - INFO [Snapshot Thread:FileTxnSnapLog@240] - Snapshotting: 0x1000cd1ca to /home/gyuho/zookeeper/zookeeper.data/version-2/snapshot.1000cd1ca
2017-02-10 18:55:46,382 [myid:3] - WARN [SyncThread:3:FileTxnLog@338] - fsync-ing the write ahead log in SyncThread:3 took 1062ms which will adversely effect operation latency. See the ZooKeeper troubleshooting guide
2017-02-10 18:55:47,471 [myid:3] - WARN [SyncThread:3:FileTxnLog@338] - fsync-ing the write ahead log in SyncThread:3 took 1084ms which will adversely effect operation latency. See the ZooKeeper troubleshooting guide
2017-02-10 18:55:49,425 [myid:3] - WARN [SyncThread:3:FileTxnLog@338] - fsync-ing the write ahead log in SyncThread:3 took 1142ms which will adversely effect operation latency. See the ZooKeeper troubleshooting guide
2017-02-10 18:55:51,188 [myid:3] - WARN [SyncThread:3:FileTxnLog@338] - fsync-ing the write ahead log in SyncThread:3 took 1201ms which will adversely effect operation latency. See the ZooKeeper troubleshooting guide
2017-02-10 18:55:52,292 [myid:3] - WARN [SyncThread:3:FileTxnLog@338] - fsync-ing the write ahead log in SyncThread:3 took 1102ms which will adversely effect operation latency. See the ZooKeeper troubleshooting guide
```
When writing more than 2-million entries (256-byte key, 1KB value) with 500 concurrent clients
```
# leader election
cd 2017Q1-00-etcd-zookeeper-consul/04-write-too-many-keys
grep -r -i election\ took zookeeper-r3.4.9-java8-* | less
# leader election is taking more than 10 seconds...
zookeeper-r3.4.9-java8-2-database.log:2017-02-10 19:22:16,549 [myid:2] - INFO [QuorumPeer[myid=2]/0:0:0:0:0:0:0:0:2181:Follower@61] - FOLLOWING - LEADER ELECTION TOOK - 22978
zookeeper-r3.4.9-java8-2-database.log:2017-02-10 19:23:02,279 [myid:2] - INFO [QuorumPeer[myid=2]/0:0:0:0:0:0:0:0:2181:Leader@361] - LEADING - LEADER ELECTION TOOK - 10210
zookeeper-r3.4.9-java8-2-database.log:2017-02-10 19:23:14,498 [myid:2] - INFO [QuorumPeer[myid=2]/0:0:0:0:0:0:0:0:2181:Leader@361] - LEADING - LEADER ELECTION TOOK - 203
zookeeper-r3.4.9-java8-2-database.log:2017-02-10 19:23:36,303 [myid:2] - INFO [QuorumPeer[myid=2]/0:0:0:0:0:0:0:0:2181:Leader@361] - LEADING - LEADER ELECTION TOOK - 9791
zookeeper-r3.4.9-java8-2-database.log:2017-02-10 19:23:52,151 [myid:2] - INFO [QuorumPeer[myid=2]/0:0:0:0:0:0:0:0:2181:Leader@361] - LEADING - LEADER ELECTION TOOK - 3836
zookeeper-r3.4.9-java8-2-database.log:2017-02-10 19:24:13,849 [myid:2] - INFO [QuorumPeer[myid=2]/0:0:0:0:0:0:0:0:2181:Leader@361] - LEADING - LEADER ELECTION TOOK - 9686
zookeeper-r3.4.9-java8-2-database.log:2017-02-10 19:24:29,694 [myid:2] - INFO [QuorumPeer[myid=2]/0:0:0:0:0:0:0:0:2181:Leader@361] - LEADING - LEADER ELECTION TOOK - 3573
zookeeper-r3.4.9-java8-2-database.log:2017-02-10 19:24:51,392 [myid:2] - INFO [QuorumPeer[myid=2]/0:0:0:0:0:0:0:0:2181:Leader@361] - LEADING - LEADER ELECTION TOOK - 8686
zookeeper-r3.4.9-java8-2-database.log:2017-02-10 19:25:07,231 [myid:2] - INFO [QuorumPeer[myid=2]/0:0:0:0:0:0:0:0:2181:Leader@361] - LEADING - LEADER ELECTION TOOK - 3827
zookeeper-r3.4.9-java8-2-database.log:2017-02-10 19:25:28,940 [myid:2] - INFO [QuorumPeer[myid=2]/0:0:0:0:0:0:0:0:2181:Leader@361] - LEADING - LEADER ELECTION TOOK - 9697
zookeeper-r3.4.9-java8-2-database.log:2017-02-10 19:25:44,772 [myid:2] - INFO [QuorumPeer[myid=2]/0:0:0:0:0:0:0:0:2181:Leader@361] - LEADING - LEADER ELECTION TOOK - 3820
```
<br><br><hr>
##### Noticeable Warnings: Consul
Snapshot, when writing 1-million entries (256-byte key, 1KB value), with 500 concurrent clients
```
# snapshot warnings
cd 2017Q1-00-etcd-zookeeper-consul/02-write-1M-keys-best-throughput
grep -r -i installed\ remote consul-v0.7.4-go1.7.5-* | less
2017/02/10 18:58:43 [INFO] snapshot: Creating new snapshot at /home/gyuho/consul.data/raft/snapshots/2-900345-1486753123478.tmp
2017/02/10 18:58:45 [INFO] snapshot: reaping snapshot /home/gyuho/consul.data/raft/snapshots/2-849399-1486753096972
2017/02/10 18:58:46 [INFO] raft: Copied 1223270573 bytes to local snapshot
2017/02/10 18:58:55 [INFO] raft: Compacting logs from 868354 to 868801
2017/02/10 18:58:56 [INFO] raft: Installed remote snapshot
2017/02/10 18:58:57 [INFO] snapshot: Creating new snapshot at /home/gyuho/consul.data/raft/snapshots/2-911546-1486753137827.tmp
2017/02/10 18:58:59 [INFO] consul.fsm: snapshot created in 32.255µs
2017/02/10 18:59:01 [INFO] snapshot: reaping snapshot /home/gyuho/consul.data/raft/snapshots/2-873921-1486753116619
2017/02/10 18:59:02 [INFO] raft: Copied 1238491373 bytes to local snapshot
2017/02/10 18:59:11 [INFO] raft: Compacting logs from 868802 to 868801
2017/02/10 18:59:11 [INFO] raft: Installed remote snapshot
```
Logs do not tell much, but the average latency spikes (e.g. from 70.27517 ms to 10407.900082 ms).
<img src="https://storage.googleapis.com/dbtester-results/2017Q2-02-etcd-zookeeper-consul/2017Q2-01-write-1M-cpu-client-scaling.png" alt="2017Q2-01-write-1M-cpu-client-scaling">
<img src="https://storage.googleapis.com/dbtester-results/2017Q2-02-etcd-zookeeper-consul/2017Q2-02-write-1M-network-traffic-best-throughput.png" alt="2017Q2-02-write-1M-network-traffic-best-throughput">
<img src="https://storage.googleapis.com/dbtester-results/2017Q2-02-etcd-zookeeper-consul/2017Q2-01-write-1M-throughput-client-scaling.png" alt="2017Q2-01-write-1M-throughput-client-scaling">
<img src="https://storage.googleapis.com/dbtester-results/2017Q2-02-etcd-zookeeper-consul/2017Q2-02-write-1M-latency-best-throughput.png" alt="2017Q2-02-write-1M-latency-best-throughput">
<br><br><hr>
##### Write 1M keys, 256-byte key, 1KB value, Best Throughput (etcd 1K clients with 100 conns, Zookeeper 700, Consul 500 clients)
- Google Cloud Compute Engine
- 4 machines of 16 vCPUs + 60 GB Memory + 300 GB SSD (1 for client)
- Ubuntu 17.10 (GNU/Linux kernel 4.13.0-25-generic)
- `ulimit -n` is 120000
- etcd v3.3.0 (Go 1.9.2)
- Zookeeper r3.5.3-beta
- Java 8
- javac 1.8.0_151
- Java(TM) SE Runtime Environment (build 1.8.0_151-b12)
- Java HotSpot(TM) 64-Bit Server VM (build 25.151-b12, mixed mode)
- `/usr/bin/java -Djute.maxbuffer=33554432 -Xms50G -Xmx50G`
- Consul v1.0.2 (Go 1.9.2)
```
+---------------------------------------+---------------------+-----------------------------+-----------------------+
| | etcd-v3.3.0-go1.9.2 | zookeeper-r3.5.3-beta-java8 | consul-v1.0.2-go1.9.2 |
+---------------------------------------+---------------------+-----------------------------+-----------------------+
| TOTAL-SECONDS | 28.3623 sec | 59.2167 sec | 178.9443 sec |
| TOTAL-REQUEST-NUMBER | 1,000,000 | 1,000,000 | 1,000,000 |
| MAX-THROUGHPUT | 37,330 req/sec | 25,124 req/sec | 15,865 req/sec |
| AVG-THROUGHPUT | 35,258 req/sec | 16,842 req/sec | 5,588 req/sec |
| MIN-THROUGHPUT | 13,505 req/sec | 20 req/sec | 0 req/sec |
| FASTEST-LATENCY | 4.6073 ms | 2.9094 ms | 11.6604 ms |
| AVG-LATENCY | 28.2625 ms | 30.9499 ms | 89.4351 ms |
| SLOWEST-LATENCY | 117.4918 ms | 4564.6788 ms | 4616.2947 ms |
| Latency p10 | 13.508626 ms | 9.068163 ms | 30.408863 ms |
| Latency p25 | 16.869586 ms | 9.351597 ms | 34.224021 ms |
| Latency p50 | 22.167478 ms | 10.093377 ms | 39.881181 ms |
| Latency p75 | 34.855941 ms | 14.951189 ms | 52.644787 ms |
| Latency p90 | 54.613394 ms | 28.497256 ms | 118.340402 ms |
| Latency p95 | 59.785127 ms | 72.671788 ms | 229.129526 ms |
| Latency p99 | 74.139638 ms | 273.218523 ms | 1495.660763 ms |
| Latency p99.9 | 97.385495 ms | 2526.873285 ms | 3499.225138 ms |
| SERVER-TOTAL-NETWORK-RX-DATA-SUM | 5.1 GB | 4.6 GB | 5.6 GB |
| SERVER-TOTAL-NETWORK-TX-DATA-SUM | 3.8 GB | 3.6 GB | 4.4 GB |
| CLIENT-TOTAL-NETWORK-RX-SUM | 252 MB | 357 MB | 206 MB |
| CLIENT-TOTAL-NETWORK-TX-SUM | 1.5 GB | 1.4 GB | 1.5 GB |
| SERVER-MAX-CPU-USAGE | 446.83 % | 1122.00 % | 426.33 % |
| SERVER-MAX-MEMORY-USAGE | 1.1 GB | 15 GB | 4.6 GB |
| CLIENT-MAX-CPU-USAGE | 606.00 % | 314.00 % | 215.00 % |
| CLIENT-MAX-MEMORY-USAGE | 96 MB | 2.4 GB | 86 MB |
| CLIENT-ERROR-COUNT | 0 | 2,652 | 0 |
| SERVER-AVG-READS-COMPLETED-DELTA-SUM | 0 | 237 | 2 |
| SERVER-AVG-SECTORS-READS-DELTA-SUM | 0 | 0 | 0 |
| SERVER-AVG-WRITES-COMPLETED-DELTA-SUM | 108,067 | 157,034 | 675,072 |
| SERVER-AVG-SECTORS-WRITTEN-DELTA-SUM | 20,449,360 | 16,480,488 | 106,836,768 |
| SERVER-AVG-DISK-SPACE-USAGE | 2.6 GB | 6.9 GB | 2.9 GB |
+---------------------------------------+---------------------+-----------------------------+-----------------------+
zookeeper__r3_5_3_beta errors:
"zk: connection closed" (count 2,264)
"zk: could not connect to a server" (count 388)
```
<img src="https://storage.googleapis.com/dbtester-results/2018Q1-02-etcd-zookeeper-consul/write-1M-keys-best-throughput/AVG-LATENCY-MS.svg" alt="2018Q1-02-etcd-zookeeper-consul/write-1M-keys-best-throughput/AVG-LATENCY-MS">
<img src="https://storage.googleapis.com/dbtester-results/2018Q1-02-etcd-zookeeper-consul/write-1M-keys-best-throughput/AVG-LATENCY-MS-BY-KEY.svg" alt="2018Q1-02-etcd-zookeeper-consul/write-1M-keys-best-throughput/AVG-LATENCY-MS-BY-KEY">
<img src="https://storage.googleapis.com/dbtester-results/2018Q1-02-etcd-zookeeper-consul/write-1M-keys-best-throughput/AVG-LATENCY-MS-BY-KEY-ERROR-POINTS.svg" alt="2018Q1-02-etcd-zookeeper-consul/write-1M-keys-best-throughput/AVG-LATENCY-MS-BY-KEY-ERROR-POINTS">
<img src="https://storage.googleapis.com/dbtester-results/2018Q1-02-etcd-zookeeper-consul/write-1M-keys-best-throughput/AVG-THROUGHPUT.svg" alt="2018Q1-02-etcd-zookeeper-consul/write-1M-keys-best-throughput/AVG-THROUGHPUT">
<img src="https://storage.googleapis.com/dbtester-results/2018Q1-02-etcd-zookeeper-consul/write-1M-keys-best-throughput/AVG-VOLUNTARY-CTXT-SWITCHES.svg" alt="2018Q1-02-etcd-zookeeper-consul/write-1M-keys-best-throughput/AVG-VOLUNTARY-CTXT-SWITCHES">
<img src="https://storage.googleapis.com/dbtester-results/2018Q1-02-etcd-zookeeper-consul/write-1M-keys-best-throughput/AVG-NON-VOLUNTARY-CTXT-SWITCHES.svg" alt="2018Q1-02-etcd-zookeeper-consul/write-1M-keys-best-throughput/AVG-NON-VOLUNTARY-CTXT-SWITCHES">
<img src="https://storage.googleapis.com/dbtester-results/2018Q1-02-etcd-zookeeper-consul/write-1M-keys-best-throughput/AVG-CPU.svg" alt="2018Q1-02-etcd-zookeeper-consul/write-1M-keys-best-throughput/AVG-CPU">
<img src="https://storage.googleapis.com/dbtester-results/2018Q1-02-etcd-zookeeper-consul/write-1M-keys-best-throughput/MAX-CPU.svg" alt="2018Q1-02-etcd-zookeeper-consul/write-1M-keys-best-throughput/MAX-CPU">
<img src="https://storage.googleapis.com/dbtester-results/2018Q1-02-etcd-zookeeper-consul/write-1M-keys-best-throughput/AVG-VMRSS-MB.svg" alt="2018Q1-02-etcd-zookeeper-consul/write-1M-keys-best-throughput/AVG-VMRSS-MB">
<img src="https://storage.googleapis.com/dbtester-results/2018Q1-02-etcd-zookeeper-consul/write-1M-keys-best-throughput/AVG-VMRSS-MB-BY-KEY.svg" alt="2018Q1-02-etcd-zookeeper-consul/write-1M-keys-best-throughput/AVG-VMRSS-MB-BY-KEY">
<img src="https://storage.googleapis.com/dbtester-results/2018Q1-02-etcd-zookeeper-consul/write-1M-keys-best-throughput/AVG-VMRSS-MB-BY-KEY-ERROR-POINTS.svg" alt="2018Q1-02-etcd-zookeeper-consul/write-1M-keys-best-throughput/AVG-VMRSS-MB-BY-KEY-ERROR-POINTS">
<img src="https://storage.googleapis.com/dbtester-results/2018Q1-02-etcd-zookeeper-consul/write-1M-keys-best-throughput/AVG-READS-COMPLETED-DELTA.svg" alt="2018Q1-02-etcd-zookeeper-consul/write-1M-keys-best-throughput/AVG-READS-COMPLETED-DELTA">
<img src="https://storage.googleapis.com/dbtester-results/2018Q1-02-etcd-zookeeper-consul/write-1M-keys-best-throughput/AVG-SECTORS-READ-DELTA.svg" alt="2018Q1-02-etcd-zookeeper-consul/write-1M-keys-best-throughput/AVG-SECTORS-READ-DELTA">
<img src="https://storage.googleapis.com/dbtester-results/2018Q1-02-etcd-zookeeper-consul/write-1M-keys-best-throughput/AVG-WRITES-COMPLETED-DELTA.svg" alt="2018Q1-02-etcd-zookeeper-consul/write-1M-keys-best-throughput/AVG-WRITES-COMPLETED-DELTA">
<img src="https://storage.googleapis.com/dbtester-results/2018Q1-02-etcd-zookeeper-consul/write-1M-keys-best-throughput/AVG-SECTORS-WRITTEN-DELTA.svg" alt="2018Q1-02-etcd-zookeeper-consul/write-1M-keys-best-throughput/AVG-SECTORS-WRITTEN-DELTA">
<img src="https://storage.googleapis.com/dbtester-results/2018Q1-02-etcd-zookeeper-consul/write-1M-keys-best-throughput/AVG-READ-BYTES-NUM-DELTA.svg" alt="2018Q1-02-etcd-zookeeper-consul/write-1M-keys-best-throughput/AVG-READ-BYTES-NUM-DELTA">
<img src="https://storage.googleapis.com/dbtester-results/2018Q1-02-etcd-zookeeper-consul/write-1M-keys-best-throughput/AVG-WRITE-BYTES-NUM-DELTA.svg" alt="2018Q1-02-etcd-zookeeper-consul/write-1M-keys-best-throughput/AVG-WRITE-BYTES-NUM-DELTA">
<img src="https://storage.googleapis.com/dbtester-results/2018Q1-02-etcd-zookeeper-consul/write-1M-keys-best-throughput/AVG-RECEIVE-BYTES-NUM-DELTA.svg" alt="2018Q1-02-etcd-zookeeper-consul/write-1M-keys-best-throughput/AVG-RECEIVE-BYTES-NUM-DELTA">
<img src="https://storage.googleapis.com/dbtester-results/2018Q1-02-etcd-zookeeper-consul/write-1M-keys-best-throughput/AVG-TRANSMIT-BYTES-NUM-DELTA.svg" alt="2018Q1-02-etcd-zookeeper-consul/write-1M-keys-best-throughput/AVG-TRANSMIT-BYTES-NUM-DELTA">
| {
"pile_set_name": "Github"
} |
###############################################################################
## v # The Coq Proof Assistant ##
## <O___,, # INRIA - CNRS - LIX - LRI - PPS ##
## \VV/ # ##
## // # ##
###############################################################################
## GNUMakefile for Coq 8.10.2
# For debugging purposes (must stay here, don't move below)
INITIAL_VARS := $(.VARIABLES)
# To implement recursion we save the name of the main Makefile
SELF := $(lastword $(MAKEFILE_LIST))
PARENT := $(firstword $(MAKEFILE_LIST))
# This file is generated by coq_makefile and contains many variable
# definitions, like the list of .v files or the path to Coq
include Makefile.conf
# Put in place old names
VFILES := $(COQMF_VFILES)
MLIFILES := $(COQMF_MLIFILES)
MLFILES := $(COQMF_MLFILES)
MLGFILES := $(COQMF_MLGFILES)
MLPACKFILES := $(COQMF_MLPACKFILES)
MLLIBFILES := $(COQMF_MLLIBFILES)
CMDLINE_VFILES := $(COQMF_CMDLINE_VFILES)
INSTALLCOQDOCROOT := $(COQMF_INSTALLCOQDOCROOT)
OTHERFLAGS := $(COQMF_OTHERFLAGS)
COQ_SRC_SUBDIRS := $(COQMF_COQ_SRC_SUBDIRS)
OCAMLLIBS := $(COQMF_OCAMLLIBS)
SRC_SUBDIRS := $(COQMF_SRC_SUBDIRS)
COQLIBS := $(COQMF_COQLIBS)
COQLIBS_NOML := $(COQMF_COQLIBS_NOML)
CMDLINE_COQLIBS := $(COQMF_CMDLINE_COQLIBS)
LOCAL := $(COQMF_LOCAL)
COQLIB := $(COQMF_COQLIB)
DOCDIR := $(COQMF_DOCDIR)
OCAMLFIND := $(COQMF_OCAMLFIND)
CAMLFLAGS := $(COQMF_CAMLFLAGS)
HASNATDYNLINK := $(COQMF_HASNATDYNLINK)
OCAMLWARN := $(COQMF_WARN)
Makefile.conf: _CoqProject
coq_makefile -f _CoqProject -o Makefile
# This file can be created by the user to hook into double colon rules or
# add any other Makefile code he may need
-include Makefile.local
# Parameters ##################################################################
#
# Parameters are make variable assignments.
# They can be passed to (each call to) make on the command line.
# They can also be put in Makefile.local once and for all.
# For retro-compatibility reasons they can be put in the _CoqProject, but this
# practice is discouraged since _CoqProject should not contain make-specific
# code (be nice to user interfaces).
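#
# For example (values are illustrative only):
#
#   make VERBOSE=1 TIMED=1
#
# or, once and for all, in Makefile.local:
#
#   VERBOSE = 1
#   TIMED = 1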
# Print shell commands (set to non empty)
VERBOSE ?=
# Time the Coq process (set to non empty), and how (see default value)
TIMED?=
TIMECMD?=
# Use command time on linux, gtime on Mac OS
TIMEFMT?="$* (real: %e, user: %U, sys: %S, mem: %M ko)"
ifneq (,$(TIMED))
ifeq (0,$(shell command time -f $(TIMEFMT) true >/dev/null 2>/dev/null; echo $$?))
STDTIME?=command time -f $(TIMEFMT)
else
ifeq (0,$(shell gtime -f $(TIMEFMT) true >/dev/null 2>/dev/null; echo $$?))
STDTIME?=gtime -f $(TIMEFMT)
else
STDTIME?=command time
endif
endif
else
STDTIME?=command time -f $(TIMEFMT)
endif
# Coq binaries
COQC ?= "$(COQBIN)coqc"
COQTOP ?= "$(COQBIN)coqtop"
COQCHK ?= "$(COQBIN)coqchk"
COQDEP ?= "$(COQBIN)coqdep"
COQDOC ?= "$(COQBIN)coqdoc"
COQPP ?= "$(COQBIN)coqpp"
COQMKFILE ?= "$(COQBIN)coq_makefile"
# Timing scripts
COQMAKE_ONE_TIME_FILE ?= "$(COQLIB)/tools/make-one-time-file.py"
COQMAKE_BOTH_TIME_FILES ?= "$(COQLIB)/tools/make-both-time-files.py"
COQMAKE_BOTH_SINGLE_TIMING_FILES ?= "$(COQLIB)/tools/make-both-single-timing-files.py"
BEFORE ?=
AFTER ?=
# FIXME this should be generated by Coq (modules already linked by Coq)
CAMLDONTLINK=unix,str
# OCaml binaries
CAMLC ?= "$(OCAMLFIND)" ocamlc -c
CAMLOPTC ?= "$(OCAMLFIND)" opt -c
CAMLLINK ?= "$(OCAMLFIND)" ocamlc -linkpkg -dontlink $(CAMLDONTLINK)
CAMLOPTLINK ?= "$(OCAMLFIND)" opt -linkpkg -dontlink $(CAMLDONTLINK)
CAMLDOC ?= "$(OCAMLFIND)" ocamldoc
CAMLDEP ?= "$(OCAMLFIND)" ocamldep -slash -ml-synonym .mlpack
# DESTDIR is prepended to all installation paths
DESTDIR ?=
# Debug builds, typically -g to OCaml, -debug to Coq.
CAMLDEBUG ?=
COQDEBUG ?=
# Extra packages to be linked in (as in findlib -package)
CAMLPKGS ?=
# Option for making timing files
TIMING?=
# Option for changing sorting of timing output file
TIMING_SORT_BY ?= auto
# Output file names for timed builds
TIME_OF_BUILD_FILE ?= time-of-build.log
TIME_OF_BUILD_BEFORE_FILE ?= time-of-build-before.log
TIME_OF_BUILD_AFTER_FILE ?= time-of-build-after.log
TIME_OF_PRETTY_BUILD_FILE ?= time-of-build-pretty.log
TIME_OF_PRETTY_BOTH_BUILD_FILE ?= time-of-build-both.log
TIME_OF_PRETTY_BUILD_EXTRA_FILES ?= - # also output to the command line
TGTS ?=
########## End of parameters ##################################################
# What follows may be relevant to you only if you need to
# extend this Makefile. If so, look for 'Extension point' here and
# add the corresponding double colon rules to Makefile.local.
# E.g. to perform some work after the all target completes you can write
#
# post-all::
# echo "All done!"
#
# in Makefile.local
#
###############################################################################
# Flags #######################################################################
#
# We define a bunch of variables combining the parameters.
# To add additional flags to coq, coqchk or coqdoc, set the
# {COQ,COQCHK,COQDOC}EXTRAFLAGS variable to whatever you want to add.
# To overwrite the default choice and set your own flags entirely, set the
# {COQ,COQCHK,COQDOC}FLAGS variable.
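#
# For example, in Makefile.local (the flag is illustrative; any flag accepted
# by coqc can be used):
#
#   COQEXTRAFLAGS += -w all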
SHOW := $(if $(VERBOSE),@true "",@echo "")
HIDE := $(if $(VERBOSE),,@)
TIMER=$(if $(TIMED), $(STDTIME), $(TIMECMD))
OPT?=
# The DYNOBJ and DYNLIB variables are used by "coqdep -dyndep var" in .v.d
ifeq '$(OPT)' '-byte'
USEBYTE:=true
DYNOBJ:=.cma
DYNLIB:=.cma
else
USEBYTE:=
DYNOBJ:=.cmxs
DYNLIB:=.cmxs
endif
# these variables are meant to be overridden if you want to add *extra* flags
COQEXTRAFLAGS?=
COQCHKEXTRAFLAGS?=
COQDOCEXTRAFLAGS?=
# these flags do NOT contain the libraries, to make them easier to overwrite
COQFLAGS?=-q $(OTHERFLAGS) $(COQEXTRAFLAGS)
COQCHKFLAGS?=-silent -o $(COQCHKEXTRAFLAGS)
COQDOCFLAGS?=-interpolate -utf8 $(COQDOCEXTRAFLAGS)
COQDOCLIBS?=$(COQLIBS_NOML)
# The version of Coq being run and the version of coq_makefile that
# generated this makefile
COQ_VERSION:=$(shell $(COQC) --print-version | cut -d " " -f 1)
COQMAKEFILE_VERSION:=8.10.2
COQSRCLIBS?= $(foreach d,$(COQ_SRC_SUBDIRS), -I "$(COQLIB)/$(d)")
CAMLFLAGS+=$(OCAMLLIBS) $(COQSRCLIBS)
# ocamldoc fails with unknown argument otherwise
CAMLDOCFLAGS:=$(filter-out -annot, $(filter-out -bin-annot, $(CAMLFLAGS)))
CAMLFLAGS+=$(OCAMLWARN)
ifneq (,$(TIMING))
TIMING_ARG=-time
ifeq (after,$(TIMING))
TIMING_EXT=after-timing
else
ifeq (before,$(TIMING))
TIMING_EXT=before-timing
else
TIMING_EXT=timing
endif
endif
else
TIMING_ARG=
endif
# Retro compatibility (DESTDIR is standard on Unix, DSTROOT is not)
ifdef DSTROOT
DESTDIR := $(DSTROOT)
endif
concat_path = $(if $(1),$(1)/$(if $(COQMF_WINDRIVE),$(subst $(COQMF_WINDRIVE),/,$(2)),$(2)),$(2))
COQLIBINSTALL = $(call concat_path,$(DESTDIR),$(COQLIB)/user-contrib)
COQDOCINSTALL = $(call concat_path,$(DESTDIR),$(DOCDIR)/user-contrib)
COQTOPINSTALL = $(call concat_path,$(DESTDIR),$(COQLIB)/toploop)
# Files #######################################################################
#
# Here we define a bunch of variables describing the files that are part of
# the Coq project, in order to ease the writing of build targets and build rules
VDFILE := .coqdeps
ALLSRCFILES := \
$(MLGFILES) \
$(MLFILES) \
$(MLPACKFILES) \
$(MLLIBFILES) \
$(MLIFILES)
# helpers
vo_to_obj = $(addsuffix .o,\
$(filter-out Warning: Error:,\
$(shell $(COQTOP) -q -noinit -batch -quiet -print-mod-uid $(1))))
strip_dotslash = $(patsubst ./%,%,$(1))
# without this we get undefined variables in the expansion for the
# targets of the [deprecated,use-mllib-or-mlpack] rule
with_undef = $(if $(filter-out undefined, $(origin $(1))),$($(1)))
VO = vo
VOFILES = $(VFILES:.v=.$(VO))
GLOBFILES = $(VFILES:.v=.glob)
HTMLFILES = $(VFILES:.v=.html)
GHTMLFILES = $(VFILES:.v=.g.html)
BEAUTYFILES = $(addsuffix .beautified,$(VFILES))
TEXFILES = $(VFILES:.v=.tex)
GTEXFILES = $(VFILES:.v=.g.tex)
CMOFILES = \
$(MLGFILES:.mlg=.cmo) \
$(MLFILES:.ml=.cmo) \
$(MLPACKFILES:.mlpack=.cmo)
CMXFILES = $(CMOFILES:.cmo=.cmx)
OFILES = $(CMXFILES:.cmx=.o)
CMAFILES = $(MLLIBFILES:.mllib=.cma) $(MLPACKFILES:.mlpack=.cma)
CMXAFILES = $(CMAFILES:.cma=.cmxa)
CMIFILES = \
$(CMOFILES:.cmo=.cmi) \
$(MLIFILES:.mli=.cmi)
# the /if/ is because old _CoqProject did not list a .ml(pack|lib) but just
# a .ml4 file
CMXSFILES = \
$(MLPACKFILES:.mlpack=.cmxs) \
$(CMXAFILES:.cmxa=.cmxs) \
$(if $(MLPACKFILES)$(CMXAFILES),,\
$(MLGFILES:.mlg=.cmxs) $(MLFILES:.ml=.cmxs))
# files that are packed into a plugin (no extension)
PACKEDFILES = \
$(call strip_dotslash, \
$(foreach lib, \
$(call strip_dotslash, \
$(MLPACKFILES:.mlpack=_MLPACK_DEPENDENCIES)),$(call with_undef,$(lib))))
# files that are archived into a .cma (mllib)
LIBEDFILES = \
$(call strip_dotslash, \
$(foreach lib, \
$(call strip_dotslash, \
$(MLLIBFILES:.mllib=_MLLIB_DEPENDENCIES)),$(call with_undef,$(lib))))
CMIFILESTOINSTALL = $(filter-out $(addsuffix .cmi,$(PACKEDFILES)),$(CMIFILES))
CMOFILESTOINSTALL = $(filter-out $(addsuffix .cmo,$(PACKEDFILES)),$(CMOFILES))
OBJFILES = $(call vo_to_obj,$(VOFILES))
ALLNATIVEFILES = \
$(OBJFILES:.o=.cmi) \
$(OBJFILES:.o=.cmx) \
$(OBJFILES:.o=.cmxs)
# trick: wildcard filters out non-existing files, so that `install` doesn't show
# warnings and `clean` doesn't pass to rm a list of files that is too long for
# the shell.
NATIVEFILES = $(wildcard $(ALLNATIVEFILES))
FILESTOINSTALL = \
$(VOFILES) \
$(VFILES) \
$(GLOBFILES) \
$(NATIVEFILES) \
$(CMIFILESTOINSTALL)
BYTEFILESTOINSTALL = \
$(CMOFILESTOINSTALL) \
$(CMAFILES)
ifeq '$(HASNATDYNLINK)' 'true'
DO_NATDYNLINK = yes
FILESTOINSTALL += $(CMXSFILES) $(CMXAFILES) $(CMOFILESTOINSTALL:.cmo=.cmx)
else
DO_NATDYNLINK =
endif
ALLDFILES = $(addsuffix .d,$(ALLSRCFILES) $(VDFILE))
# Compilation targets #########################################################
all:
$(HIDE)$(MAKE) --no-print-directory -f "$(SELF)" pre-all
$(HIDE)$(MAKE) --no-print-directory -f "$(SELF)" real-all
$(HIDE)$(MAKE) --no-print-directory -f "$(SELF)" post-all
.PHONY: all
all.timing.diff:
$(HIDE)$(MAKE) --no-print-directory -f "$(SELF)" pre-all
$(HIDE)$(MAKE) --no-print-directory -f "$(SELF)" real-all.timing.diff TIME_OF_PRETTY_BUILD_EXTRA_FILES=""
$(HIDE)$(MAKE) --no-print-directory -f "$(SELF)" post-all
.PHONY: all.timing.diff
make-pretty-timed-before:: TIME_OF_BUILD_FILE=$(TIME_OF_BUILD_BEFORE_FILE)
make-pretty-timed-after:: TIME_OF_BUILD_FILE=$(TIME_OF_BUILD_AFTER_FILE)
make-pretty-timed make-pretty-timed-before make-pretty-timed-after::
$(HIDE)rm -f pretty-timed-success.ok
$(HIDE)($(MAKE) --no-print-directory -f "$(PARENT)" $(TGTS) TIMED=1 2>&1 && touch pretty-timed-success.ok) | tee -a $(TIME_OF_BUILD_FILE)
$(HIDE)rm pretty-timed-success.ok # must not be -f; must fail if the touch failed
print-pretty-timed::
$(HIDE)$(COQMAKE_ONE_TIME_FILE) $(TIME_OF_BUILD_FILE) $(TIME_OF_PRETTY_BUILD_FILE) $(TIME_OF_PRETTY_BUILD_EXTRA_FILES)
print-pretty-timed-diff::
$(HIDE)$(COQMAKE_BOTH_TIME_FILES) --sort-by=$(TIMING_SORT_BY) $(TIME_OF_BUILD_AFTER_FILE) $(TIME_OF_BUILD_BEFORE_FILE) $(TIME_OF_PRETTY_BOTH_BUILD_FILE) $(TIME_OF_PRETTY_BUILD_EXTRA_FILES)
ifeq (,$(BEFORE))
print-pretty-single-time-diff::
@echo 'Error: Usage: $(MAKE) print-pretty-single-time-diff AFTER=path/to/file.v.after-timing BEFORE=path/to/file.v.before-timing'
$(HIDE)false
else
ifeq (,$(AFTER))
print-pretty-single-time-diff::
@echo 'Error: Usage: $(MAKE) print-pretty-single-time-diff AFTER=path/to/file.v.after-timing BEFORE=path/to/file.v.before-timing'
$(HIDE)false
else
print-pretty-single-time-diff::
$(HIDE)$(COQMAKE_BOTH_SINGLE_TIMING_FILES) --sort-by=$(TIMING_SORT_BY) $(AFTER) $(BEFORE) $(TIME_OF_PRETTY_BUILD_FILE) $(TIME_OF_PRETTY_BUILD_EXTRA_FILES)
endif
endif
pretty-timed:
$(HIDE)$(MAKE) --no-print-directory -f "$(PARENT)" make-pretty-timed
$(HIDE)$(MAKE) --no-print-directory -f "$(SELF)" print-pretty-timed
.PHONY: pretty-timed make-pretty-timed make-pretty-timed-before make-pretty-timed-after print-pretty-timed print-pretty-timed-diff print-pretty-single-time-diff
# Extension points for actions to be performed before/after the all target
pre-all::
@# Extension point
$(HIDE)if [ "$(COQMAKEFILE_VERSION)" != "$(COQ_VERSION)" ]; then\
echo "W: This Makefile was generated by Coq $(COQMAKEFILE_VERSION)";\
echo "W: while the current Coq version is $(COQ_VERSION)";\
fi
.PHONY: pre-all
post-all::
@# Extension point
.PHONY: post-all
real-all: $(VOFILES) $(if $(USEBYTE),bytefiles,optfiles)
.PHONY: real-all
real-all.timing.diff: $(VOFILES:.vo=.v.timing.diff)
.PHONY: real-all.timing.diff
bytefiles: $(CMOFILES) $(CMAFILES)
.PHONY: bytefiles
optfiles: $(if $(DO_NATDYNLINK),$(CMXSFILES))
.PHONY: optfiles
# FIXME, see Ralf's bugreport
quick: $(VOFILES:.vo=.vio)
.PHONY: quick
vio2vo:
$(TIMER) $(COQC) $(COQDEBUG) $(COQFLAGS) $(COQLIBS) \
-schedule-vio2vo $(J) $(VOFILES:%.vo=%.vio)
.PHONY: vio2vo
quick2vo:
$(HIDE)make -j $(J) quick
$(HIDE)VIOFILES=$$(for vofile in $(VOFILES); do \
viofile="$$(echo "$$vofile" | sed "s/\.vo$$/.vio/")"; \
if [ "$$vofile" -ot "$$viofile" -o ! -e "$$vofile" ]; then printf "$$viofile "; fi; \
done); \
echo "VIO2VO: $$VIOFILES"; \
if [ -n "$$VIOFILES" ]; then \
$(TIMER) $(COQC) $(COQDEBUG) $(COQFLAGS) $(COQLIBS) -schedule-vio2vo $(J) $$VIOFILES; \
fi
.PHONY: quick2vo
checkproofs:
$(TIMER) $(COQC) $(COQDEBUG) $(COQFLAGS) $(COQLIBS) \
-schedule-vio-checking $(J) $(VOFILES:%.vo=%.vio)
.PHONY: checkproofs
validate: $(VOFILES)
$(TIMER) $(COQCHK) $(COQCHKFLAGS) $(COQLIBS) $^
.PHONY: validate
only: $(TGTS)
.PHONY: only
# Documentation targets #######################################################
html: $(GLOBFILES) $(VFILES)
$(SHOW)'COQDOC -d html $(GAL)'
$(HIDE)mkdir -p html
$(HIDE)$(COQDOC) \
-toc $(COQDOCFLAGS) -html $(GAL) $(COQDOCLIBS) -d html $(VFILES)
mlihtml: $(MLIFILES:.mli=.cmi)
$(SHOW)'CAMLDOC -d $@'
$(HIDE)mkdir $@ || rm -rf $@/*
$(HIDE)$(CAMLDOC) -html \
-d $@ -m A $(CAMLDEBUG) $(CAMLDOCFLAGS) $(MLIFILES)
all-mli.tex: $(MLIFILES:.mli=.cmi)
$(SHOW)'CAMLDOC -latex $@'
$(HIDE)$(CAMLDOC) -latex \
-o $@ -m A $(CAMLDEBUG) $(CAMLDOCFLAGS) $(MLIFILES)
all.ps: $(VFILES)
$(SHOW)'COQDOC -ps $(GAL)'
$(HIDE)$(COQDOC) \
-toc $(COQDOCFLAGS) -ps $(GAL) $(COQDOCLIBS) \
-o $@ `$(COQDEP) -sort -suffix .v $(VFILES)`
all.pdf: $(VFILES)
$(SHOW)'COQDOC -pdf $(GAL)'
$(HIDE)$(COQDOC) \
-toc $(COQDOCFLAGS) -pdf $(GAL) $(COQDOCLIBS) \
-o $@ `$(COQDEP) -sort -suffix .v $(VFILES)`
# FIXME: not quite right, since the output name is different
gallinahtml: GAL=-g
gallinahtml: html
all-gal.ps: GAL=-g
all-gal.ps: all.ps
all-gal.pdf: GAL=-g
all-gal.pdf: all.pdf
# ?
beautify: $(BEAUTYFILES)
for file in $^; do mv $${file%.beautified} $${file%beautified}old && mv $${file} $${file%.beautified}; done
@echo 'Do not do "make clean" until you are sure that everything went well!'
	@echo 'If there was a problem, execute "for file in $$(find . -name \*.v.old -print); do mv $${file} $${file%.old}; done" in your shell.'
.PHONY: beautify
# Installation targets ########################################################
#
# These rules can be extended in Makefile.local
# Extensions can't assume anything about when they run.
install:
$(HIDE)code=0; for f in $(FILESTOINSTALL); do\
if ! [ -f "$$f" ]; then >&2 echo $$f does not exist; code=1; fi \
done; exit $$code
$(HIDE)for f in $(FILESTOINSTALL); do\
df="`$(COQMKFILE) -destination-of "$$f" $(COQLIBS)`";\
if [ "$$?" != "0" -o -z "$$df" ]; then\
echo SKIP "$$f" since it has no logical path;\
else\
install -d "$(COQLIBINSTALL)/$$df" &&\
install -m 0644 "$$f" "$(COQLIBINSTALL)/$$df" &&\
echo INSTALL "$$f" "$(COQLIBINSTALL)/$$df";\
fi;\
done
$(HIDE)$(MAKE) install-extra -f "$(SELF)"
install-extra::
@# Extension point
.PHONY: install install-extra
install-byte:
$(HIDE)for f in $(BYTEFILESTOINSTALL); do\
df="`$(COQMKFILE) -destination-of "$$f" $(COQLIBS)`";\
if [ "$$?" != "0" -o -z "$$df" ]; then\
echo SKIP "$$f" since it has no logical path;\
else\
install -d "$(COQLIBINSTALL)/$$df" &&\
install -m 0644 "$$f" "$(COQLIBINSTALL)/$$df" &&\
echo INSTALL "$$f" "$(COQLIBINSTALL)/$$df";\
fi;\
done
install-doc:: html mlihtml
@# Extension point
$(HIDE)install -d "$(COQDOCINSTALL)/$(INSTALLCOQDOCROOT)/html"
$(HIDE)for i in html/*; do \
dest="$(COQDOCINSTALL)/$(INSTALLCOQDOCROOT)/$$i";\
install -m 0644 "$$i" "$$dest";\
echo INSTALL "$$i" "$$dest";\
done
$(HIDE)install -d \
"$(COQDOCINSTALL)/$(INSTALLCOQDOCROOT)/mlihtml"
$(HIDE)for i in mlihtml/*; do \
dest="$(COQDOCINSTALL)/$(INSTALLCOQDOCROOT)/$$i";\
install -m 0644 "$$i" "$$dest";\
echo INSTALL "$$i" "$$dest";\
done
.PHONY: install-doc
uninstall::
@# Extension point
$(HIDE)for f in $(FILESTOINSTALL); do \
df="`$(COQMKFILE) -destination-of "$$f" $(COQLIBS)`" &&\
instf="$(COQLIBINSTALL)/$$df/`basename $$f`" &&\
rm -f "$$instf" &&\
echo RM "$$instf" &&\
(rmdir "$(call concat_path,,$(COQLIBINSTALL)/$$df/)" 2>/dev/null || true); \
done
.PHONY: uninstall
uninstall-doc::
@# Extension point
$(SHOW)'RM $(COQDOCINSTALL)/$(INSTALLCOQDOCROOT)/html'
$(HIDE)rm -rf "$(COQDOCINSTALL)/$(INSTALLCOQDOCROOT)/html"
$(SHOW)'RM $(COQDOCINSTALL)/$(INSTALLCOQDOCROOT)/mlihtml'
$(HIDE)rm -rf "$(COQDOCINSTALL)/$(INSTALLCOQDOCROOT)/mlihtml"
$(HIDE) rmdir "$(COQDOCINSTALL)/$(INSTALLCOQDOCROOT)/" || true
.PHONY: uninstall-doc
# Cleaning ####################################################################
#
# These rules can be extended in Makefile.local
# Extensions can't assume anything about when they run.
clean::
@# Extension point
$(SHOW)'CLEAN'
$(HIDE)rm -f $(CMOFILES)
$(HIDE)rm -f $(CMIFILES)
$(HIDE)rm -f $(CMAFILES)
$(HIDE)rm -f $(CMOFILES:.cmo=.cmx)
$(HIDE)rm -f $(CMXAFILES)
$(HIDE)rm -f $(CMXSFILES)
$(HIDE)rm -f $(CMOFILES:.cmo=.o)
$(HIDE)rm -f $(CMXAFILES:.cmxa=.a)
$(HIDE)rm -f $(MLGFILES:.mlg=.ml)
$(HIDE)rm -f $(ALLDFILES)
$(HIDE)rm -f $(NATIVEFILES)
$(HIDE)find . -name .coq-native -type d -empty -delete
$(HIDE)rm -f $(VOFILES)
$(HIDE)rm -f $(VOFILES:.vo=.vio)
$(HIDE)rm -f $(BEAUTYFILES) $(VFILES:=.old)
$(HIDE)rm -f all.ps all-gal.ps all.pdf all-gal.pdf all.glob all-mli.tex
$(HIDE)rm -f $(VFILES:.v=.glob)
$(HIDE)rm -f $(VFILES:.v=.tex)
$(HIDE)rm -f $(VFILES:.v=.g.tex)
$(HIDE)rm -f pretty-timed-success.ok
$(HIDE)rm -rf html mlihtml
.PHONY: clean
cleanall:: clean
@# Extension point
$(SHOW)'CLEAN *.aux *.timing'
$(HIDE)rm -f $(foreach f,$(VFILES:.v=),$(dir $(f)).$(notdir $(f)).aux)
$(HIDE)rm -f $(TIME_OF_BUILD_FILE) $(TIME_OF_BUILD_BEFORE_FILE) $(TIME_OF_BUILD_AFTER_FILE) $(TIME_OF_PRETTY_BUILD_FILE) $(TIME_OF_PRETTY_BOTH_BUILD_FILE)
$(HIDE)rm -f $(VOFILES:.vo=.v.timing)
$(HIDE)rm -f $(VOFILES:.vo=.v.before-timing)
$(HIDE)rm -f $(VOFILES:.vo=.v.after-timing)
$(HIDE)rm -f $(VOFILES:.vo=.v.timing.diff)
.PHONY: cleanall
archclean::
@# Extension point
$(SHOW)'CLEAN *.cmx *.o'
$(HIDE)rm -f $(NATIVEFILES)
$(HIDE)rm -f $(CMOFILES:%.cmo=%.cmx)
.PHONY: archclean
# Compilation rules ###########################################################
$(MLIFILES:.mli=.cmi): %.cmi: %.mli
$(SHOW)'CAMLC -c $<'
$(HIDE)$(CAMLC) $(CAMLDEBUG) $(CAMLFLAGS) $(CAMLPKGS) $<
$(MLGFILES:.mlg=.ml): %.ml: %.mlg
$(SHOW)'COQPP $<'
$(HIDE)$(COQPP) $<
# Stupid hack around a deficient syntax: we cannot concatenate two expansions
$(filter %.cmo, $(MLFILES:.ml=.cmo) $(MLGFILES:.mlg=.cmo)): %.cmo: %.ml
$(SHOW)'CAMLC -c $<'
$(HIDE)$(CAMLC) $(CAMLDEBUG) $(CAMLFLAGS) $(CAMLPKGS) $<
# Same hack
$(filter %.cmx, $(MLFILES:.ml=.cmx) $(MLGFILES:.mlg=.cmx)): %.cmx: %.ml
$(SHOW)'CAMLOPT -c $(FOR_PACK) $<'
$(HIDE)$(CAMLOPTC) $(CAMLDEBUG) $(CAMLFLAGS) $(CAMLPKGS) $(FOR_PACK) $<
$(MLLIBFILES:.mllib=.cmxs): %.cmxs: %.cmxa
$(SHOW)'CAMLOPT -shared -o $@'
$(HIDE)$(CAMLOPTLINK) $(CAMLDEBUG) $(CAMLFLAGS) $(CAMLPKGS) \
-linkall -shared -o $@ $<
$(MLLIBFILES:.mllib=.cma): %.cma: | %.mllib
$(SHOW)'CAMLC -a -o $@'
$(HIDE)$(CAMLLINK) $(CAMLDEBUG) $(CAMLFLAGS) $(CAMLPKGS) -a -o $@ $^
$(MLLIBFILES:.mllib=.cmxa): %.cmxa: | %.mllib
$(SHOW)'CAMLOPT -a -o $@'
$(HIDE)$(CAMLOPTLINK) $(CAMLDEBUG) $(CAMLFLAGS) $(CAMLPKGS) -a -o $@ $^
$(MLPACKFILES:.mlpack=.cmxs): %.cmxs: %.cmxa
$(SHOW)'CAMLOPT -shared -o $@'
$(HIDE)$(CAMLOPTLINK) $(CAMLDEBUG) $(CAMLFLAGS) $(CAMLPKGS) \
-shared -linkall -o $@ $<
$(MLPACKFILES:.mlpack=.cmxa): %.cmxa: %.cmx
$(SHOW)'CAMLOPT -a -o $@'
$(HIDE)$(CAMLOPTLINK) $(CAMLDEBUG) $(CAMLFLAGS) -a -o $@ $<
$(MLPACKFILES:.mlpack=.cma): %.cma: %.cmo | %.mlpack
$(SHOW)'CAMLC -a -o $@'
$(HIDE)$(CAMLLINK) $(CAMLDEBUG) $(CAMLFLAGS) $(CAMLPKGS) -a -o $@ $^
$(MLPACKFILES:.mlpack=.cmo): %.cmo: | %.mlpack
$(SHOW)'CAMLC -pack -o $@'
$(HIDE)$(CAMLLINK) $(CAMLDEBUG) $(CAMLFLAGS) -pack -o $@ $^
$(MLPACKFILES:.mlpack=.cmx): %.cmx: | %.mlpack
$(SHOW)'CAMLOPT -pack -o $@'
$(HIDE)$(CAMLOPTLINK) $(CAMLDEBUG) $(CAMLFLAGS) -pack -o $@ $^
# This rule is for _CoqProject with no .mllib nor .mlpack
$(filter-out $(MLLIBFILES:.mllib=.cmxs) $(MLPACKFILES:.mlpack=.cmxs) $(addsuffix .cmxs,$(PACKEDFILES)) $(addsuffix .cmxs,$(LIBEDFILES)),$(MLFILES:.ml=.cmxs) $(MLGFILES:.mlg=.cmxs)): %.cmxs: %.cmx
$(SHOW)'[deprecated,use-mllib-or-mlpack] CAMLOPT -shared -o $@'
$(HIDE)$(CAMLOPTLINK) $(CAMLDEBUG) $(CAMLFLAGS) $(CAMLPKGS) \
-shared -o $@ $<
ifneq (,$(TIMING))
TIMING_EXTRA = > $<.$(TIMING_EXT)
else
TIMING_EXTRA =
endif
$(VOFILES): %.vo: %.v
$(SHOW)COQC $<
$(HIDE)$(TIMER) $(COQC) $(COQDEBUG) $(TIMING_ARG) $(COQFLAGS) $(COQLIBS) $< $(TIMING_EXTRA)
# FIXME ?merge with .vo / .vio ?
$(GLOBFILES): %.glob: %.v
$(TIMER) $(COQC) $(COQDEBUG) $(COQFLAGS) $(COQLIBS) $<
$(VFILES:.v=.vio): %.vio: %.v
$(SHOW)COQC -quick $<
$(HIDE)$(TIMER) $(COQC) -quick $(COQDEBUG) $(COQFLAGS) $(COQLIBS) $<
$(addsuffix .timing.diff,$(VFILES)): %.timing.diff : %.before-timing %.after-timing
$(SHOW)PYTHON TIMING-DIFF $<
$(HIDE)$(MAKE) --no-print-directory -f "$(SELF)" print-pretty-single-time-diff BEFORE=$*.before-timing AFTER=$*.after-timing TIME_OF_PRETTY_BUILD_FILE="$@"
$(BEAUTYFILES): %.v.beautified: %.v
$(SHOW)'BEAUTIFY $<'
$(HIDE)$(TIMER) $(COQC) $(COQDEBUG) $(COQFLAGS) $(COQLIBS) -beautify $<
$(TEXFILES): %.tex: %.v
$(SHOW)'COQDOC -latex $<'
$(HIDE)$(COQDOC) $(COQDOCFLAGS) -latex $< -o $@
$(GTEXFILES): %.g.tex: %.v
$(SHOW)'COQDOC -latex -g $<'
$(HIDE)$(COQDOC) $(COQDOCFLAGS) -latex -g $< -o $@
$(HTMLFILES): %.html: %.v %.glob
$(SHOW)'COQDOC -html $<'
$(HIDE)$(COQDOC) $(COQDOCFLAGS) -html $< -o $@
$(GHTMLFILES): %.g.html: %.v %.glob
$(SHOW)'COQDOC -html -g $<'
$(HIDE)$(COQDOC) $(COQDOCFLAGS) -html -g $< -o $@
# Dependency files ############################################################
ifndef MAKECMDGOALS
-include $(ALLDFILES)
else
ifneq ($(filter-out archclean clean cleanall printenv make-pretty-timed make-pretty-timed-before make-pretty-timed-after print-pretty-timed print-pretty-timed-diff print-pretty-single-time-diff,$(MAKECMDGOALS)),)
-include $(ALLDFILES)
endif
endif
.SECONDARY: $(ALLDFILES)
redir_if_ok = > "$@" || ( RV=$$?; rm -f "$@"; exit $$RV )
GENMLFILES:=$(MLGFILES:.mlg=.ml)
$(addsuffix .d,$(ALLSRCFILES)): $(GENMLFILES)
$(addsuffix .d,$(MLIFILES)): %.mli.d: %.mli
$(SHOW)'CAMLDEP $<'
$(HIDE)$(CAMLDEP) $(OCAMLLIBS) "$<" $(redir_if_ok)
$(addsuffix .d,$(MLGFILES)): %.mlg.d: %.ml
$(SHOW)'CAMLDEP $<'
$(HIDE)$(CAMLDEP) $(OCAMLLIBS) "$<" $(redir_if_ok)
$(addsuffix .d,$(MLFILES)): %.ml.d: %.ml
$(SHOW)'CAMLDEP $<'
$(HIDE)$(CAMLDEP) $(OCAMLLIBS) "$<" $(redir_if_ok)
$(addsuffix .d,$(MLLIBFILES)): %.mllib.d: %.mllib
$(SHOW)'COQDEP $<'
$(HIDE)$(COQDEP) $(OCAMLLIBS) -c "$<" $(redir_if_ok)
$(addsuffix .d,$(MLPACKFILES)): %.mlpack.d: %.mlpack
$(SHOW)'COQDEP $<'
$(HIDE)$(COQDEP) $(OCAMLLIBS) -c "$<" $(redir_if_ok)
# If this makefile is created using a _CoqProject we have coqdep get
# options from it. This avoids argument length limits for pathological
# projects. Note that extra options might be on the command line.
VDFILE_FLAGS:=$(if _CoqProject,-f _CoqProject,) $(CMDLINE_COQLIBS) $(CMDLINE_VFILES)
$(VDFILE).d: $(VFILES)
$(SHOW)'COQDEP VFILES'
$(HIDE)$(COQDEP) -dyndep var $(VDFILE_FLAGS) $(redir_if_ok)
# Misc ########################################################################
byte:
$(HIDE)$(MAKE) all "OPT:=-byte" -f "$(SELF)"
.PHONY: byte
opt:
$(HIDE)$(MAKE) all "OPT:=-opt" -f "$(SELF)"
.PHONY: opt
# This is deprecated. To extend this makefile use
# extension points and Makefile.local
printenv::
$(warning printenv is deprecated)
$(warning write extensions in Makefile.local or include Makefile.conf)
@echo 'LOCAL = $(LOCAL)'
@echo 'COQLIB = $(COQLIB)'
@echo 'DOCDIR = $(DOCDIR)'
@echo 'OCAMLFIND = $(OCAMLFIND)'
@echo 'HASNATDYNLINK = $(HASNATDYNLINK)'
@echo 'SRC_SUBDIRS = $(SRC_SUBDIRS)'
@echo 'COQ_SRC_SUBDIRS = $(COQ_SRC_SUBDIRS)'
@echo 'OCAMLFIND = $(OCAMLFIND)'
@echo 'PP = $(PP)'
@echo 'COQFLAGS = $(COQFLAGS)'
@echo 'COQLIB = $(COQLIBS)'
@echo 'COQLIBINSTALL = $(COQLIBINSTALL)'
@echo 'COQDOCINSTALL = $(COQDOCINSTALL)'
.PHONY: printenv
# Generate a .merlin file. If you need to append directives to this
# file you can extend the merlin-hook target in Makefile.local
.merlin:
$(SHOW)'FILL .merlin'
$(HIDE)echo 'FLG $(COQMF_CAMLFLAGS)' > .merlin
$(HIDE)echo 'B $(COQLIB)' >> .merlin
$(HIDE)echo 'S $(COQLIB)' >> .merlin
$(HIDE)$(foreach d,$(COQ_SRC_SUBDIRS), \
echo 'B $(COQLIB)$(d)' >> .merlin;)
$(HIDE)$(foreach d,$(COQ_SRC_SUBDIRS), \
echo 'S $(COQLIB)$(d)' >> .merlin;)
$(HIDE)$(foreach d,$(SRC_SUBDIRS), echo 'B $(d)' >> .merlin;)
$(HIDE)$(foreach d,$(SRC_SUBDIRS), echo 'S $(d)' >> .merlin;)
$(HIDE)$(MAKE) merlin-hook -f "$(SELF)"
.PHONY: merlin
merlin-hook::
@# Extension point
.PHONY: merlin-hook
# prints all variables
debug:
$(foreach v,\
$(sort $(filter-out $(INITIAL_VARS) INITIAL_VARS,\
$(.VARIABLES))),\
$(info $(v) = $($(v))))
.PHONY: debug
.DEFAULT_GOAL := all
# Local Variables:
# mode: makefile-gmake
# End:
| {
"pile_set_name": "Github"
} |
#Copyright (C) 2017 Paolo Galeone <[email protected]>
#
#This Source Code Form is subject to the terms of the Mozilla Public
#License, v. 2.0. If a copy of the MPL was not distributed with this
#file, you can obtain one at http://mozilla.org/MPL/2.0/.
#Exhibit B is not attached; this software is compatible with the
#licenses expressed under Section 1.12 of the MPL v2.
| {
"pile_set_name": "Github"
} |
import * as commands from './commands'
import * as imports from './imports'
import * as options from './options'
export {
commands,
imports,
options
}
export { default as NuxtCommand } from './command'
export { default as setup } from './setup'
export { default as run } from './run'
export { loadNuxtConfig } from './utils/config'
export { getWebpackConfig } from './utils/webpack'
export { isNuxtDir } from './utils/dir'
| {
"pile_set_name": "Github"
} |
/*
Copyright (C) 2015 Apple Inc. All Rights Reserved.
See LICENSE.txt for this sample’s licensing information
Abstract:
Configures the Core Data persistence stack and starts the RSS importer.
*/
#import <UIKit/UIKit.h>
#import "iTunesRSSImporter.h"
@interface AppDelegate : NSObject <UIApplicationDelegate, iTunesRSSImporterDelegate>
@end
| {
"pile_set_name": "Github"
} |
#include <cusp/csr_matrix.h>
#include <cusp/print.h>
#include <cusp/gallery/poisson.h>
#include <cusp/graph/pseudo_peripheral.h>
#include <cusp/io/matrix_market.h>
#include "../timer.h"
template<typename MemorySpace, typename MatrixType>
void PSEUDO(const MatrixType& G)
{
typedef typename MatrixType::index_type IndexType;
typedef cusp::csr_matrix<IndexType,IndexType,MemorySpace> BFSType;
typedef cusp::array1d<IndexType,MemorySpace> Array;
BFSType G_bfs(G);
std::cout << " pseudo-peripheral vertex : " << cusp::graph::pseudo_peripheral_vertex(G_bfs) << std::endl;
}
int main(int argc, char*argv[])
{
srand(time(NULL));
typedef int IndexType;
typedef float ValueType;
typedef cusp::device_memory MemorySpace;
cusp::csr_matrix<IndexType, ValueType, MemorySpace> A;
size_t size = 1024;
if (argc == 1)
{
// no input file was specified, generate an example
std::cout << "Generated matrix (poisson5pt) ";
cusp::gallery::poisson5pt(A, size, size);
}
else if (argc == 2)
{
// an input file was specified, read it from disk
cusp::io::read_matrix_market_file(A, argv[1]);
std::cout << "Read matrix (" << argv[1] << ") ";
}
std::cout << "with shape (" << A.num_rows << "," << A.num_cols << ") and "
<< A.num_entries << " entries" << "\n\n";
std::cout << " Device ";
PSEUDO<cusp::device_memory>(A);
std::cout << " Host ";
PSEUDO<cusp::host_memory>(A);
return EXIT_SUCCESS;
}
| {
"pile_set_name": "Github"
} |
# Table of Contents

* [The Class class in Java and how to use it](#java中class类及用法)
  * [How the Class class works](#class类原理)
  * [How to obtain a Class object](#如何获得一个class类对象)
  * [Using a Class object to create instances of the target class](#使用class类的对象来生成目标类的实例)
* [The Object class](#object类)
  * [The class constructor public Object()](#类构造器public-object)
  * [The registerNatives() method](#registernatives方法)
  * [clone() and shallow copies](#clone方法实现浅拷贝)
  * [The getClass() method](#getclass方法)
  * [The equals() method](#equals方法)
  * [The hashCode() method](#hashcode方法)
  * [The toString() method](#tostring方法)
  * [wait(), notify() and notifyAll()](#wait-notify-notifall)
  * [The finalize() method](#finalize方法)
* [The relationship between Class and Object](#class类和object类的关系)
* [References](#参考文章)
* [WeChat official accounts](#微信公众号)
  * [Java技术江湖](#java技术江湖)
  * [Personal account: 黄小斜](#个人公众号:黄小斜)

The articles in this series are collected in my "Java Interview Guide" repository on GitHub; for more content, please check out the repository:

> https://github.com/h2pl/Java-Tutorial

If you like it, please give it a star.

This article was first published on my personal blog:

> www.how2playlife.com

This article is part of the "Solid Java Fundamentals" series on the WeChat official account 【Java技术江湖】. Part of its content comes from the web; to explain the topic clearly and thoroughly it also integrates material from several technical blogs I consider good and quotes some of their better posts. If this infringes your rights, please contact the author.

This series shows you how to go from beginner to advanced, learning Java fundamentals step by step and putting them into practice, then looking into the implementation principles behind each topic, so that you get a more complete view of the whole Java technology stack and build your own knowledge framework. To help you summarize and test what you have learned, the series also provides interview questions and reference answers for each topic.

If you have suggestions or questions about this series, you can also contact the author through the official account 【Java技术江湖】; you are welcome to take part in writing and revising these posts.

<!-- more -->
## The Class class in Java and how to use it

While a Java program runs, the Java runtime system keeps so-called run-time type identification (RTTI) for every object.

> This information records which class each object belongs to. The virtual machine uses run-time type information to pick the correct method to execute, and the class that stores this type information is Class. The Class class encapsulates the run-time state of an object or interface; when a class is loaded, an object of type Class is created automatically.

Put plainly:

> The Class class is itself a class; its name just happens to look very similar to the class keyword. Java is a case-sensitive language.
> The content of a Class object is the type information of a class you wrote. For example, if you create a shapes class, Java generates a Class object whose content describes shapes.
> A Class object cannot be created the way ordinary objects are, i.e. with new shapes(); its instances can only be created by the JVM, because the class has no public constructor.
/*
* Private constructor. Only the Java Virtual Machine creates Class objects.
* This constructor is not used and prevents the default constructor being
* generated.
*/
    // Private constructor: only the JVM can instantiate Class objects
private Class(ClassLoader loader) {
// Initialize final field for classLoader. The initialization value of non-null
// prevents future JIT optimizations from assuming this final field is null.
classLoader = loader;
}
> The purpose of Class is to provide or obtain an object's type information at run time, much like the typeid() operator in C++. This information can also be used for reflection.

### How the Class class works

Let's look at part of the source code of the Class class.
// The Class class encapsulates all kinds of type information. In the JVM, you go through the Class instance to obtain all the information about each Java class.
public class Class类 {
Class aClass = null;
// private EnclosingMethodInfo getEnclosingMethodInfo() {
// Object[] enclosingInfo = getEnclosingMethod0();
// if (enclosingInfo == null)
// return null;
// else {
// return new EnclosingMethodInfo(enclosingInfo);
// }
// }
    /**
     * Atomic operations support.
*/
// private static class Atomic {
// // initialize Unsafe machinery here, since we need to call Class.class instance method
// // and have to avoid calling it in the static initializer of the Class class...
// private static final Unsafe unsafe = Unsafe.getUnsafe();
// // offset of Class.reflectionData instance field
// private static final long reflectionDataOffset;
// // offset of Class.annotationType instance field
// private static final long annotationTypeOffset;
// // offset of Class.annotationData instance field
// private static final long annotationDataOffset;
//
// static {
// Field[] fields = Class.class.getDeclaredFields0(false); // bypass caches
// reflectionDataOffset = objectFieldOffset(fields, "reflectionData");
// annotationTypeOffset = objectFieldOffset(fields, "annotationType");
// annotationDataOffset = objectFieldOffset(fields, "annotationData");
// }
    // Provides reflection information
// reflection data that might get invalidated when JVM TI RedefineClasses() is called
// private static class ReflectionData<T> {
// volatile Field[] declaredFields;
// volatile Field[] publicFields;
// volatile Method[] declaredMethods;
// volatile Method[] publicMethods;
// volatile Constructor<T>[] declaredConstructors;
// volatile Constructor<T>[] publicConstructors;
// // Intermediate results for getFields and getMethods
// volatile Field[] declaredPublicFields;
// volatile Method[] declaredPublicMethods;
// volatile Class<?>[] interfaces;
//
// // Value of classRedefinedCount when we created this ReflectionData instance
// final int redefinedCount;
//
// ReflectionData(int redefinedCount) {
// this.redefinedCount = redefinedCount;
// }
// }
    // Method array
// static class MethodArray {
// // Don't add or remove methods except by add() or remove() calls.
// private Method[] methods;
// private int length;
// private int defaults;
//
// MethodArray() {
// this(20);
// }
//
// MethodArray(int initialSize) {
// if (initialSize < 2)
// throw new IllegalArgumentException("Size should be 2 or more");
//
// methods = new Method[initialSize];
// length = 0;
// defaults = 0;
// }
    // Annotation information
// annotation data that might get invalidated when JVM TI RedefineClasses() is called
// private static class AnnotationData {
// final Map<Class<? extends Annotation>, Annotation> annotations;
// final Map<Class<? extends Annotation>, Annotation> declaredAnnotations;
//
// // Value of classRedefinedCount when we created this AnnotationData instance
// final int redefinedCount;
//
// AnnotationData(Map<Class<? extends Annotation>, Annotation> annotations,
// Map<Class<? extends Annotation>, Annotation> declaredAnnotations,
// int redefinedCount) {
// this.annotations = annotations;
// this.declaredAnnotations = declaredAnnotations;
// this.redefinedCount = redefinedCount;
// }
// }
}
> We all know that every Java class inherits from Object, and Object has a method getClass(). For an instantiated object, this method returns a reference describing that object's class, and the reference points to an object of the Class class.
>
> We cannot create a Class object ourselves (the constructor is private). Instead, the Class object for each class is created automatically by the Java Virtual Machine when the class is loaded, or produced through the defineClass method of a class loader.

    // This method can dynamically turn bytecode into a Class object
    protected final Class<?> defineClass(String name, byte[] b, int off, int len)
        throws ClassFormatError
    {
        return defineClass(name, b, off, len, null);
    }

> Every object we create carries a field recording where the Class object of its class lives, as the figure below illustrated:

![Each object holds a reference to the Class object of its class](http://dl.iteye.com/upload/picture/pic/101542/0047a6e9-6608-3c3c-a67c-d8ee95e7fcb8.jpg)

### How to obtain a Class object

Note that all the methods below assume the Class object of the class has already been created on the heap; they are just different ways of obtaining a reference to that Class object. The defineClass method mentioned above is what actually loads bytecode into the virtual machine and creates a new Class object on the heap.
The first way: the forName method of the Class class.

> public class shapes{}
> Class obj = Class.forName("shapes");

The second way: an object's getClass() method.

> public class shapes{}
> shapes s1 = new shapes();
> Class obj = s1.getClass();
> Class obj1 = s1.getSuperclass(); // returns the Class of the superclass of shapes

The third way: a class literal.

> Class obj = String.class;
> Class obj1 = int.class;

> Note that obtaining a Class object this way does not make the JVM automatically load (initialize) the class (e.g. the String class), ==whereas the other approaches cause the JVM to initialize the class.==
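To make the difference concrete, here is a minimal sketch (the InitDemo class and its static initializer are invented for illustration): with Class.forName the static block should run, while the class literal alone should not trigger it.

    class InitDemo {
        static {
            System.out.println("InitDemo static initializer ran");
        }
    }

    public class LoadingDifference {
        public static void main(String[] args) throws ClassNotFoundException {
            // The class literal only yields the Class reference; no static init yet.
            Class<InitDemo> byLiteral = InitDemo.class;
            System.out.println("got literal: " + byLiteral.getName());

            // Class.forName (with default arguments) initializes the class,
            // so the static block above runs at this point.
            Class<?> byName = Class.forName("InitDemo");
            System.out.println("got forName: " + byName.getName());
        }
    }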
### Using a Class object to create instances of the target class

> Creating an untyped Object instance
>
> ==Once you have a Class object, you can call newInstance() to create an instance of the target class. However, this method cannot hand the instance back typed as the target class; statically it only gives you an Object:==

> Class obj = Class.forName("shapes");
> Object ShapesInstance = obj.newInstance();

> Using a generic Class reference to create a typed instance:

> Class<shapes> obj = shapes.class;
> shapes newShape = obj.newInstance();

> Because of the type constraint, a reference using the generic Class syntax cannot be pointed at another class.

    Class obj1 = int.class;
    Class<Integer> obj2 = int.class;
    obj1 = double.class;
    //obj2 = double.class;  // this line is illegal: obj2 cannot be re-pointed at another class

However, there is a more flexible usage that lets a Class reference point to any subclass of a base class.

    Class<? extends Number> obj = int.class;
    obj = Number.class;
    obj = double.class;

Therefore, a Class object declared with the following syntax can point to any class.

    Class<?> obj = int.class;
    obj = double.class;
    obj = shapes.class;

The last, somewhat odd usage is that when you use this generic syntax to obtain the base-class Class object from a Class object you already have, you must use the following special form.

    public class shapes{}
    class round extends shapes{}
    Class<round> rclass = round.class;
    Class<? super round> sclass = rclass.getSuperclass();
    //Class<shapes> sclass = rclass.getSuperclass();  // does not compile

We clearly know that the base class of round is shapes, but we cannot declare Class<shapes> directly; we have to use the special form

    Class<? super round>

Just remember this and you are fine.
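One common way to put a typed Class<T> reference to work is a small generic factory. This is only a sketch (the createInstance helper is not from the article), and it uses getDeclaredConstructor().newInstance(), which on modern JDKs is preferred over the deprecated Class.newInstance():

    public class Factory {
        // Returns a new instance of exactly the type the caller asked for.
        static <T> T createInstance(Class<T> type) throws ReflectiveOperationException {
            return type.getDeclaredConstructor().newInstance();
        }

        public static void main(String[] args) throws ReflectiveOperationException {
            StringBuilder sb = createInstance(StringBuilder.class); // no cast needed
            sb.append("built via Class<T>");
            System.out.println(sb);
        }
    }

Because the return type is tied to the Class<T> parameter, the caller gets a properly typed object without any cast.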
## The Object class

This part mainly draws on http://ihenu.iteye.com/blog/2233249

The Object class is the ancestor of every other class in Java; without Object there would be no object orientation in Java to speak of. As the base class of everything else, the attributes and behavior Object has reflect the thinking behind the design of the Java language.

Object lives in the java.lang package, which contains Java's most fundamental and core classes and is imported automatically at compile time. Object defines no fields and a total of 13 methods; not all of them are accessible to subclasses, but 9 of them are inherited by every subclass.

Here is a quick overview of these methods.

1. clone: a protected method that performs a shallow copy of the object. It may only be called if the class implements the Cloneable interface; otherwise it throws CloneNotSupportedException.

2. getClass: a final method that returns the run-time type.

3. toString: used very often; subclasses generally override it.

4. finalize: meant for releasing resources. Because you cannot tell when it will be called, it is rarely used.

5. equals: a very important method. In general equals and == are different, but in Object they behave the same. Subclasses usually override this method.

6. hashCode: used for hash-based lookup; if you override equals you generally must override hashCode as well. It is used by collections with hashing behavior.

   The rule is that obj1.equals(obj2) == true must imply obj1.hashCode() == obj2.hashCode(), while equal hash codes do not necessarily imply equals. For efficiency, though, you should try to make the two conditions as close to equivalent as possible.

7. wait: makes the current thread wait on this object's lock; the current thread must own the object's monitor. wait() keeps waiting until it gets the lock back or is interrupted. wait(long timeout) sets a timeout interval and returns if the lock has not been re-acquired within it.

   After calling this method the current thread goes to sleep until one of the following happens:

   (1) another thread calls this object's notify method;
   (2) another thread calls this object's notifyAll method;
   (3) another thread calls interrupt to interrupt this thread;
   (4) the timeout interval elapses.

   At that point the thread becomes schedulable again; if it was interrupted, an InterruptedException is thrown.

8. notify: wakes up a single thread waiting on this object.

9. notifyAll: wakes up all threads waiting on this object.
### The class constructor public Object()

> In most cases, an object is created in Java with the form new A(args...), where A is the class name and A(args...) is the corresponding constructor defined in the class. Objects created this way all go through a constructor of the class.
> To reflect this, Java specifies that a class with no declared constructor gets a default no-argument constructor. As the base class of all classes, Object naturally shows this behavior: the source code of Object gives no constructor definition, yet the constructor does exist.
>
> Of course, not every class is built this way, and accordingly not every class constructor is public.

### The registerNatives() method
private static native void registerNatives();
> The registerNatives method is marked with the native keyword. In Java, a method modified with native indicates that its implementation is not written in Java but in C/C++, compiled into a native library (e.g. a .dll), and called from Java.
>
> The concrete implementation lives in that native library and differs across platforms. Marking it native means the underlying platform provides this method and Java itself needs to use it.
>
> As for registerNatives() specifically, its main job is to map C/C++ functions to Java native methods, decoupling the method naming.
>
> You might ask: registerNatives() is private and is never called explicitly, so how does it take effect? In the Java source, the declaration is immediately followed by a static initializer block:
private static native void registerNatives();
static {
registerNatives();
}
### clone() and shallow copies

    protected native Object clone() throws CloneNotSupportedException;

> See, clone() is another method declared native, so we know it is not implemented in Java itself; the concrete implementation is done in C/C++. Its purpose is to create and return a copy of this object.
> To picture it: here is a Chevrolet Cruze you like, and you want an identical one. Calling this method conjures up, as if by magic, an identical Cruze: same configuration, same looks. But from that moment on, if the original Cruze gets new accessories, that has nothing to do with the clone you produced.
>
> Whether your cloned object changes depends entirely on what you do to the cloned Cruze. In Java terms: clone returns a reference pointing to the newly cloned object, and this object occupies heap space separate from the original's.

Once the meaning of clone is clear, let's see how to call clone() on an object to perform the copy.

First look at the following example:
package com.corn.objectsummary;
import com.corn.Person;
public class ObjectTest {
public static void main(String[] args) {
Object o1 = new Object();
// The method clone() from the type Object is not visible
Object clone = o1.clone();
}
}
> The example is simple: in main(), after creating an Object with new, we try to call its clone method directly to clone it, but we get the error "The method clone() from the type Object is not visible".
>
> Why? Going by the message, the first reaction is that the Object instance defined inside ObjectTest cannot access its clone() method. Going back to the definition of clone() in Object, we see it is declared protected, and that is where the problem lies. A protected member means: accessible within the same package, or from subclasses in other packages.
>
> Clearly, Object and ObjectTest are in different packages, but ObjectTest extends Object, so it is a subclass of Object. Yet here the subclass cannot access the protected method through an Object reference. The cause is a misunderstanding of "subclasses in other packages can access".
>
> "Subclasses in other packages can access" means: when the two classes are in different packages, code inside the subclass can access the protected members inherited from the parent only when the calling reference is of the subclass type. Inside the subclass, a reference whose static type is the parent class cannot access that protected member (the super keyword being the exception).

So if we rewrite the example as follows, we find that it compiles fine:
public class clone方法 {
public static void main(String[] args) {
}
public void test1() {
User user = new User();
// User copy = user.clone();
}
public void test2() {
User user = new User();
// User copy = (User)user.clone();
}
}
Yes — because now the calling reference is of the subclass type.

> The code above would still throw "java.lang.CloneNotSupportedException" at run time, showing that clone() did not complete correctly. The reason is a rule of the Java language:
>
> A correct call to clone() requires the class to implement the Cloneable interface. If it does not, and the subclass calls Object's clone() directly, a CloneNotSupportedException is thrown.
>
> Cloneable is merely a marker interface; it contains no methods and only indicates that Object.clone() may legally be called through a reference of the subclass.
>
> So, changing the code to the following form lets clone() be invoked correctly and perform the copy.
public class User implements Cloneable{
public int id;
public String name;
public UserInfo userInfo;
public static void main(String[] args) {
User user = new User();
UserInfo userInfo = new UserInfo();
user.userInfo = userInfo;
System.out.println(user);
System.out.println(user.userInfo);
try {
User copy = (User) user.clone();
System.out.println(copy);
System.out.println(copy.userInfo);
} catch (CloneNotSupportedException e) {
e.printStackTrace();
}
}
        // The copied User instance differs from the original: they are two distinct objects.
// com.javase.Class和Object.Object方法.用到的类.User@4dc63996
// com.javase.Class和Object.Object方法.用到的类.UserInfo@d716361
        // ...but the userInfo reference in the copy points to the same object.
        // So this is a shallow copy.
// com.javase.Class和Object.Object方法.用到的类.User@6ff3c5b5
// com.javase.Class和Object.Object方法.用到的类.UserInfo@d716361
}
To summarize:

The clone method performs a shallow copy: it copies only the current object, allocating new heap space for the copy. If the object contains sub-objects of other classes, those are not copied into the new object.

==The difference between deep copy and shallow copy==

> Shallow copy
> A shallow copy is a bitwise copy of the object: it creates a new object holding an exact copy of the original object's field values. If a field is a primitive type, its value is copied; if a field is a memory address (a reference type), the address is copied, so if one object later changes what lives at that address, the other object is affected.
>
> Deep copy
> A deep copy copies all fields and also copies the dynamically allocated memory the fields point to. A deep copy happens when an object is copied together with the objects it references. Deep copies are slower and more expensive than shallow copies.
> To get a deep copy when cloning, you need to implement the Cloneable interface and override clone(); besides calling the parent's clone method to obtain the new object, you must also clone the reference-type fields of the class. Using Object's default clone alone gives a shallow copy.

So what do the two approaches (new and clone) have in common, and how do they differ?

> The essence of the new operator is to allocate memory. When the program reaches a new expression, it first looks at the type after new, because only the type tells it how much memory to allocate.
>
> After allocating memory it calls the constructor to fill in the object's fields; this step is object initialization. When the constructor returns, an object has been created and its reference (address) can be published to the outside, where it can be used to manipulate the object.
>
> clone is similar to new in the first step: both allocate memory. When clone is called, the memory allocated is the same size as for the source object (the object clone is called on), and the fields of the new object are then filled in from the corresponding fields of the original.
>
> Once the filling is done, clone returns; a new, identical object has been created, and its reference can likewise be published to the outside.

==In other words, after a shallow copy the object has merely been duplicated into another place on the heap; if a member variable is a reference to another object, that reference in the copy points to the same object as the reference in the original. Primitive fields, of course, do get their own fresh copies.==
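As a sketch of how a deep copy could be arranged for a User/UserInfo pair like the one above (field names here are assumptions, not the article's exact classes), clone() also clones the referenced object:

    class UserInfo implements Cloneable {
        String address;

        @Override
        public UserInfo clone() throws CloneNotSupportedException {
            // UserInfo only holds primitives/immutable Strings, so the default copy is enough.
            return (UserInfo) super.clone();
        }
    }

    class User implements Cloneable {
        int id;
        String name;
        UserInfo userInfo;

        @Override
        public User clone() throws CloneNotSupportedException {
            User copy = (User) super.clone();      // shallow copy of id, name and the userInfo reference
            if (userInfo != null) {
                copy.userInfo = userInfo.clone();  // clone the referenced object too -> deep copy
            }
            return copy;
        }
    }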
### The getClass() method

4. public final native Class<?> getClass();

> getClass() is also a native method. It returns the class object (run-time class object) Class<?> of this Object instance, with the same effect as the corresponding type's .class literal.
>
> First, the notion of a "class object": in Java, a class is an abstraction describing a group of instances with the same features or behavior, and an object is a concrete instance of what the class describes.
>
> Classes, as concepts, also share certain common characteristics: they all have a class name, are loaded by a class loader, and have a package, a superclass, fields and methods, and so on.
>
> So Java defines a dedicated class, Class, to describe these characteristics that other classes have. From this angle, classes themselves are also objects of the Class class. To distinguish them from ordinary objects, we call them "class objects" here.
public class getClass方法 {
public static void main(String[] args) {
User user = new User();
        // getClass() is a native method; it returns the single Class<User> object that lives on the heap
Class<?> aClass = user.getClass();
Class bClass = User.class;
try {
Class cClass = Class.forName("com.javase.Class和Object.Object方法.用到的类.User");
} catch (ClassNotFoundException e) {
e.printStackTrace();
}
System.out.println(aClass);
System.out.println(bClass);
// class com.javase.Class和Object.Object方法.用到的类.User
// class com.javase.Class和Object.Object方法.用到的类.User
try {
User a = (User) aClass.newInstance();
} catch (InstantiationException e) {
e.printStackTrace();
} catch (IllegalAccessException e) {
e.printStackTrace();
}
}
}
This area is closely tied to Java's reflection facilities.
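For a taste of what that reflection knowledge looks like in practice, here is a small sketch (not from the original post) that inspects java.util.ArrayList through its Class object:

    import java.lang.reflect.Field;
    import java.lang.reflect.Method;

    public class InspectClass {
        public static void main(String[] args) {
            Class<?> clazz = java.util.ArrayList.class;

            System.out.println("name:       " + clazz.getName());
            System.out.println("superclass: " + clazz.getSuperclass().getName());

            // Declared fields and methods of the class itself (not inherited ones).
            for (Field f : clazz.getDeclaredFields()) {
                System.out.println("field:  " + f.getName());
            }
            for (Method m : clazz.getDeclaredMethods()) {
                System.out.println("method: " + m.getName());
            }
        }
    }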
### The equals() method

5. public boolean equals(Object obj);

> == and equals are both used constantly in Java, and everyone "knows" the difference between them:
>
> == means the variable values are exactly the same (for primitive types the value itself is stored; for reference types the stored value is the address of the actual object);
>
> equals means the contents of the objects are exactly the same, "contents" here usually meaning the objects' characteristics/attributes.

Actually, the statement above is not rigorous; it mostly describes what you see with the String class. First look at the definition of equals() in the Object class:
public boolean equals(Object obj) {
return (this == obj);
}
> As you can see, Object's native equals() internally uses exactly ==; it has the same meaning as ==. If so, why define equals() at all?
>
> The correct reading of equals() is: it decides whether two objects are equal. Then what is the yardstick for deciding that two objects are equal?
>
> As above, in the Object class that yardstick is ==. But the yardstick is not fixed: other classes can redefine it according to their actual needs. The String class, for example, redefines it as "the string contents are equal". This makes classes more capable and the code more flexible. Of course, if your own class does not override equals() to redefine the yardstick, it inherits its parent's equals(), all the way up to the Object base class.
>
> Consider the following business requirement: for a User bean, the business rules say that when the uid attribute is the same it is the same User, i.e. the two User objects are equal. We can then override equals to redefine the yardstick for User equality, as sketched below.
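A minimal sketch of such an equals override (the uid field and the class shape are assumptions; the original post does not show its User class in full). Note that hashCode is deliberately left untouched here, which is exactly the problem discussed next:

    public class User {
        private int uid;
        private String name;

        @Override
        public boolean equals(Object obj) {
            if (this == obj) return true;
            if (!(obj instanceof User)) return false;
            return this.uid == ((User) obj).uid;   // same uid means same user
        }
        // hashCode() is NOT overridden here -- see the discussion below.
    }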
ObjectTest prints true, because the User class overrides equals(). This is easy to understand: 张三 may be someone's nickname and 张三丰 his full name; to decide whether the two refer to the same person we only need to check whether the uid is the same.

> Overriding equals like this looks fine on the surface, but it is not enough, because it breaks a Java convention: whoever overrides equals() must also override hashCode().
### The hashCode() method

6. public native int hashCode()

hashCode() returns an integer value, the object's hash code.

hashCode() comes with the following contract:

> 1) During one execution of a Java application, calling hashCode() on the same object multiple times must return the same hash code, provided the information used in equals comparisons has not been modified. The hash code need not stay consistent from one execution of the application to another.
>
> 2) If two objects are equal (as determined by equals()), then calling hashCode() on each must return the same hash code.
>
> 3) Conversely, if two objects return the same hash code from hashCode(), they are not necessarily equal.

In strict logical terms: two objects are equal <=> equals() is true => hashCode() values are equal. Therefore, overriding equals() requires overriding hashCode() so that this logic holds strictly; by contraposition, hashCode() values differ => equals() is false <=> the two objects are not equal.

Some may wonder: if the one and only (necessary and sufficient) criterion for object equality is equals, why bother inventing hashCode() and imposing such a contract?

The point is the role hashCode() plays: it is mainly there to improve the performance of hash tables.

Take Set among the collection classes as an example. When a new element is added, we must check whether an equal object already exists in the collection. Without hashCode(), we would have to traverse the whole Set and compare with equals() one by one, an O(n) algorithm. With hashCode(), we first compute the hash code of the incoming object, use the hash algorithm to find its slot, and simply check whether that slot is already occupied. (Note: Set is implemented on top of Map.)

> One misconception needs correcting here: the value returned by hashCode() is not the object's physical memory address, and not necessarily its logical address either. Two objects with the same hashCode() are not necessarily equal; in other words, two unequal objects may return the same hash code.
>
> Therefore, in the code above, after overriding equals() we also need to override hashCode().
    public class equals和hashcode方法 {
        @Override
        // When you change equals you must also change hashCode, otherwise the object misbehaves when used as a key
        public boolean equals(Object obj) {
            return (this == obj);
        }

        @Override
        // Equal objects must have the same hashCode; unequal objects may still share a hashCode
        public int hashCode() {
            // calling hashCode() here would recurse forever, so delegate to Object's identity hash
            return super.hashCode() >> 2;
        }
    }
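To see the contract bite, here is a small sketch with a hypothetical Key class (not from the original post): equals is overridden but hashCode is left as Object's identity hash, so a HashSet will usually fail to find an "equal" key because it looks in the wrong bucket.

    import java.util.HashSet;
    import java.util.Set;

    class Key {
        final int id;
        Key(int id) { this.id = id; }

        @Override
        public boolean equals(Object o) {
            return (o instanceof Key) && ((Key) o).id == this.id;
        }
        // No hashCode override: two equal Keys usually land in different buckets.
    }

    public class ContractDemo {
        public static void main(String[] args) {
            Set<Key> set = new HashSet<>();
            set.add(new Key(42));
            // Most likely prints false, even though an "equal" key was added,
            // because the lookup hashes to a different bucket.
            System.out.println(set.contains(new Key(42)));
        }
    }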
### The toString() method

7. public String toString();

toString() returns a string representation of the object. First look at the concrete method body in Object:
public String toString() {
return getClass().getName() + "@" + Integer.toHexString(hashCode());
}
> toString() is used all the time; even when you do not call it explicitly, System.out.println(obj) goes through toString() internally.
>
> getClass() returns the object's class object, and getName() returns the class object's name (including the package) as a String. Integer.toHexString(hashCode()) takes the object's hash code and returns its unsigned hexadecimal string representation.
>
> In the example above, if u1's hash code is 638, the corresponding hexadecimal value is 27e, and calling toString() returns: com.corn.objectsummary.User@27e.
>
> Therefore, toString() is determined solely by the object's type and its hash code; two objects of the same type that are not equal may still return the same toString() result.
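Overriding toString() to print something more useful than the type@hash form is routine; a small sketch (the Point class is invented for illustration):

    public class Point {
        private final int x;
        private final int y;

        public Point(int x, int y) {
            this.x = x;
            this.y = y;
        }

        @Override
        public String toString() {
            // System.out.println(new Point(1, 2)) prints "Point(1, 2)"
            return "Point(" + x + ", " + y + ")";
        }
    }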
### wait(), notify() and notifyAll()

8/9/10/11/12. wait(...) / notify() / notifyAll()

> As soon as wait(...) / notify() / notifyAll() come up, threads come to mind. Indeed, these methods exist mainly for cooperation between Java threads. Their meanings:
>
> wait(): the current thread (the one calling this method) waits until another thread calls notify()/notifyAll() on the object this method was invoked on.
>
> wait(long timeout) / wait(long timeout, int nanos): the current thread waits until another thread calls notify()/notifyAll() on this object, or until the specified timeout elapses.
>
> notify() / notifyAll(): wakes up a single thread / all threads waiting on this object's monitor.
>
> wait(...) and notify()/notifyAll() are normally used together. Here is a simple example:

This is a producer-consumer style model, except that here only a flag is used to indicate which thread should do work.
public class wait和notify {
    // volatile guarantees visibility across threads
volatile static int flag = 1;
    // o serves as the lock object on which the threads call wait and notify
volatile static Object o = new Object();
public static void main(String[] args) {
new Thread(new Runnable() {
@Override
public void run() {
                // wait and notify may only be used inside a synchronized block
synchronized (o) {
while (true) {
if (flag == 0) {
try {
Thread.sleep(2000);
System.out.println("thread1 wait");
                                // releases the lock and suspends the thread into o's wait queue;
                                // the code below runs once the thread is woken up and reacquires the lock
o.wait();
} catch (InterruptedException e) {
e.printStackTrace();
}
}
System.out.println("thread1 run");
System.out.println("notify t2");
flag = 0;
                        // notify one thread in the wait queue so it can acquire the lock
o.notify();
}
}
}
}).start();
        // same explanation as above
new Thread(new Runnable() {
@Override
public void run() {
while (true) {
synchronized (o) {
if (flag == 1) {
try {
Thread.sleep(2000);
System.out.println("thread2 wait");
o.wait();
} catch (InterruptedException e) {
e.printStackTrace();
}
}
System.out.println("thread2 run");
System.out.println("notify t1");
flag = 1;
o.notify();
}
}
}
}).start();
}
    // The output is:
// thread1 run
// notify t2
// thread1 wait
// thread2 run
// notify t1
// thread2 wait
// thread1 run
// notify t2
    // and so on, looping forever
}
> From the output of the example above we can draw the following conclusions:
>
> 1. After wait(...) is called, the current thread blocks immediately and releases the lock of the synchronized block it holds; it can continue only after it has been woken up (or timed out, or interrupted) and has reacquired the lock.
>
> 2. After notify()/notifyAll() is called, the calling thread does not release the lock it holds immediately; it releases it only when the code in its synchronized block finishes. If there is more code after the synchronized block, what runs next depends on the JVM's thread scheduling.

In the Java source, the definition of wait() looks like this:
public final void wait() throws InterruptedException {
wait(0);
}
> The definition of wait(long timeout, int nanos) is in essence implemented by calling wait(long timeout), and wait(long timeout) is a native method. So wait(...) is ultimately implemented natively in all its forms.

notify()/notifyAll() are native methods as well.

Threads in Java are a large and important topic of their own; a later post will cover Java multithreading in detail, so we will not go further here.
### The finalize() method

13. protected void finalize();

The finalize method is mainly related to Java's garbage collection mechanism. First look at its concrete definition in Object:

    protected void finalize() throws Throwable { }

> We see that finalize is defined as an empty method in Object. Why define it that way, and when is it invoked?
>
> Defining finalize in Object means that every Java object has this behavior. It is invoked just before the JVM reclaims the memory occupied by the object during garbage collection. So this method is not something we call ourselves (although we could; in that case it behaves no differently from any other method we define).
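A tiny sketch of finalize being driven by the collector (illustration only: there is no guarantee finalize runs in any given execution, and finalize() has been deprecated since Java 9):

    public class FinalizeDemo {
        @Override
        protected void finalize() throws Throwable {
            try {
                System.out.println("finalize() called before this object is reclaimed");
            } finally {
                super.finalize();
            }
        }

        public static void main(String[] args) throws InterruptedException {
            new FinalizeDemo();   // becomes unreachable immediately
            System.gc();          // only a hint; collection is not guaranteed
            Thread.sleep(500);    // give the finalizer thread a chance to run
        }
    }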
## The relationship between Class and Object

> The Object class and the Class class have no direct relationship.
>
> Object is the parent of every Java class; even without declaring it, an ordinary Java class extends Object by default. Typically, you can use the toString() method defined in Object.
>
> Class exists for Java's reflection mechanism. Every Java class has a corresponding Class object; Class is a final class. An instance of Class represents a class or interface in the running Java application.

Here is an interesting question reposted from Zhihu:

https://www.zhihu.com/question/30301819

In Java's object model:

1. Every class is an instance of Class; Object is a class, so Object is also an instance of Class.
2. Every class ultimately inherits from Object; Class is a class, so Class also inherits from Object.
3. This looks like a chicken-and-egg problem. How does the JVM actually handle it in practice?

> In this question the first assumption is wrong: java.lang.Object is a Java class, but it is not an instance of java.lang.Class; the latter is only a type used to describe Java classes and interfaces and to support reflection. In this respect Java differs from some purer object-oriented languages such as Python and Ruby.
>
> The second assumption is right: java.lang.Class is a subclass of java.lang.Object. Although the first assumption does not hold, the "chicken-and-egg" problem remains: in a fully bootstrapped, usable Java object system there must be a java.lang.Class instance corresponding to the class java.lang.Object; and java.lang.Class extends java.lang.Object, so by "ordinary thinking" the former should only be initialized after the latter has finished initializing...
>
> The fact is: these mutually dependent core types can all be set up in one go during a "primordial" phase, and only once that is done has the object system finished its "bootstrap"; from then on it runs by the ordinary rules of the Java object system. The runtimes of the JVM, JavaScript, Python, Ruby and others all have such a bootstrap phase.
>
> In this "primordial" (bootstrap) phase, the JVM can first allocate memory for the most important core types of the object system, putting them in an [allocated but not fully initialized] state. At this point the objects have space but are not yet usable, because their state is incomplete.
>
> Then, using the allocated space, the references between these core types are wired up. Up to this point everything is done by the JVM, with no Java bytecode executed yet. After that, these core types enter the [fully initialized] state and the object system can run on its own, which means Java bytecode can start executing to finish initializing the rest of the Java system.
## References
https://www.cnblogs.com/congsg2016/p/5317362.html
https://www.jb51.net/article/125936.htm
https://blog.csdn.net/dufufd/article/details/80537638
https://blog.csdn.net/farsight1/article/details/80664104
https://blog.csdn.net/xiaomingdetianxia/article/details/77429180
## WeChat official accounts

### Java技术江湖

If you want to follow my articles and the material I share in real time, you can follow my official account 【Java技术江湖】, the little tech corner of an Alibaba Java engineer. The author, 黄小斜, focuses on Java-related technology: SSM, Spring Boot, MySQL, distributed systems, middleware, clusters, Linux, networking and multithreading, with the occasional bit of Docker and ELK. He also shares technical material and learning experience, committed to Java full-stack development!

**Must-have learning resources for Java engineers:** some learning resources commonly used by Java engineers. After following the account, reply with the keyword **"Java"** to get them for free, no strings attached.

### Personal account: 黄小斜

The author holds a master's degree from a 985 university and is a Java engineer at Ant Financial, focusing on the Java back-end stack: Spring Boot, MySQL, distributed systems, middleware and microservices. He also knows a bit about investing, occasionally talks about algorithms and computer science fundamentals, and keeps learning and writing, believing in the power of lifelong learning!

**3T of learning resources for programmers:** a bundle of resources for programmers learning technology. After following the account, reply with the keyword **"资料"** to get it for free, no strings attached.

| {
"pile_set_name": "Github"
} |
{
"_from": "ordered-read-streams@^1.0.0",
"_id": "[email protected]",
"_inBundle": false,
"_integrity": "sha1-d8DLN8QVJdZBZtmQ/61+xqDhNj4=",
"_location": "/node-red-contrib-johnny-five/ordered-read-streams",
"_phantomChildren": {},
"_requested": {
"type": "range",
"registry": true,
"raw": "ordered-read-streams@^1.0.0",
"name": "ordered-read-streams",
"escapedName": "ordered-read-streams",
"rawSpec": "^1.0.0",
"saveSpec": null,
"fetchSpec": "^1.0.0"
},
"_requiredBy": [
"/node-red-contrib-johnny-five/glob-stream"
],
"_resolved": "https://registry.npmjs.org/ordered-read-streams/-/ordered-read-streams-1.0.1.tgz",
"_shasum": "77c0cb37c41525d64166d990ffad7ec6a0e1363e",
"_spec": "ordered-read-streams@^1.0.0",
"_where": "/home/redmatic/redmatic-prebuild/node_modules/node-red-contrib-johnny-five/node_modules/glob-stream",
"author": {
"name": "Artem Medeusheyev",
"email": "[email protected]"
},
"bugs": {
"url": "https://github.com/armed/ordered-read-streams/issues"
},
"bundleDependencies": false,
"dependencies": {
"readable-stream": "^2.0.1"
},
"deprecated": false,
"description": "Combines array of streams into one read stream in strict order",
"devDependencies": {
"expect": "^1.20.2",
"jscs": "^1.13.1",
"jshint": "^2.8.0",
"mississippi": "^1.3.0",
"mocha": "^2.2.5",
"pre-commit": "^1.0.10",
"through2": "^2.0.0"
},
"files": [
"index.js"
],
"homepage": "https://github.com/armed/ordered-read-streams#readme",
"license": "MIT",
"name": "ordered-read-streams",
"repository": {
"type": "git",
"url": "git+https://github.com/armed/ordered-read-streams.git"
},
"scripts": {
"test": "jscs *.js test/*js && jshint *.js test/*.js && mocha"
},
"version": "1.0.1"
}
| {
"pile_set_name": "Github"
} |
$ go mod init example.com/myproject
go: creating new go.mod: module example.com/myproject
$ go mod edit -replace=github.com/tv42/becky="$TESTDIR/../.."
$ cp -- "$TESTDIR/cat-xyzzy.go" .
$ echo Hello, world >greeting.txt
$ go run github.com/tv42/becky -var=xyzzy greeting.txt
$ go build -o cat-xyzzy
$ ./cat-xyzzy
Hello, world
| {
"pile_set_name": "Github"
} |
<!-- This Source Code Form is subject to the terms of the Mozilla Public
- License, v. 2.0. If a copy of the MPL was not distributed with this
- file, You can obtain one at http://mozilla.org/MPL/2.0/. -->
<!-- LOCALIZATION NOTE the term "Service Workers" should not be translated. -->
<!ENTITY aboutServiceWorkers.title "關於 Service Workers">
<!-- LOCALIZATION NOTE the term "Service Workers" should not be translated. -->
<!ENTITY aboutServiceWorkers.maintitle "已註冊的 Service Workers">
<!-- LOCALIZATION NOTE the term "Service Workers" should not be translated. -->
<!ENTITY aboutServiceWorkers.warning_not_enabled "未啟用 Service Workers。">
<!-- LOCALIZATION NOTE the term "Service Workers" should not be translated. -->
<!ENTITY aboutServiceWorkers.warning_no_serviceworkers "未註冊 Service Workers。">
| {
"pile_set_name": "Github"
} |
using UnityEngine;
using UnityEngine.ProBuilder;
namespace ProBuilder.Examples
{
/// <summary>
/// Move a sphere around the surface of a ProBuilder mesh, changing the
/// vertex color of the nearest face.
///
/// Scene setup: Create a Unity Sphere primitive in a new scene, then attach
/// this script to the sphere. Press 'Play'
/// </summary>
public class HighlightNearestFace : MonoBehaviour
{
// The distance covered by the plane.
public float travel = 50f;
// The speed at which the sphere will move.
public float speed = .2f;
// ProBuilder mesh component
private ProBuilderMesh target;
// The nearest face to this sphere.
private Face nearest = null;
void Start()
{
// Generate a 50x50 plane with 25 subdivisions, facing up, with no smoothing applied.
target = ShapeGenerator.GeneratePlane(PivotLocation.Center, travel, travel, 25, 25, Axis.Up);
target.transform.position = new Vector3(travel * .5f, 0f, travel * .5f);
// Rebuild the mesh (apply ProBuilderMesh data to UnityEngine.Mesh)
target.ToMesh();
// Rebuild UVs, Colors, Collisions, Normals, and Tangents
target.Refresh();
// Orient the camera in a good position
Camera cam = Camera.main;
cam.transform.position = new Vector3(25f, 40f, 0f);
cam.transform.localRotation = Quaternion.Euler(new Vector3(65f, 0f, 0f));
}
void Update()
{
float time = Time.time * speed;
Vector3 position = new Vector3(
Mathf.PerlinNoise(time, time) * travel,
2,
Mathf.PerlinNoise(time + 1f, time + 1f) * travel
);
transform.position = position;
if (target == null)
{
Debug.LogWarning("Missing the ProBuilder Mesh target!");
return;
}
// instead of testing distance by converting each face's center to world space,
// convert the world space of this object to the pb-Object local transform.
Vector3 pbRelativePosition = target.transform.InverseTransformPoint(transform.position);
// reset the last colored face to white
if (nearest != null)
target.SetFaceColor(nearest, Color.white);
// iterate each face in the ProBuilderMesh looking for the one nearest
// to this object.
int faceCount = target.faces.Count;
float smallestDistance = Mathf.Infinity;
nearest = target.faces[0];
for (int i = 0; i < faceCount; i++)
{
float distance = Vector3.Distance(pbRelativePosition, FaceCenter(target, target.faces[i]));
if (distance < smallestDistance)
{
smallestDistance = distance;
nearest = target.faces[i];
}
}
// Set a single face's vertex colors. If you're updating more than one face, consider using
// the ProBuilderMesh.SetColors(Color[] colors); function instead.
target.SetFaceColor(nearest, Color.blue);
// Apply the stored vertex color array to the Unity mesh.
target.Refresh(RefreshMask.Colors);
}
/**
* Returns the average of each vertex position in a face.
* In local space.
*/
private Vector3 FaceCenter(ProBuilderMesh pb, Face face)
{
var vertices = pb.positions;
Vector3 average = Vector3.zero;
// face holds triangle data. distinctIndices is a
// cached collection of the distinct indices that
// make up the triangles. Ex:
// tris = {0, 1, 2, 2, 3, 0}
// distinct indices = {0, 1, 2, 3}
foreach (int index in face.distinctIndexes)
{
average.x += vertices[index].x;
average.y += vertices[index].y;
average.z += vertices[index].z;
}
float len = (float) face.distinctIndexes.Count;
average.x /= len;
average.y /= len;
average.z /= len;
return average;
}
}
}
| {
"pile_set_name": "Github"
} |
/*
* Distributed under the Boost Software License, Version 1.0.
* (See accompanying file LICENSE_1_0.txt or copy at
* http://www.boost.org/LICENSE_1_0.txt)
*
* Copyright (c) 2009 Helge Bahmann
* Copyright (c) 2013 Tim Blechmann
* Copyright (c) 2014 Andrey Semashev
*/
/*!
* \file atomic/detail/ops_gcc_ppc.hpp
*
* This header contains implementation of the \c operations template.
*/
#ifndef BOOST_ATOMIC_DETAIL_OPS_GCC_PPC_HPP_INCLUDED_
#define BOOST_ATOMIC_DETAIL_OPS_GCC_PPC_HPP_INCLUDED_
#include <cstddef>
#include <boost/memory_order.hpp>
#include <boost/atomic/detail/config.hpp>
#include <boost/atomic/detail/storage_type.hpp>
#include <boost/atomic/detail/operations_fwd.hpp>
#include <boost/atomic/detail/ops_gcc_ppc_common.hpp>
#include <boost/atomic/capabilities.hpp>
#ifdef BOOST_HAS_PRAGMA_ONCE
#pragma once
#endif
namespace boost {
namespace atomics {
namespace detail {
// The implementation below uses information from this document:
// http://www.rdrop.com/users/paulmck/scalability/paper/N2745r.2010.02.19a.html
/*
Refer to: Motorola: "Programming Environments Manual for 32-Bit
Implementations of the PowerPC Architecture", Appendix E:
"Synchronization Programming Examples" for an explanation of what is
going on here (can be found on the web at various places by the
name "MPCFPE32B.pdf", Google is your friend...)
Most of the atomic operations map to instructions in a relatively
straight-forward fashion, but "load"s may at first glance appear
a bit strange as they map to:
lwz %rX, addr
cmpw %rX, %rX
bne- 1f
1:
That is, the CPU is forced to perform a branch that "formally" depends
on the value retrieved from memory. This scheme has an overhead of
about 1-2 clock cycles per load, but it allows to map "acquire" to
the "isync" instruction instead of "sync" uniformly and for all type
of atomic operations. Since "isync" has a cost of about 15 clock
cycles, while "sync" hast a cost of about 50 clock cycles, the small
penalty to atomic loads more than compensates for this.
Byte- and halfword-sized atomic values are implemented in two ways.
When 8 and 16-bit instructions are available (in Power8 and later),
they are used. Otherwise operations are realized by encoding the
value to be represented into a word, performing sign/zero extension
as appropriate. This means that after add/sub operations the value
needs fixing up to accurately preserve the wrap-around semantic of
the smaller type. (Nothing special needs to be done for the bit-wise
and the "exchange type" operators as the compiler already sees to
it that values carried in registers are extended appropriately and
everything falls into place naturally).
The register constraint "b" instructs gcc to use any register
except r0; this is sometimes required because the encoding for
r0 is used to signify "constant zero" in a number of instructions,
making r0 unusable in this place. For simplicity this constraint
is used everywhere since I am too lazy to look this up on a
per-instruction basis, and ppc has enough registers for this not
to pose a problem.
*/
template< bool Signed >
struct operations< 4u, Signed > :
public gcc_ppc_operations_base
{
typedef typename make_storage_type< 4u >::type storage_type;
typedef typename make_storage_type< 4u >::aligned aligned_storage_type;
static BOOST_CONSTEXPR_OR_CONST std::size_t storage_size = 4u;
static BOOST_CONSTEXPR_OR_CONST bool is_signed = Signed;
static BOOST_FORCEINLINE void store(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
{
fence_before(order);
__asm__ __volatile__
(
"stw %1, %0\n\t"
: "+m" (storage)
: "r" (v)
);
}
static BOOST_FORCEINLINE storage_type load(storage_type const volatile& storage, memory_order order) BOOST_NOEXCEPT
{
storage_type v;
if (order == memory_order_seq_cst)
__asm__ __volatile__ ("sync" ::: "memory");
if ((static_cast< unsigned int >(order) & (static_cast< unsigned int >(memory_order_consume) | static_cast< unsigned int >(memory_order_acquire))) != 0u)
{
__asm__ __volatile__
(
"lwz %0, %1\n\t"
"cmpw %0, %0\n\t"
"bne- 1f\n\t"
"1:\n\t"
"isync\n\t"
: "=&r" (v)
: "m" (storage)
: "cr0", "memory"
);
}
else
{
__asm__ __volatile__
(
"lwz %0, %1\n\t"
: "=&r" (v)
: "m" (storage)
);
}
return v;
}
static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
{
storage_type original;
fence_before(order);
__asm__ __volatile__
(
"1:\n\t"
"lwarx %0,%y1\n\t"
"stwcx. %2,%y1\n\t"
"bne- 1b\n\t"
: "=&b" (original), "+Z" (storage)
: "b" (v)
: "cr0"
);
fence_after(order);
return original;
}
static BOOST_FORCEINLINE bool compare_exchange_weak(
storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
{
int success;
fence_before(success_order);
__asm__ __volatile__
(
"li %1, 0\n\t"
"lwarx %0,%y2\n\t"
"cmpw %0, %3\n\t"
"bne- 1f\n\t"
"stwcx. %4,%y2\n\t"
"bne- 1f\n\t"
"li %1, 1\n\t"
"1:\n\t"
: "=&b" (expected), "=&b" (success), "+Z" (storage)
: "b" (expected), "b" (desired)
: "cr0"
);
if (success)
fence_after(success_order);
else
fence_after(failure_order);
return !!success;
}
static BOOST_FORCEINLINE bool compare_exchange_strong(
storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
{
int success;
fence_before(success_order);
__asm__ __volatile__
(
"li %1, 0\n\t"
"0: lwarx %0,%y2\n\t"
"cmpw %0, %3\n\t"
"bne- 1f\n\t"
"stwcx. %4,%y2\n\t"
"bne- 0b\n\t"
"li %1, 1\n\t"
"1:\n\t"
: "=&b" (expected), "=&b" (success), "+Z" (storage)
: "b" (expected), "b" (desired)
: "cr0"
);
if (success)
fence_after(success_order);
else
fence_after(failure_order);
return !!success;
}
static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
{
storage_type original, result;
fence_before(order);
__asm__ __volatile__
(
"1:\n\t"
"lwarx %0,%y2\n\t"
"add %1,%0,%3\n\t"
"stwcx. %1,%y2\n\t"
"bne- 1b\n\t"
: "=&b" (original), "=&b" (result), "+Z" (storage)
: "b" (v)
: BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
);
fence_after(order);
return original;
}
static BOOST_FORCEINLINE storage_type fetch_sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
{
storage_type original, result;
fence_before(order);
__asm__ __volatile__
(
"1:\n\t"
"lwarx %0,%y2\n\t"
"sub %1,%0,%3\n\t"
"stwcx. %1,%y2\n\t"
"bne- 1b\n\t"
: "=&b" (original), "=&b" (result), "+Z" (storage)
: "b" (v)
: BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
);
fence_after(order);
return original;
}
static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
{
storage_type original, result;
fence_before(order);
__asm__ __volatile__
(
"1:\n\t"
"lwarx %0,%y2\n\t"
"and %1,%0,%3\n\t"
"stwcx. %1,%y2\n\t"
"bne- 1b\n\t"
: "=&b" (original), "=&b" (result), "+Z" (storage)
: "b" (v)
: BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
);
fence_after(order);
return original;
}
static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
{
storage_type original, result;
fence_before(order);
__asm__ __volatile__
(
"1:\n\t"
"lwarx %0,%y2\n\t"
"or %1,%0,%3\n\t"
"stwcx. %1,%y2\n\t"
"bne- 1b\n\t"
: "=&b" (original), "=&b" (result), "+Z" (storage)
: "b" (v)
: BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
);
fence_after(order);
return original;
}
static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
{
storage_type original, result;
fence_before(order);
__asm__ __volatile__
(
"1:\n\t"
"lwarx %0,%y2\n\t"
"xor %1,%0,%3\n\t"
"stwcx. %1,%y2\n\t"
"bne- 1b\n\t"
: "=&b" (original), "=&b" (result), "+Z" (storage)
: "b" (v)
: BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
);
fence_after(order);
return original;
}
static BOOST_FORCEINLINE bool test_and_set(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
{
return !!exchange(storage, (storage_type)1, order);
}
static BOOST_FORCEINLINE void clear(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
{
store(storage, 0, order);
}
};
#if defined(BOOST_ATOMIC_DETAIL_PPC_HAS_LBARX_STBCX)
template< bool Signed >
struct operations< 1u, Signed > :
public gcc_ppc_operations_base
{
typedef typename make_storage_type< 1u >::type storage_type;
typedef typename make_storage_type< 1u >::aligned aligned_storage_type;
static BOOST_CONSTEXPR_OR_CONST std::size_t storage_size = 1u;
static BOOST_CONSTEXPR_OR_CONST bool is_signed = Signed;
static BOOST_FORCEINLINE void store(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
{
fence_before(order);
__asm__ __volatile__
(
"stb %1, %0\n\t"
: "+m" (storage)
: "r" (v)
);
}
static BOOST_FORCEINLINE storage_type load(storage_type const volatile& storage, memory_order order) BOOST_NOEXCEPT
{
storage_type v;
if (order == memory_order_seq_cst)
__asm__ __volatile__ ("sync" ::: "memory");
if ((static_cast< unsigned int >(order) & (static_cast< unsigned int >(memory_order_consume) | static_cast< unsigned int >(memory_order_acquire))) != 0u)
{
__asm__ __volatile__
(
"lbz %0, %1\n\t"
"cmpw %0, %0\n\t"
"bne- 1f\n\t"
"1:\n\t"
"isync\n\t"
: "=&r" (v)
: "m" (storage)
: "cr0", "memory"
);
}
else
{
__asm__ __volatile__
(
"lbz %0, %1\n\t"
: "=&r" (v)
: "m" (storage)
);
}
return v;
}
static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
{
storage_type original;
fence_before(order);
__asm__ __volatile__
(
"1:\n\t"
"lbarx %0,%y1\n\t"
"stbcx. %2,%y1\n\t"
"bne- 1b\n\t"
: "=&b" (original), "+Z" (storage)
: "b" (v)
: "cr0"
);
fence_after(order);
return original;
}
static BOOST_FORCEINLINE bool compare_exchange_weak(
storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
{
int success;
fence_before(success_order);
__asm__ __volatile__
(
"li %1, 0\n\t"
"lbarx %0,%y2\n\t"
"cmpw %0, %3\n\t"
"bne- 1f\n\t"
"stbcx. %4,%y2\n\t"
"bne- 1f\n\t"
"li %1, 1\n\t"
"1:\n\t"
: "=&b" (expected), "=&b" (success), "+Z" (storage)
: "b" (expected), "b" (desired)
: "cr0"
);
if (success)
fence_after(success_order);
else
fence_after(failure_order);
return !!success;
}
static BOOST_FORCEINLINE bool compare_exchange_strong(
storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
{
int success;
fence_before(success_order);
__asm__ __volatile__
(
"li %1, 0\n\t"
"0: lbarx %0,%y2\n\t"
"cmpw %0, %3\n\t"
"bne- 1f\n\t"
"stbcx. %4,%y2\n\t"
"bne- 0b\n\t"
"li %1, 1\n\t"
"1:\n\t"
: "=&b" (expected), "=&b" (success), "+Z" (storage)
: "b" (expected), "b" (desired)
: "cr0"
);
if (success)
fence_after(success_order);
else
fence_after(failure_order);
return !!success;
}
static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
{
storage_type original, result;
fence_before(order);
__asm__ __volatile__
(
"1:\n\t"
"lbarx %0,%y2\n\t"
"add %1,%0,%3\n\t"
"stbcx. %1,%y2\n\t"
"bne- 1b\n\t"
: "=&b" (original), "=&b" (result), "+Z" (storage)
: "b" (v)
: BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
);
fence_after(order);
return original;
}
static BOOST_FORCEINLINE storage_type fetch_sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
{
storage_type original, result;
fence_before(order);
__asm__ __volatile__
(
"1:\n\t"
"lbarx %0,%y2\n\t"
"sub %1,%0,%3\n\t"
"stbcx. %1,%y2\n\t"
"bne- 1b\n\t"
: "=&b" (original), "=&b" (result), "+Z" (storage)
: "b" (v)
: BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
);
fence_after(order);
return original;
}
static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
{
storage_type original, result;
fence_before(order);
__asm__ __volatile__
(
"1:\n\t"
"lbarx %0,%y2\n\t"
"and %1,%0,%3\n\t"
"stbcx. %1,%y2\n\t"
"bne- 1b\n\t"
: "=&b" (original), "=&b" (result), "+Z" (storage)
: "b" (v)
: BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
);
fence_after(order);
return original;
}
static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
{
storage_type original, result;
fence_before(order);
__asm__ __volatile__
(
"1:\n\t"
"lbarx %0,%y2\n\t"
"or %1,%0,%3\n\t"
"stbcx. %1,%y2\n\t"
"bne- 1b\n\t"
: "=&b" (original), "=&b" (result), "+Z" (storage)
: "b" (v)
: BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
);
fence_after(order);
return original;
}
static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
{
storage_type original, result;
fence_before(order);
__asm__ __volatile__
(
"1:\n\t"
"lbarx %0,%y2\n\t"
"xor %1,%0,%3\n\t"
"stbcx. %1,%y2\n\t"
"bne- 1b\n\t"
: "=&b" (original), "=&b" (result), "+Z" (storage)
: "b" (v)
: BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
);
fence_after(order);
return original;
}
static BOOST_FORCEINLINE bool test_and_set(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
{
return !!exchange(storage, (storage_type)1, order);
}
static BOOST_FORCEINLINE void clear(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
{
store(storage, 0, order);
}
};
#else // defined(BOOST_ATOMIC_DETAIL_PPC_HAS_LBARX_STBCX)
template< >
struct operations< 1u, false > :
public operations< 4u, false >
{
typedef operations< 4u, false > base_type;
typedef base_type::storage_type storage_type;
static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
{
storage_type original, result;
fence_before(order);
__asm__ __volatile__
(
"1:\n\t"
"lwarx %0,%y2\n\t"
"add %1,%0,%3\n\t"
"rlwinm %1, %1, 0, 0xff\n\t"
"stwcx. %1,%y2\n\t"
"bne- 1b\n\t"
: "=&b" (original), "=&b" (result), "+Z" (storage)
: "b" (v)
: BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
);
fence_after(order);
return original;
}
static BOOST_FORCEINLINE storage_type fetch_sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
{
storage_type original, result;
fence_before(order);
__asm__ __volatile__
(
"1:\n\t"
"lwarx %0,%y2\n\t"
"sub %1,%0,%3\n\t"
"rlwinm %1, %1, 0, 0xff\n\t"
"stwcx. %1,%y2\n\t"
"bne- 1b\n\t"
: "=&b" (original), "=&b" (result), "+Z" (storage)
: "b" (v)
: BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
);
fence_after(order);
return original;
}
};
template< >
struct operations< 1u, true > :
public operations< 4u, true >
{
typedef operations< 4u, true > base_type;
typedef base_type::storage_type storage_type;
static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
{
storage_type original, result;
fence_before(order);
__asm__ __volatile__
(
"1:\n\t"
"lwarx %0,%y2\n\t"
"add %1,%0,%3\n\t"
"extsb %1, %1\n\t"
"stwcx. %1,%y2\n\t"
"bne- 1b\n\t"
: "=&b" (original), "=&b" (result), "+Z" (storage)
: "b" (v)
: BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
);
fence_after(order);
return original;
}
static BOOST_FORCEINLINE storage_type fetch_sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
{
storage_type original, result;
fence_before(order);
__asm__ __volatile__
(
"1:\n\t"
"lwarx %0,%y2\n\t"
"sub %1,%0,%3\n\t"
"extsb %1, %1\n\t"
"stwcx. %1,%y2\n\t"
"bne- 1b\n\t"
: "=&b" (original), "=&b" (result), "+Z" (storage)
: "b" (v)
: BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
);
fence_after(order);
return original;
}
};
#endif // defined(BOOST_ATOMIC_DETAIL_PPC_HAS_LBARX_STBCX)
#if defined(BOOST_ATOMIC_DETAIL_PPC_HAS_LHARX_STHCX)
template< bool Signed >
struct operations< 2u, Signed > :
public gcc_ppc_operations_base
{
typedef typename make_storage_type< 2u >::type storage_type;
typedef typename make_storage_type< 2u >::aligned aligned_storage_type;
static BOOST_CONSTEXPR_OR_CONST std::size_t storage_size = 2u;
static BOOST_CONSTEXPR_OR_CONST bool is_signed = Signed;
static BOOST_FORCEINLINE void store(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
{
fence_before(order);
__asm__ __volatile__
(
"sth %1, %0\n\t"
: "+m" (storage)
: "r" (v)
);
}
static BOOST_FORCEINLINE storage_type load(storage_type const volatile& storage, memory_order order) BOOST_NOEXCEPT
{
storage_type v;
if (order == memory_order_seq_cst)
__asm__ __volatile__ ("sync" ::: "memory");
if ((static_cast< unsigned int >(order) & (static_cast< unsigned int >(memory_order_consume) | static_cast< unsigned int >(memory_order_acquire))) != 0u)
{
__asm__ __volatile__
(
"lhz %0, %1\n\t"
"cmpw %0, %0\n\t"
"bne- 1f\n\t"
"1:\n\t"
"isync\n\t"
: "=&r" (v)
: "m" (storage)
: "cr0", "memory"
);
}
else
{
__asm__ __volatile__
(
"lhz %0, %1\n\t"
: "=&r" (v)
: "m" (storage)
);
}
return v;
}
static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
{
storage_type original;
fence_before(order);
__asm__ __volatile__
(
"1:\n\t"
"lharx %0,%y1\n\t"
"sthcx. %2,%y1\n\t"
"bne- 1b\n\t"
: "=&b" (original), "+Z" (storage)
: "b" (v)
: "cr0"
);
fence_after(order);
return original;
}
static BOOST_FORCEINLINE bool compare_exchange_weak(
storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
{
int success;
fence_before(success_order);
__asm__ __volatile__
(
"li %1, 0\n\t"
"lharx %0,%y2\n\t"
"cmpw %0, %3\n\t"
"bne- 1f\n\t"
"sthcx. %4,%y2\n\t"
"bne- 1f\n\t"
"li %1, 1\n\t"
"1:\n\t"
: "=&b" (expected), "=&b" (success), "+Z" (storage)
: "b" (expected), "b" (desired)
: "cr0"
);
if (success)
fence_after(success_order);
else
fence_after(failure_order);
return !!success;
}
static BOOST_FORCEINLINE bool compare_exchange_strong(
storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
{
int success;
fence_before(success_order);
__asm__ __volatile__
(
"li %1, 0\n\t"
"0: lharx %0,%y2\n\t"
"cmpw %0, %3\n\t"
"bne- 1f\n\t"
"sthcx. %4,%y2\n\t"
"bne- 0b\n\t"
"li %1, 1\n\t"
"1:\n\t"
: "=&b" (expected), "=&b" (success), "+Z" (storage)
: "b" (expected), "b" (desired)
: "cr0"
);
if (success)
fence_after(success_order);
else
fence_after(failure_order);
return !!success;
}
static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
{
storage_type original, result;
fence_before(order);
__asm__ __volatile__
(
"1:\n\t"
"lharx %0,%y2\n\t"
"add %1,%0,%3\n\t"
"sthcx. %1,%y2\n\t"
"bne- 1b\n\t"
: "=&b" (original), "=&b" (result), "+Z" (storage)
: "b" (v)
: BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
);
fence_after(order);
return original;
}
static BOOST_FORCEINLINE storage_type fetch_sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
{
storage_type original, result;
fence_before(order);
__asm__ __volatile__
(
"1:\n\t"
"lharx %0,%y2\n\t"
"sub %1,%0,%3\n\t"
"sthcx. %1,%y2\n\t"
"bne- 1b\n\t"
: "=&b" (original), "=&b" (result), "+Z" (storage)
: "b" (v)
: BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
);
fence_after(order);
return original;
}
static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
{
storage_type original, result;
fence_before(order);
__asm__ __volatile__
(
"1:\n\t"
"lharx %0,%y2\n\t"
"and %1,%0,%3\n\t"
"sthcx. %1,%y2\n\t"
"bne- 1b\n\t"
: "=&b" (original), "=&b" (result), "+Z" (storage)
: "b" (v)
: BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
);
fence_after(order);
return original;
}
static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
{
storage_type original, result;
fence_before(order);
__asm__ __volatile__
(
"1:\n\t"
"lharx %0,%y2\n\t"
"or %1,%0,%3\n\t"
"sthcx. %1,%y2\n\t"
"bne- 1b\n\t"
: "=&b" (original), "=&b" (result), "+Z" (storage)
: "b" (v)
: BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
);
fence_after(order);
return original;
}
static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
{
storage_type original, result;
fence_before(order);
__asm__ __volatile__
(
"1:\n\t"
"lharx %0,%y2\n\t"
"xor %1,%0,%3\n\t"
"sthcx. %1,%y2\n\t"
"bne- 1b\n\t"
: "=&b" (original), "=&b" (result), "+Z" (storage)
: "b" (v)
: BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
);
fence_after(order);
return original;
}
static BOOST_FORCEINLINE bool test_and_set(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
{
return !!exchange(storage, (storage_type)1, order);
}
static BOOST_FORCEINLINE void clear(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
{
store(storage, 0, order);
}
};
#else // defined(BOOST_ATOMIC_DETAIL_PPC_HAS_LHARX_STHCX)
template< >
struct operations< 2u, false > :
public operations< 4u, false >
{
typedef operations< 4u, false > base_type;
typedef base_type::storage_type storage_type;
static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
{
storage_type original, result;
fence_before(order);
__asm__ __volatile__
(
"1:\n\t"
"lwarx %0,%y2\n\t"
"add %1,%0,%3\n\t"
"rlwinm %1, %1, 0, 0xffff\n\t"
"stwcx. %1,%y2\n\t"
"bne- 1b\n\t"
: "=&b" (original), "=&b" (result), "+Z" (storage)
: "b" (v)
: BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
);
fence_after(order);
return original;
}
static BOOST_FORCEINLINE storage_type fetch_sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
{
storage_type original, result;
fence_before(order);
__asm__ __volatile__
(
"1:\n\t"
"lwarx %0,%y2\n\t"
"sub %1,%0,%3\n\t"
"rlwinm %1, %1, 0, 0xffff\n\t"
"stwcx. %1,%y2\n\t"
"bne- 1b\n\t"
: "=&b" (original), "=&b" (result), "+Z" (storage)
: "b" (v)
: BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
);
fence_after(order);
return original;
}
};
template< >
struct operations< 2u, true > :
public operations< 4u, true >
{
typedef operations< 4u, true > base_type;
typedef base_type::storage_type storage_type;
static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
{
storage_type original, result;
fence_before(order);
__asm__ __volatile__
(
"1:\n\t"
"lwarx %0,%y2\n\t"
"add %1,%0,%3\n\t"
"extsh %1, %1\n\t"
"stwcx. %1,%y2\n\t"
"bne- 1b\n\t"
: "=&b" (original), "=&b" (result), "+Z" (storage)
: "b" (v)
: BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
);
fence_after(order);
return original;
}
static BOOST_FORCEINLINE storage_type fetch_sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
{
storage_type original, result;
fence_before(order);
__asm__ __volatile__
(
"1:\n\t"
"lwarx %0,%y2\n\t"
"sub %1,%0,%3\n\t"
"extsh %1, %1\n\t"
"stwcx. %1,%y2\n\t"
"bne- 1b\n\t"
: "=&b" (original), "=&b" (result), "+Z" (storage)
: "b" (v)
: BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
);
fence_after(order);
return original;
}
};
#endif // defined(BOOST_ATOMIC_DETAIL_PPC_HAS_LHARX_STHCX)
#if defined(BOOST_ATOMIC_DETAIL_PPC_HAS_LDARX_STDCX)
template< bool Signed >
struct operations< 8u, Signed > :
public gcc_ppc_operations_base
{
typedef typename make_storage_type< 8u >::type storage_type;
typedef typename make_storage_type< 8u >::aligned aligned_storage_type;
static BOOST_CONSTEXPR_OR_CONST std::size_t storage_size = 8u;
static BOOST_CONSTEXPR_OR_CONST bool is_signed = Signed;
static BOOST_FORCEINLINE void store(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
{
fence_before(order);
__asm__ __volatile__
(
"std %1, %0\n\t"
: "+m" (storage)
: "r" (v)
);
}
static BOOST_FORCEINLINE storage_type load(storage_type const volatile& storage, memory_order order) BOOST_NOEXCEPT
{
storage_type v;
if (order == memory_order_seq_cst)
__asm__ __volatile__ ("sync" ::: "memory");
if ((static_cast< unsigned int >(order) & (static_cast< unsigned int >(memory_order_consume) | static_cast< unsigned int >(memory_order_acquire))) != 0u)
{
__asm__ __volatile__
(
"ld %0, %1\n\t"
"cmpd %0, %0\n\t"
"bne- 1f\n\t"
"1:\n\t"
"isync\n\t"
: "=&b" (v)
: "m" (storage)
: "cr0", "memory"
);
}
else
{
__asm__ __volatile__
(
"ld %0, %1\n\t"
: "=&b" (v)
: "m" (storage)
);
}
return v;
}
static BOOST_FORCEINLINE storage_type exchange(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
{
storage_type original;
fence_before(order);
__asm__ __volatile__
(
"1:\n\t"
"ldarx %0,%y1\n\t"
"stdcx. %2,%y1\n\t"
"bne- 1b\n\t"
: "=&b" (original), "+Z" (storage)
: "b" (v)
: "cr0"
);
fence_after(order);
return original;
}
static BOOST_FORCEINLINE bool compare_exchange_weak(
storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
{
int success;
fence_before(success_order);
__asm__ __volatile__
(
"li %1, 0\n\t"
"ldarx %0,%y2\n\t"
"cmpd %0, %3\n\t"
"bne- 1f\n\t"
"stdcx. %4,%y2\n\t"
"bne- 1f\n\t"
"li %1, 1\n\t"
"1:"
: "=&b" (expected), "=&b" (success), "+Z" (storage)
: "b" (expected), "b" (desired)
: "cr0"
);
if (success)
fence_after(success_order);
else
fence_after(failure_order);
return !!success;
}
static BOOST_FORCEINLINE bool compare_exchange_strong(
storage_type volatile& storage, storage_type& expected, storage_type desired, memory_order success_order, memory_order failure_order) BOOST_NOEXCEPT
{
int success;
fence_before(success_order);
__asm__ __volatile__
(
"li %1, 0\n\t"
"0: ldarx %0,%y2\n\t"
"cmpd %0, %3\n\t"
"bne- 1f\n\t"
"stdcx. %4,%y2\n\t"
"bne- 0b\n\t"
"li %1, 1\n\t"
"1:\n\t"
: "=&b" (expected), "=&b" (success), "+Z" (storage)
: "b" (expected), "b" (desired)
: "cr0"
);
if (success)
fence_after(success_order);
else
fence_after(failure_order);
return !!success;
}
static BOOST_FORCEINLINE storage_type fetch_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
{
storage_type original, result;
fence_before(order);
__asm__ __volatile__
(
"1:\n\t"
"ldarx %0,%y2\n\t"
"add %1,%0,%3\n\t"
"stdcx. %1,%y2\n\t"
"bne- 1b\n\t"
: "=&b" (original), "=&b" (result), "+Z" (storage)
: "b" (v)
: BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
);
fence_after(order);
return original;
}
static BOOST_FORCEINLINE storage_type fetch_sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
{
storage_type original, result;
fence_before(order);
__asm__ __volatile__
(
"1:\n\t"
"ldarx %0,%y2\n\t"
"sub %1,%0,%3\n\t"
"stdcx. %1,%y2\n\t"
"bne- 1b\n\t"
: "=&b" (original), "=&b" (result), "+Z" (storage)
: "b" (v)
: BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
);
fence_after(order);
return original;
}
static BOOST_FORCEINLINE storage_type fetch_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
{
storage_type original, result;
fence_before(order);
__asm__ __volatile__
(
"1:\n\t"
"ldarx %0,%y2\n\t"
"and %1,%0,%3\n\t"
"stdcx. %1,%y2\n\t"
"bne- 1b\n\t"
: "=&b" (original), "=&b" (result), "+Z" (storage)
: "b" (v)
: BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
);
fence_after(order);
return original;
}
static BOOST_FORCEINLINE storage_type fetch_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
{
storage_type original, result;
fence_before(order);
__asm__ __volatile__
(
"1:\n\t"
"ldarx %0,%y2\n\t"
"or %1,%0,%3\n\t"
"stdcx. %1,%y2\n\t"
"bne- 1b\n\t"
: "=&b" (original), "=&b" (result), "+Z" (storage)
: "b" (v)
: BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
);
fence_after(order);
return original;
}
static BOOST_FORCEINLINE storage_type fetch_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
{
storage_type original, result;
fence_before(order);
__asm__ __volatile__
(
"1:\n\t"
"ldarx %0,%y2\n\t"
"xor %1,%0,%3\n\t"
"stdcx. %1,%y2\n\t"
"bne- 1b\n\t"
: "=&b" (original), "=&b" (result), "+Z" (storage)
: "b" (v)
: BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
);
fence_after(order);
return original;
}
static BOOST_FORCEINLINE bool test_and_set(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
{
return !!exchange(storage, (storage_type)1, order);
}
static BOOST_FORCEINLINE void clear(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
{
store(storage, 0, order);
}
};
#endif // defined(BOOST_ATOMIC_DETAIL_PPC_HAS_LDARX_STDCX)
BOOST_FORCEINLINE void thread_fence(memory_order order) BOOST_NOEXCEPT
{
if (order != memory_order_relaxed)
{
#if defined(__powerpc64__) || defined(__PPC64__)
if (order != memory_order_seq_cst)
__asm__ __volatile__ ("lwsync" ::: "memory");
else
__asm__ __volatile__ ("sync" ::: "memory");
#else
__asm__ __volatile__ ("sync" ::: "memory");
#endif
}
}
BOOST_FORCEINLINE void signal_fence(memory_order order) BOOST_NOEXCEPT
{
if (order != memory_order_relaxed)
#if defined(__ibmxl__) || defined(__IBMCPP__)
__fence();
#else
__asm__ __volatile__ ("" ::: "memory");
#endif
}
} // namespace detail
} // namespace atomics
} // namespace boost
#endif // BOOST_ATOMIC_DETAIL_OPS_GCC_PPC_HPP_INCLUDED_
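// Illustrative note, not part of the original header: the "weak" CAS variants above may
// fail spuriously when the stbcx./sthcx./stdcx. reservation is lost, which is why callers
// use them inside a retry loop. A minimal sketch of that consumption pattern follows,
// using std::atomic as a stand-in for the operations<> backends; the fetch_max helper
// below is hypothetical and only illustrates the loop shape.
#include <atomic>

inline int fetch_max(std::atomic< int >& a, int v)
{
    int old = a.load(std::memory_order_relaxed);
    // compare_exchange_weak reloads `old` on failure; a spurious failure
    // (lost reservation) simply causes another iteration of the loop.
    while (old < v && !a.compare_exchange_weak(old, v, std::memory_order_acq_rel, std::memory_order_relaxed))
    {
    }
    return old;
}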
| {
"pile_set_name": "Github"
} |
:root {
--cx-spatial-base: 0.5rem;
--cx-spatial-sm: calc(var(--cx-spatial-base) / 2);
--cx-spatial-md: calc(2 * var(--cx-spatial-base));
--cx-spatial-lg: calc(4 * var(--cx-spatial-base));
}
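/* Illustrative usage, not part of the original file: the spacing scale above is
   consumed through var(); the .cx-card selector below is hypothetical. */
.cx-card {
  padding: var(--cx-spatial-md);       /* resolves to 1rem */
  margin-bottom: var(--cx-spatial-lg); /* resolves to 2rem */
  gap: var(--cx-spatial-sm);           /* resolves to 0.25rem */
}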
| {
"pile_set_name": "Github"
} |
fileFormatVersion: 2
guid: 84280ccd57418b244a0619103d43416c
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:
| {
"pile_set_name": "Github"
} |
package bolt
import (
"context"
"github.com/spaceuptech/helpers"
"github.com/spaceuptech/space-cloud/gateway/model"
)
// Aggregate performs a bolt db pipeline aggregation
func (b *Bolt) Aggregate(ctx context.Context, col string, req *model.AggregateRequest) (interface{}, error) {
return nil, helpers.Logger.LogError(helpers.GetRequestID(ctx), "aggregate operation not supported for selected database", nil, nil)
}
| {
"pile_set_name": "Github"
} |
#
# Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
# See https://llvm.org/LICENSE.txt for license information.
# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
#
########## Make rule for test oop148 ########
fcheck.o check_mod.mod: $(SRC)/check_mod.f90
-$(FC) -c $(FFLAGS) $(SRC)/check_mod.f90 -o fcheck.o
oop148.o: $(SRC)/oop148.f90 check_mod.mod
@echo ------------------------------------ building test $@
-$(FC) -c $(FFLAGS) $(LDFLAGS) $(SRC)/oop148.f90 -o oop148.o
oop148: oop148.o fcheck.o
-$(FC) $(FFLAGS) $(LDFLAGS) oop148.o fcheck.o $(LIBS) -o oop148
oop148.run: oop148
@echo ------------------------------------ executing test oop148
oop148
-$(RM) my_container.mod
### TA Expected Targets ###
build: $(TEST)
.PHONY: run
run: $(TEST).run
verify: ;
### End of Expected Targets ###
| {
"pile_set_name": "Github"
} |
rule m2377_23390012d7831932
{
meta:
copyright="Copyright (c) 2014-2018 Support Intelligence Inc, All Rights Reserved."
engine="saphire/1.3.1 divinorum/0.998 icewater/0.4"
viz_url="http://icewater.io/en/cluster/query?h64=m2377.23390012d7831932"
cluster="m2377.23390012d7831932"
cluster_size="4"
filetype = "HTML document"
tlp = "amber"
version = "icewater snowflake"
author = "Rick Wesson (@wessorh) [email protected]"
date = "20171121"
license = "RIL-1.0 [Rick's Internet License] "
family="ramnit html script"
md5_hashes="['1015290765b8544bcde0e51b59be80d1','7625a7f21d93f4a2eb74e72a1c001267','e52336cf188a96babbeeeaec37abe2ef']"
strings:
$hex_string = { 696e672e46696c6553797374656d4f626a65637422290d0a44726f7050617468203d2046534f2e4765745370656369616c466f6c646572283229202620225c22 }
condition:
filesize > 65536 and filesize < 262144
and $hex_string
}
| {
"pile_set_name": "Github"
} |
using System.Text.Json.Serialization;
namespace Essensoft.AspNetCore.Payment.Alipay.Domain
{
/// <summary>
/// AlipayPayCodecAcodeDecodeUseModel Data Structure.
/// </summary>
public class AlipayPayCodecAcodeDecodeUseModel : AlipayObject
{
/// <summary>
/// Code identifying the specific business scene
/// </summary>
[JsonPropertyName("acode_scene")]
public string AcodeScene { get; set; }
/// <summary>
/// Scene code, used to distinguish between major scenes. Optional; if omitted, a non-payment scene is assumed by default
/// </summary>
[JsonPropertyName("biz_scene")]
public string BizScene { get; set; }
/// <summary>
/// ID of the scanning device
/// </summary>
[JsonPropertyName("device_id")]
public string DeviceId { get; set; }
/// <summary>
/// Base64-encoded form of the QR code byte array
/// </summary>
[JsonPropertyName("dynamic_id")]
public string DynamicId { get; set; }
/// <summary>
/// Institution type; Zheliban (浙里办) passes "00000008"
/// </summary>
[JsonPropertyName("institution_type")]
public string InstitutionType { get; set; }
/// <summary>
/// Geolocation information
/// </summary>
[JsonPropertyName("lbs_info")]
public string LbsInfo { get; set; }
/// <summary>
/// Timestamp of the scan (Long type)
/// </summary>
[JsonPropertyName("scan_time")]
public long ScanTime { get; set; }
/// <summary>
/// External business number identifying this decode request. Repeated decode requests for the same code must reuse the previous scene_no; different requests must use different scene_no values
/// </summary>
[JsonPropertyName("scene_no")]
public string SceneNo { get; set; }
}
}
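// Illustrative usage sketch, not part of the original file: serializing the model
// with System.Text.Json emits the snake_case keys declared via [JsonPropertyName]
// (e.g. "acode_scene", "dynamic_id", "scan_time"). The namespace, class name and
// all field values below are hypothetical placeholders.
namespace Essensoft.AspNetCore.Payment.Alipay.Samples
{
    internal static class AlipayPayCodecAcodeDecodeUseSample
    {
        internal static string BuildRequestJson()
        {
            var model = new Essensoft.AspNetCore.Payment.Alipay.Domain.AlipayPayCodecAcodeDecodeUseModel
            {
                AcodeScene = "example_scene",
                DynamicId = "base64-of-qr-code-bytes",
                ScanTime = 1600000000000L
            };
            // The [JsonPropertyName] attributes control the emitted key names.
            return System.Text.Json.JsonSerializer.Serialize(model);
        }
    }
}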
| {
"pile_set_name": "Github"
} |
/*
* Copyright 1994-1998 Sun Microsystems, Inc. All Rights Reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Sun designates this
* particular file as subject to the "Classpath" exception as provided
* by Sun in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Sun Microsystems, Inc., 4150 Network Circle, Santa Clara,
* CA 95054 USA or visit www.sun.com if you need additional information or
* have any questions.
*/
/*
* Solaris-dependent I/O
*/
#ifndef _JAVASOFT_SOLARIS_IO_MD_H_
#define _JAVASOFT_SOLARIS_IO_MD_H_
#include <sys/param.h>
#include <dirent.h>
#include <unistd.h>
#include <sys/types.h>
#include <sys/socket.h>
#include <netinet/in.h>
#include <netdb.h>
#define LINE_SEPARATOR "\n"
/* file system macros moved to sysmacros_md.h */
#endif /* !_JAVASOFT_SOLARIS_IO_MD_H_ */
| {
"pile_set_name": "Github"
} |
/*
* NetBSD header file, copied from
* http://gitorious.org/freebsd/freebsd/blobs/HEAD/sys/dev/mfi/mfireg.h
*/
/*-
* Copyright (c) 2006 IronPort Systems
* Copyright (c) 2007 LSI Corp.
* Copyright (c) 2007 Rajesh Prabhakaran.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
* OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
* OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
*/
#ifndef MFI_REG_H
#define MFI_REG_H
/*
* MegaRAID SAS MFI firmware definitions
*/
/*
* Start with the register set. All registers are 32 bits wide.
* The usual Intel IOP style setup.
*/
#define MFI_IMSG0 0x10 /* Inbound message 0 */
#define MFI_IMSG1 0x14 /* Inbound message 1 */
#define MFI_OMSG0 0x18 /* Outbound message 0 */
#define MFI_OMSG1 0x1c /* Outbound message 1 */
#define MFI_IDB 0x20 /* Inbound doorbell */
#define MFI_ISTS 0x24 /* Inbound interrupt status */
#define MFI_IMSK 0x28 /* Inbound interrupt mask */
#define MFI_ODB 0x2c /* Outbound doorbell */
#define MFI_OSTS 0x30 /* Outbound interrupt status */
#define MFI_OMSK 0x34 /* Outbound interrupt mask */
#define MFI_IQP 0x40 /* Inbound queue port */
#define MFI_OQP 0x44 /* Outbound queue port */
/*
* 1078-specific registers
*/
#define MFI_ODR0 0x9c /* outbound doorbell register0 */
#define MFI_ODCR0 0xa0 /* outbound doorbell clear register0 */
#define MFI_OSP0 0xb0 /* outbound scratch pad0 */
#define MFI_IQPL 0xc0 /* Inbound queue port (low bytes) */
#define MFI_IQPH 0xc4 /* Inbound queue port (high bytes) */
#define MFI_DIAG 0xf8 /* Host diag */
#define MFI_SEQ 0xfc /* Sequencer offset */
#define MFI_1078_EIM 0x80000004 /* 1078 enable interrupt mask */
#define MFI_RMI 0x2 /* reply message interrupt */
#define MFI_1078_RM 0x80000000 /* reply 1078 message interrupt */
#define MFI_ODC 0x4 /* outbound doorbell change interrupt */
/*
* gen2 specific changes
*/
#define MFI_GEN2_EIM 0x00000005 /* gen2 enable interrupt mask */
#define MFI_GEN2_RM 0x00000001 /* reply gen2 message interrupt */
/*
* skinny specific changes
*/
#define MFI_SKINNY_IDB 0x00 /* Inbound doorbell is at 0x00 for skinny */
#define MFI_SKINNY_RM 0x00000001 /* reply skinny message interrupt */
/* Bits for MFI_OSTS */
#define MFI_OSTS_INTR_VALID 0x00000002
/*
* Firmware state values. Found in OMSG0 during initialization.
*/
#define MFI_FWSTATE_MASK 0xf0000000
#define MFI_FWSTATE_UNDEFINED 0x00000000
#define MFI_FWSTATE_BB_INIT 0x10000000
#define MFI_FWSTATE_FW_INIT 0x40000000
#define MFI_FWSTATE_WAIT_HANDSHAKE 0x60000000
#define MFI_FWSTATE_FW_INIT_2 0x70000000
#define MFI_FWSTATE_DEVICE_SCAN 0x80000000
#define MFI_FWSTATE_BOOT_MSG_PENDING 0x90000000
#define MFI_FWSTATE_FLUSH_CACHE 0xa0000000
#define MFI_FWSTATE_READY 0xb0000000
#define MFI_FWSTATE_OPERATIONAL 0xc0000000
#define MFI_FWSTATE_FAULT 0xf0000000
#define MFI_FWSTATE_MAXSGL_MASK 0x00ff0000
#define MFI_FWSTATE_MAXCMD_MASK 0x0000ffff
#define MFI_FWSTATE_MSIX_SUPPORTED 0x04000000
#define MFI_FWSTATE_HOSTMEMREQD_MASK 0x08000000
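/*
 * Illustrative sketch, not part of the original header: how a driver might
 * decode the firmware state word found in OMSG0 during initialization, using
 * the masks above. The struct and helper names are hypothetical.
 */
struct mfi_fw_state_fields {
    uint32_t state;    /* one of the MFI_FWSTATE_* state values */
    uint32_t max_sgl;  /* maximum scatter/gather elements reported */
    uint32_t max_cmds; /* maximum outstanding commands reported */
};

static inline struct mfi_fw_state_fields mfi_decode_fw_state(uint32_t omsg0)
{
    struct mfi_fw_state_fields f;
    f.state    = omsg0 & MFI_FWSTATE_MASK;
    f.max_sgl  = (omsg0 & MFI_FWSTATE_MAXSGL_MASK) >> 16;
    f.max_cmds = omsg0 & MFI_FWSTATE_MAXCMD_MASK;
    return f;
}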
/*
* Control bits to drive the card to ready state. These go into the IDB
* register.
*/
#define MFI_FWINIT_ABORT 0x00000001 /* Abort all pending commands */
#define MFI_FWINIT_READY 0x00000002 /* Move from operational to ready */
#define MFI_FWINIT_MFIMODE 0x00000004 /* unknown */
#define MFI_FWINIT_CLEAR_HANDSHAKE 0x00000008 /* Respond to WAIT_HANDSHAKE */
#define MFI_FWINIT_HOTPLUG 0x00000010
#define MFI_FWINIT_STOP_ADP 0x00000020 /* Move to operational, stop */
#define MFI_FWINIT_ADP_RESET 0x00000040 /* Reset ADP */
/* MFI Commands */
typedef enum {
MFI_CMD_INIT = 0x00,
MFI_CMD_LD_READ,
MFI_CMD_LD_WRITE,
MFI_CMD_LD_SCSI_IO,
MFI_CMD_PD_SCSI_IO,
MFI_CMD_DCMD,
MFI_CMD_ABORT,
MFI_CMD_SMP,
MFI_CMD_STP
} mfi_cmd_t;
/* Direct commands */
typedef enum {
MFI_DCMD_CTRL_MFI_HOST_MEM_ALLOC = 0x0100e100,
MFI_DCMD_CTRL_GET_INFO = 0x01010000,
MFI_DCMD_CTRL_GET_PROPERTIES = 0x01020100,
MFI_DCMD_CTRL_SET_PROPERTIES = 0x01020200,
MFI_DCMD_CTRL_ALARM = 0x01030000,
MFI_DCMD_CTRL_ALARM_GET = 0x01030100,
MFI_DCMD_CTRL_ALARM_ENABLE = 0x01030200,
MFI_DCMD_CTRL_ALARM_DISABLE = 0x01030300,
MFI_DCMD_CTRL_ALARM_SILENCE = 0x01030400,
MFI_DCMD_CTRL_ALARM_TEST = 0x01030500,
MFI_DCMD_CTRL_EVENT_GETINFO = 0x01040100,
MFI_DCMD_CTRL_EVENT_CLEAR = 0x01040200,
MFI_DCMD_CTRL_EVENT_GET = 0x01040300,
MFI_DCMD_CTRL_EVENT_COUNT = 0x01040400,
MFI_DCMD_CTRL_EVENT_WAIT = 0x01040500,
MFI_DCMD_CTRL_SHUTDOWN = 0x01050000,
MFI_DCMD_HIBERNATE_STANDBY = 0x01060000,
MFI_DCMD_CTRL_GET_TIME = 0x01080101,
MFI_DCMD_CTRL_SET_TIME = 0x01080102,
MFI_DCMD_CTRL_BIOS_DATA_GET = 0x010c0100,
MFI_DCMD_CTRL_BIOS_DATA_SET = 0x010c0200,
MFI_DCMD_CTRL_FACTORY_DEFAULTS = 0x010d0000,
MFI_DCMD_CTRL_MFC_DEFAULTS_GET = 0x010e0201,
MFI_DCMD_CTRL_MFC_DEFAULTS_SET = 0x010e0202,
MFI_DCMD_CTRL_CACHE_FLUSH = 0x01101000,
MFI_DCMD_PD_GET_LIST = 0x02010000,
MFI_DCMD_PD_LIST_QUERY = 0x02010100,
MFI_DCMD_PD_GET_INFO = 0x02020000,
MFI_DCMD_PD_STATE_SET = 0x02030100,
MFI_DCMD_PD_REBUILD = 0x02040100,
MFI_DCMD_PD_BLINK = 0x02070100,
MFI_DCMD_PD_UNBLINK = 0x02070200,
MFI_DCMD_LD_GET_LIST = 0x03010000,
MFI_DCMD_LD_LIST_QUERY = 0x03010100,
MFI_DCMD_LD_GET_INFO = 0x03020000,
MFI_DCMD_LD_GET_PROP = 0x03030000,
MFI_DCMD_LD_SET_PROP = 0x03040000,
MFI_DCMD_LD_DELETE = 0x03090000,
MFI_DCMD_CFG_READ = 0x04010000,
MFI_DCMD_CFG_ADD = 0x04020000,
MFI_DCMD_CFG_CLEAR = 0x04030000,
MFI_DCMD_CFG_FOREIGN_READ = 0x04060100,
MFI_DCMD_CFG_FOREIGN_IMPORT = 0x04060400,
MFI_DCMD_BBU_STATUS = 0x05010000,
MFI_DCMD_BBU_CAPACITY_INFO = 0x05020000,
MFI_DCMD_BBU_DESIGN_INFO = 0x05030000,
MFI_DCMD_BBU_PROP_GET = 0x05050100,
MFI_DCMD_CLUSTER = 0x08000000,
MFI_DCMD_CLUSTER_RESET_ALL = 0x08010100,
MFI_DCMD_CLUSTER_RESET_LD = 0x08010200
} mfi_dcmd_t;
/* Modifiers for MFI_DCMD_CTRL_FLUSHCACHE */
#define MFI_FLUSHCACHE_CTRL 0x01
#define MFI_FLUSHCACHE_DISK 0x02
/* Modifiers for MFI_DCMD_CTRL_SHUTDOWN */
#define MFI_SHUTDOWN_SPINDOWN 0x01
/*
* MFI Frame flags
*/
typedef enum {
MFI_FRAME_DONT_POST_IN_REPLY_QUEUE = 0x0001,
MFI_FRAME_SGL64 = 0x0002,
MFI_FRAME_SENSE64 = 0x0004,
MFI_FRAME_DIR_WRITE = 0x0008,
MFI_FRAME_DIR_READ = 0x0010,
MFI_FRAME_IEEE_SGL = 0x0020,
} mfi_frame_flags;
/* MFI Status codes */
typedef enum {
MFI_STAT_OK = 0x00,
MFI_STAT_INVALID_CMD,
MFI_STAT_INVALID_DCMD,
MFI_STAT_INVALID_PARAMETER,
MFI_STAT_INVALID_SEQUENCE_NUMBER,
MFI_STAT_ABORT_NOT_POSSIBLE,
MFI_STAT_APP_HOST_CODE_NOT_FOUND,
MFI_STAT_APP_IN_USE,
MFI_STAT_APP_NOT_INITIALIZED,
MFI_STAT_ARRAY_INDEX_INVALID,
MFI_STAT_ARRAY_ROW_NOT_EMPTY,
MFI_STAT_CONFIG_RESOURCE_CONFLICT,
MFI_STAT_DEVICE_NOT_FOUND,
MFI_STAT_DRIVE_TOO_SMALL,
MFI_STAT_FLASH_ALLOC_FAIL,
MFI_STAT_FLASH_BUSY,
MFI_STAT_FLASH_ERROR = 0x10,
MFI_STAT_FLASH_IMAGE_BAD,
MFI_STAT_FLASH_IMAGE_INCOMPLETE,
MFI_STAT_FLASH_NOT_OPEN,
MFI_STAT_FLASH_NOT_STARTED,
MFI_STAT_FLUSH_FAILED,
MFI_STAT_HOST_CODE_NOT_FOUNT,
MFI_STAT_LD_CC_IN_PROGRESS,
MFI_STAT_LD_INIT_IN_PROGRESS,
MFI_STAT_LD_LBA_OUT_OF_RANGE,
MFI_STAT_LD_MAX_CONFIGURED,
MFI_STAT_LD_NOT_OPTIMAL,
MFI_STAT_LD_RBLD_IN_PROGRESS,
MFI_STAT_LD_RECON_IN_PROGRESS,
MFI_STAT_LD_WRONG_RAID_LEVEL,
MFI_STAT_MAX_SPARES_EXCEEDED,
MFI_STAT_MEMORY_NOT_AVAILABLE = 0x20,
MFI_STAT_MFC_HW_ERROR,
MFI_STAT_NO_HW_PRESENT,
MFI_STAT_NOT_FOUND,
MFI_STAT_NOT_IN_ENCL,
MFI_STAT_PD_CLEAR_IN_PROGRESS,
MFI_STAT_PD_TYPE_WRONG,
MFI_STAT_PR_DISABLED,
MFI_STAT_ROW_INDEX_INVALID,
MFI_STAT_SAS_CONFIG_INVALID_ACTION,
MFI_STAT_SAS_CONFIG_INVALID_DATA,
MFI_STAT_SAS_CONFIG_INVALID_PAGE,
MFI_STAT_SAS_CONFIG_INVALID_TYPE,
MFI_STAT_SCSI_DONE_WITH_ERROR,
MFI_STAT_SCSI_IO_FAILED,
MFI_STAT_SCSI_RESERVATION_CONFLICT,
MFI_STAT_SHUTDOWN_FAILED = 0x30,
MFI_STAT_TIME_NOT_SET,
MFI_STAT_WRONG_STATE,
MFI_STAT_LD_OFFLINE,
MFI_STAT_PEER_NOTIFICATION_REJECTED,
MFI_STAT_PEER_NOTIFICATION_FAILED,
MFI_STAT_RESERVATION_IN_PROGRESS,
MFI_STAT_I2C_ERRORS_DETECTED,
MFI_STAT_PCI_ERRORS_DETECTED,
MFI_STAT_DIAG_FAILED,
MFI_STAT_BOOT_MSG_PENDING,
MFI_STAT_FOREIGN_CONFIG_INCOMPLETE,
MFI_STAT_INVALID_SGL,
MFI_STAT_UNSUPPORTED_HW,
MFI_STAT_CC_SCHEDULE_DISABLED,
MFI_STAT_PD_COPYBACK_IN_PROGRESS,
MFI_STAT_MULTIPLE_PDS_IN_ARRAY = 0x40,
MFI_STAT_FW_DOWNLOAD_ERROR,
MFI_STAT_FEATURE_SECURITY_NOT_ENABLED,
MFI_STAT_LOCK_KEY_ALREADY_EXISTS,
MFI_STAT_LOCK_KEY_BACKUP_NOT_ALLOWED,
MFI_STAT_LOCK_KEY_VERIFY_NOT_ALLOWED,
MFI_STAT_LOCK_KEY_VERIFY_FAILED,
MFI_STAT_LOCK_KEY_REKEY_NOT_ALLOWED,
MFI_STAT_LOCK_KEY_INVALID,
MFI_STAT_LOCK_KEY_ESCROW_INVALID,
MFI_STAT_LOCK_KEY_BACKUP_REQUIRED,
MFI_STAT_SECURE_LD_EXISTS,
MFI_STAT_LD_SECURE_NOT_ALLOWED,
MFI_STAT_REPROVISION_NOT_ALLOWED,
MFI_STAT_PD_SECURITY_TYPE_WRONG,
MFI_STAT_LD_ENCRYPTION_TYPE_INVALID,
MFI_STAT_CONFIG_FDE_NON_FDE_MIX_NOT_ALLOWED = 0x50,
MFI_STAT_CONFIG_LD_ENCRYPTION_TYPE_MIX_NOT_ALLOWED,
MFI_STAT_SECRET_KEY_NOT_ALLOWED,
MFI_STAT_PD_HW_ERRORS_DETECTED,
MFI_STAT_LD_CACHE_PINNED,
MFI_STAT_POWER_STATE_SET_IN_PROGRESS,
MFI_STAT_POWER_STATE_SET_BUSY,
MFI_STAT_POWER_STATE_WRONG,
MFI_STAT_PR_NO_AVAILABLE_PD_FOUND,
MFI_STAT_CTRL_RESET_REQUIRED,
MFI_STAT_LOCK_KEY_EKM_NO_BOOT_AGENT,
MFI_STAT_SNAP_NO_SPACE,
MFI_STAT_SNAP_PARTIAL_FAILURE,
MFI_STAT_UPGRADE_KEY_INCOMPATIBLE,
MFI_STAT_PFK_INCOMPATIBLE,
MFI_STAT_PD_MAX_UNCONFIGURED,
MFI_STAT_IO_METRICS_DISABLED = 0x60,
MFI_STAT_AEC_NOT_STOPPED,
MFI_STAT_PI_TYPE_WRONG,
MFI_STAT_LD_PD_PI_INCOMPATIBLE,
MFI_STAT_PI_NOT_ENABLED,
MFI_STAT_LD_BLOCK_SIZE_MISMATCH,
MFI_STAT_INVALID_STATUS = 0xFF
} mfi_status_t;
/* Event classes */
typedef enum {
MFI_EVT_CLASS_DEBUG = -2,
MFI_EVT_CLASS_PROGRESS = -1,
MFI_EVT_CLASS_INFO = 0,
MFI_EVT_CLASS_WARNING = 1,
MFI_EVT_CLASS_CRITICAL = 2,
MFI_EVT_CLASS_FATAL = 3,
MFI_EVT_CLASS_DEAD = 4
} mfi_evt_class_t;
/* Event locales */
typedef enum {
MFI_EVT_LOCALE_LD = 0x0001,
MFI_EVT_LOCALE_PD = 0x0002,
MFI_EVT_LOCALE_ENCL = 0x0004,
MFI_EVT_LOCALE_BBU = 0x0008,
MFI_EVT_LOCALE_SAS = 0x0010,
MFI_EVT_LOCALE_CTRL = 0x0020,
MFI_EVT_LOCALE_CONFIG = 0x0040,
MFI_EVT_LOCALE_CLUSTER = 0x0080,
MFI_EVT_LOCALE_ALL = 0xffff
} mfi_evt_locale_t;
/* Event args */
typedef enum {
MR_EVT_ARGS_NONE = 0x00,
MR_EVT_ARGS_CDB_SENSE,
MR_EVT_ARGS_LD,
MR_EVT_ARGS_LD_COUNT,
MR_EVT_ARGS_LD_LBA,
MR_EVT_ARGS_LD_OWNER,
MR_EVT_ARGS_LD_LBA_PD_LBA,
MR_EVT_ARGS_LD_PROG,
MR_EVT_ARGS_LD_STATE,
MR_EVT_ARGS_LD_STRIP,
MR_EVT_ARGS_PD,
MR_EVT_ARGS_PD_ERR,
MR_EVT_ARGS_PD_LBA,
MR_EVT_ARGS_PD_LBA_LD,
MR_EVT_ARGS_PD_PROG,
MR_EVT_ARGS_PD_STATE,
MR_EVT_ARGS_PCI,
MR_EVT_ARGS_RATE,
MR_EVT_ARGS_STR,
MR_EVT_ARGS_TIME,
MR_EVT_ARGS_ECC,
MR_EVT_ARGS_LD_PROP,
MR_EVT_ARGS_PD_SPARE,
MR_EVT_ARGS_PD_INDEX,
MR_EVT_ARGS_DIAG_PASS,
MR_EVT_ARGS_DIAG_FAIL,
MR_EVT_ARGS_PD_LBA_LBA,
MR_EVT_ARGS_PORT_PHY,
MR_EVT_ARGS_PD_MISSING,
MR_EVT_ARGS_PD_ADDRESS,
MR_EVT_ARGS_BITMAP,
MR_EVT_ARGS_CONNECTOR,
MR_EVT_ARGS_PD_PD,
MR_EVT_ARGS_PD_FRU,
MR_EVT_ARGS_PD_PATHINFO,
MR_EVT_ARGS_PD_POWER_STATE,
MR_EVT_ARGS_GENERIC,
} mfi_evt_args;
/* Event codes */
#define MR_EVT_CFG_CLEARED 0x0004
#define MR_EVT_CTRL_SHUTDOWN 0x002a
#define MR_EVT_LD_STATE_CHANGE 0x0051
#define MR_EVT_PD_INSERTED 0x005b
#define MR_EVT_PD_REMOVED 0x0070
#define MR_EVT_PD_STATE_CHANGED 0x0072
#define MR_EVT_LD_CREATED 0x008a
#define MR_EVT_LD_DELETED 0x008b
#define MR_EVT_FOREIGN_CFG_IMPORTED 0x00db
#define MR_EVT_LD_OFFLINE 0x00fc
#define MR_EVT_CTRL_HOST_BUS_SCAN_REQUESTED 0x0152
typedef enum {
MR_LD_CACHE_WRITE_BACK = 0x01,
MR_LD_CACHE_WRITE_ADAPTIVE = 0x02,
MR_LD_CACHE_READ_AHEAD = 0x04,
MR_LD_CACHE_READ_ADAPTIVE = 0x08,
MR_LD_CACHE_WRITE_CACHE_BAD_BBU = 0x10,
MR_LD_CACHE_ALLOW_WRITE_CACHE = 0x20,
MR_LD_CACHE_ALLOW_READ_CACHE = 0x40
} mfi_ld_cache;
typedef enum {
MR_PD_CACHE_UNCHANGED = 0,
MR_PD_CACHE_ENABLE = 1,
MR_PD_CACHE_DISABLE = 2
} mfi_pd_cache;
typedef enum {
MR_PD_QUERY_TYPE_ALL = 0,
MR_PD_QUERY_TYPE_STATE = 1,
MR_PD_QUERY_TYPE_POWER_STATE = 2,
MR_PD_QUERY_TYPE_MEDIA_TYPE = 3,
MR_PD_QUERY_TYPE_SPEED = 4,
MR_PD_QUERY_TYPE_EXPOSED_TO_HOST = 5, /* query for system drives */
} mfi_pd_query_type;
typedef enum {
MR_LD_QUERY_TYPE_ALL = 0,
MR_LD_QUERY_TYPE_EXPOSED_TO_HOST = 1,
MR_LD_QUERY_TYPE_USED_TGT_IDS = 2,
MR_LD_QUERY_TYPE_CLUSTER_ACCESS = 3,
MR_LD_QUERY_TYPE_CLUSTER_LOCALE = 4,
} mfi_ld_query_type;
/*
* Other properties and definitions
*/
#define MFI_MAX_PD_CHANNELS 2
#define MFI_MAX_LD_CHANNELS 2
#define MFI_MAX_CHANNELS (MFI_MAX_PD_CHANNELS + MFI_MAX_LD_CHANNELS)
#define MFI_MAX_CHANNEL_DEVS 128
#define MFI_DEFAULT_ID -1
#define MFI_MAX_LUN 8
#define MFI_MAX_LD 64
#define MFI_FRAME_SIZE 64
#define MFI_MBOX_SIZE 12
/* Firmware flashing can take 40s */
#define MFI_POLL_TIMEOUT_SECS 50
/* Allow for speedier math calculations */
#define MFI_SECTOR_LEN 512
/* Scatter Gather elements */
struct mfi_sg32 {
uint32_t addr;
uint32_t len;
} QEMU_PACKED;
struct mfi_sg64 {
uint64_t addr;
uint32_t len;
} QEMU_PACKED;
struct mfi_sg_skinny {
uint64_t addr;
uint32_t len;
uint32_t flag;
} QEMU_PACKED;
union mfi_sgl {
struct mfi_sg32 sg32[1];
struct mfi_sg64 sg64[1];
struct mfi_sg_skinny sg_skinny[1];
} QEMU_PACKED;
/* Message frames. All messages have a common header */
struct mfi_frame_header {
uint8_t frame_cmd;
uint8_t sense_len;
uint8_t cmd_status;
uint8_t scsi_status;
uint8_t target_id;
uint8_t lun_id;
uint8_t cdb_len;
uint8_t sge_count;
uint64_t context;
uint16_t flags;
uint16_t timeout;
uint32_t data_len;
} QEMU_PACKED;
struct mfi_init_frame {
struct mfi_frame_header header;
uint32_t qinfo_new_addr_lo;
uint32_t qinfo_new_addr_hi;
uint32_t qinfo_old_addr_lo;
uint32_t qinfo_old_addr_hi;
uint32_t reserved[6];
};
#define MFI_IO_FRAME_SIZE 40
struct mfi_io_frame {
struct mfi_frame_header header;
uint32_t sense_addr_lo;
uint32_t sense_addr_hi;
uint32_t lba_lo;
uint32_t lba_hi;
union mfi_sgl sgl;
} QEMU_PACKED;
#define MFI_PASS_FRAME_SIZE 48
struct mfi_pass_frame {
struct mfi_frame_header header;
uint32_t sense_addr_lo;
uint32_t sense_addr_hi;
uint8_t cdb[16];
union mfi_sgl sgl;
} QEMU_PACKED;
#define MFI_DCMD_FRAME_SIZE 40
struct mfi_dcmd_frame {
struct mfi_frame_header header;
uint32_t opcode;
uint8_t mbox[MFI_MBOX_SIZE];
union mfi_sgl sgl;
} QEMU_PACKED;
struct mfi_abort_frame {
struct mfi_frame_header header;
uint64_t abort_context;
uint32_t abort_mfi_addr_lo;
uint32_t abort_mfi_addr_hi;
uint32_t reserved1[6];
} QEMU_PACKED;
struct mfi_smp_frame {
struct mfi_frame_header header;
uint64_t sas_addr;
union {
struct mfi_sg32 sg32[2];
struct mfi_sg64 sg64[2];
} sgl;
} QEMU_PACKED;
struct mfi_stp_frame {
struct mfi_frame_header header;
uint16_t fis[10];
uint32_t stp_flags;
union {
struct mfi_sg32 sg32[2];
struct mfi_sg64 sg64[2];
} sgl;
} QEMU_PACKED;
union mfi_frame {
struct mfi_frame_header header;
struct mfi_init_frame init;
struct mfi_io_frame io;
struct mfi_pass_frame pass;
struct mfi_dcmd_frame dcmd;
struct mfi_abort_frame abort;
struct mfi_smp_frame smp;
struct mfi_stp_frame stp;
uint64_t raw[8];
uint8_t bytes[MFI_FRAME_SIZE];
};
#define MFI_SENSE_LEN 128
struct mfi_sense {
uint8_t data[MFI_SENSE_LEN];
};
#define MFI_QUEUE_FLAG_CONTEXT64 0x00000002
/* The queue init structure that is passed with the init message */
struct mfi_init_qinfo {
uint32_t flags;
uint32_t rq_entries;
uint32_t rq_addr_lo;
uint32_t rq_addr_hi;
uint32_t pi_addr_lo;
uint32_t pi_addr_hi;
uint32_t ci_addr_lo;
uint32_t ci_addr_hi;
} QEMU_PACKED;
/* Controller properties */
struct mfi_ctrl_props {
uint16_t seq_num;
uint16_t pred_fail_poll_interval;
uint16_t intr_throttle_cnt;
uint16_t intr_throttle_timeout;
uint8_t rebuild_rate;
uint8_t patrol_read_rate;
uint8_t bgi_rate;
uint8_t cc_rate;
uint8_t recon_rate;
uint8_t cache_flush_interval;
uint8_t spinup_drv_cnt;
uint8_t spinup_delay;
uint8_t cluster_enable;
uint8_t coercion_mode;
uint8_t alarm_enable;
uint8_t disable_auto_rebuild;
uint8_t disable_battery_warn;
uint8_t ecc_bucket_size;
uint16_t ecc_bucket_leak_rate;
uint8_t restore_hotspare_on_insertion;
uint8_t expose_encl_devices;
uint8_t maintainPdFailHistory;
uint8_t disallowHostRequestReordering;
uint8_t abortCCOnError;
uint8_t loadBalanceMode;
uint8_t disableAutoDetectBackplane;
uint8_t snapVDSpace;
uint32_t OnOffProperties;
/* set TRUE to disable copyBack (0=copyback enabled) */
#define MFI_CTRL_PROP_CopyBackDisabled (1 << 0)
#define MFI_CTRL_PROP_SMARTerEnabled (1 << 1)
#define MFI_CTRL_PROP_PRCorrectUnconfiguredAreas (1 << 2)
#define MFI_CTRL_PROP_UseFdeOnly (1 << 3)
#define MFI_CTRL_PROP_DisableNCQ (1 << 4)
#define MFI_CTRL_PROP_SSDSMARTerEnabled (1 << 5)
#define MFI_CTRL_PROP_SSDPatrolReadEnabled (1 << 6)
#define MFI_CTRL_PROP_EnableSpinDownUnconfigured (1 << 7)
#define MFI_CTRL_PROP_AutoEnhancedImport (1 << 8)
#define MFI_CTRL_PROP_EnableSecretKeyControl (1 << 9)
#define MFI_CTRL_PROP_DisableOnlineCtrlReset (1 << 10)
#define MFI_CTRL_PROP_AllowBootWithPinnedCache (1 << 11)
#define MFI_CTRL_PROP_DisableSpinDownHS (1 << 12)
#define MFI_CTRL_PROP_EnableJBOD (1 << 13)
uint8_t autoSnapVDSpace; /* % of source LD to be
* reserved for auto snapshot
* in snapshot repository, for
* metadata and user data
* 1=5%, 2=10%, 3=15% and so on
*/
uint8_t viewSpace; /* snapshot writeable VIEWs
* capacity as a % of source LD
* capacity. 0=READ only
* 1=5%, 2=10%, 3=15% and so on
*/
uint16_t spinDownTime; /* # of idle minutes before device
* is spun down (0=use FW defaults)
*/
uint8_t reserved[24];
} QEMU_PACKED;
/* PCI information about the card. */
struct mfi_info_pci {
uint16_t vendor;
uint16_t device;
uint16_t subvendor;
uint16_t subdevice;
uint8_t reserved[24];
} QEMU_PACKED;
/* Host (front end) interface information */
struct mfi_info_host {
uint8_t type;
#define MFI_INFO_HOST_PCIX 0x01
#define MFI_INFO_HOST_PCIE 0x02
#define MFI_INFO_HOST_ISCSI 0x04
#define MFI_INFO_HOST_SAS3G 0x08
uint8_t reserved[6];
uint8_t port_count;
uint64_t port_addr[8];
} QEMU_PACKED;
/* Device (back end) interface information */
struct mfi_info_device {
uint8_t type;
#define MFI_INFO_DEV_SPI 0x01
#define MFI_INFO_DEV_SAS3G 0x02
#define MFI_INFO_DEV_SATA1 0x04
#define MFI_INFO_DEV_SATA3G 0x08
#define MFI_INFO_DEV_PCIE 0x10
uint8_t reserved[6];
uint8_t port_count;
uint64_t port_addr[8];
} QEMU_PACKED;
/* Firmware component information */
struct mfi_info_component {
char name[8];
char version[32];
char build_date[16];
char build_time[16];
} QEMU_PACKED;
/* Controller default settings */
struct mfi_defaults {
uint64_t sas_addr;
uint8_t phy_polarity;
uint8_t background_rate;
uint8_t stripe_size;
uint8_t flush_time;
uint8_t write_back;
uint8_t read_ahead;
uint8_t cache_when_bbu_bad;
uint8_t cached_io;
uint8_t smart_mode;
uint8_t alarm_disable;
uint8_t coercion;
uint8_t zrc_config;
uint8_t dirty_led_shows_drive_activity;
uint8_t bios_continue_on_error;
uint8_t spindown_mode;
uint8_t allowed_device_types;
uint8_t allow_mix_in_enclosure;
uint8_t allow_mix_in_ld;
uint8_t allow_sata_in_cluster;
uint8_t max_chained_enclosures;
uint8_t disable_ctrl_r;
uint8_t enable_web_bios;
uint8_t phy_polarity_split;
uint8_t direct_pd_mapping;
uint8_t bios_enumerate_lds;
uint8_t restored_hot_spare_on_insertion;
uint8_t expose_enclosure_devices;
uint8_t maintain_pd_fail_history;
uint8_t disable_puncture;
uint8_t zero_based_enumeration;
uint8_t disable_preboot_cli;
uint8_t show_drive_led_on_activity;
uint8_t cluster_disable;
uint8_t sas_disable;
uint8_t auto_detect_backplane;
uint8_t fde_only;
uint8_t delay_during_post;
uint8_t resv[19];
} QEMU_PACKED;
/* BIOS settings */
struct mfi_bios_data {
uint16_t boot_target_id;
uint8_t do_not_int_13;
uint8_t continue_on_error;
uint8_t verbose;
uint8_t geometry;
uint8_t expose_all_drives;
uint8_t reserved[56];
uint8_t check_sum;
} QEMU_PACKED;
/* SAS (?) controller info, returned from MFI_DCMD_CTRL_GET_INFO. */
struct mfi_ctrl_info {
struct mfi_info_pci pci;
struct mfi_info_host host;
struct mfi_info_device device;
/* Firmware components that are present and active. */
uint32_t image_check_word;
uint32_t image_component_count;
struct mfi_info_component image_component[8];
/* Firmware components that have been flashed but are inactive */
uint32_t pending_image_component_count;
struct mfi_info_component pending_image_component[8];
uint8_t max_arms;
uint8_t max_spans;
uint8_t max_arrays;
uint8_t max_lds;
char product_name[80];
char serial_number[32];
uint32_t hw_present;
#define MFI_INFO_HW_BBU 0x01
#define MFI_INFO_HW_ALARM 0x02
#define MFI_INFO_HW_NVRAM 0x04
#define MFI_INFO_HW_UART 0x08
#define MFI_INFO_HW_MEM 0x10
#define MFI_INFO_HW_FLASH 0x20
uint32_t current_fw_time;
uint16_t max_cmds;
uint16_t max_sg_elements;
uint32_t max_request_size;
uint16_t lds_present;
uint16_t lds_degraded;
uint16_t lds_offline;
uint16_t pd_present;
uint16_t pd_disks_present;
uint16_t pd_disks_pred_failure;
uint16_t pd_disks_failed;
uint16_t nvram_size;
uint16_t memory_size;
uint16_t flash_size;
uint16_t ram_correctable_errors;
uint16_t ram_uncorrectable_errors;
uint8_t cluster_allowed;
uint8_t cluster_active;
uint16_t max_strips_per_io;
uint32_t raid_levels;
#define MFI_INFO_RAID_0 0x01
#define MFI_INFO_RAID_1 0x02
#define MFI_INFO_RAID_5 0x04
#define MFI_INFO_RAID_1E 0x08
#define MFI_INFO_RAID_6 0x10
uint32_t adapter_ops;
#define MFI_INFO_AOPS_RBLD_RATE 0x0001
#define MFI_INFO_AOPS_CC_RATE 0x0002
#define MFI_INFO_AOPS_BGI_RATE 0x0004
#define MFI_INFO_AOPS_RECON_RATE 0x0008
#define MFI_INFO_AOPS_PATROL_RATE 0x0010
#define MFI_INFO_AOPS_ALARM_CONTROL 0x0020
#define MFI_INFO_AOPS_CLUSTER_SUPPORTED 0x0040
#define MFI_INFO_AOPS_BBU 0x0080
#define MFI_INFO_AOPS_SPANNING_ALLOWED 0x0100
#define MFI_INFO_AOPS_DEDICATED_SPARES 0x0200
#define MFI_INFO_AOPS_REVERTIBLE_SPARES 0x0400
#define MFI_INFO_AOPS_FOREIGN_IMPORT 0x0800
#define MFI_INFO_AOPS_SELF_DIAGNOSTIC 0x1000
#define MFI_INFO_AOPS_MIXED_ARRAY 0x2000
#define MFI_INFO_AOPS_GLOBAL_SPARES 0x4000
uint32_t ld_ops;
#define MFI_INFO_LDOPS_READ_POLICY 0x01
#define MFI_INFO_LDOPS_WRITE_POLICY 0x02
#define MFI_INFO_LDOPS_IO_POLICY 0x04
#define MFI_INFO_LDOPS_ACCESS_POLICY 0x08
#define MFI_INFO_LDOPS_DISK_CACHE_POLICY 0x10
struct {
uint8_t min;
uint8_t max;
uint8_t reserved[2];
} QEMU_PACKED stripe_sz_ops;
uint32_t pd_ops;
#define MFI_INFO_PDOPS_FORCE_ONLINE 0x01
#define MFI_INFO_PDOPS_FORCE_OFFLINE 0x02
#define MFI_INFO_PDOPS_FORCE_REBUILD 0x04
uint32_t pd_mix_support;
#define MFI_INFO_PDMIX_SAS 0x01
#define MFI_INFO_PDMIX_SATA 0x02
#define MFI_INFO_PDMIX_ENCL 0x04
#define MFI_INFO_PDMIX_LD 0x08
#define MFI_INFO_PDMIX_SATA_CLUSTER 0x10
uint8_t ecc_bucket_count;
uint8_t reserved2[11];
struct mfi_ctrl_props properties;
char package_version[0x60];
uint8_t pad[0x800 - 0x6a0];
} QEMU_PACKED;
/* keep track of an event. */
union mfi_evt {
struct {
uint16_t locale;
uint8_t reserved;
int8_t class;
} members;
uint32_t word;
} QEMU_PACKED;
/* event log state. */
struct mfi_evt_log_state {
uint32_t newest_seq_num;
uint32_t oldest_seq_num;
uint32_t clear_seq_num;
uint32_t shutdown_seq_num;
uint32_t boot_seq_num;
} QEMU_PACKED;
struct mfi_progress {
uint16_t progress;
uint16_t elapsed_seconds;
} QEMU_PACKED;
struct mfi_evt_ld {
uint16_t target_id;
uint8_t ld_index;
uint8_t reserved;
} QEMU_PACKED;
struct mfi_evt_pd {
uint16_t device_id;
uint8_t enclosure_index;
uint8_t slot_number;
} QEMU_PACKED;
/* event detail, returned from MFI_DCMD_CTRL_EVENT_WAIT. */
struct mfi_evt_detail {
uint32_t seq;
uint32_t time;
uint32_t code;
union mfi_evt class;
uint8_t arg_type;
uint8_t reserved1[15];
union {
struct {
struct mfi_evt_pd pd;
uint8_t cdb_len;
uint8_t sense_len;
uint8_t reserved[2];
uint8_t cdb[16];
uint8_t sense[64];
} cdb_sense;
struct mfi_evt_ld ld;
struct {
struct mfi_evt_ld ld;
uint64_t count;
} ld_count;
struct {
uint64_t lba;
struct mfi_evt_ld ld;
} ld_lba;
struct {
struct mfi_evt_ld ld;
uint32_t pre_owner;
uint32_t new_owner;
} ld_owner;
struct {
uint64_t ld_lba;
uint64_t pd_lba;
struct mfi_evt_ld ld;
struct mfi_evt_pd pd;
} ld_lba_pd_lba;
struct {
struct mfi_evt_ld ld;
struct mfi_progress prog;
} ld_prog;
struct {
struct mfi_evt_ld ld;
uint32_t prev_state;
uint32_t new_state;
} ld_state;
struct {
uint64_t strip;
struct mfi_evt_ld ld;
} ld_strip;
struct mfi_evt_pd pd;
struct {
struct mfi_evt_pd pd;
uint32_t err;
} pd_err;
struct {
uint64_t lba;
struct mfi_evt_pd pd;
} pd_lba;
struct {
uint64_t lba;
struct mfi_evt_pd pd;
struct mfi_evt_ld ld;
} pd_lba_ld;
struct {
struct mfi_evt_pd pd;
struct mfi_progress prog;
} pd_prog;
struct {
struct mfi_evt_pd ld;
uint32_t prev_state;
uint32_t new_state;
} pd_state;
struct {
uint16_t venderId;
uint16_t deviceId;
uint16_t subVenderId;
uint16_t subDeviceId;
} pci;
uint32_t rate;
char str[96];
struct {
uint32_t rtc;
uint16_t elapsedSeconds;
} time;
struct {
uint32_t ecar;
uint32_t elog;
char str[64];
} ecc;
uint8_t b[96];
uint16_t s[48];
uint32_t w[24];
uint64_t d[12];
} args;
char description[128];
} QEMU_PACKED;
struct mfi_evt_list {
uint32_t count;
uint32_t reserved;
struct mfi_evt_detail event[1];
} QEMU_PACKED;
union mfi_pd_ref {
struct {
uint16_t device_id;
uint16_t seq_num;
} v;
uint32_t ref;
} QEMU_PACKED;
union mfi_pd_ddf_type {
struct {
uint16_t pd_type;
#define MFI_PD_DDF_TYPE_FORCED_PD_GUID (1 << 0)
#define MFI_PD_DDF_TYPE_IN_VD (1 << 1)
#define MFI_PD_DDF_TYPE_IS_GLOBAL_SPARE (1 << 2)
#define MFI_PD_DDF_TYPE_IS_SPARE (1 << 3)
#define MFI_PD_DDF_TYPE_IS_FOREIGN (1 << 4)
#define MFI_PD_DDF_TYPE_INTF_SPI (1 << 12)
#define MFI_PD_DDF_TYPE_INTF_SAS (1 << 13)
#define MFI_PD_DDF_TYPE_INTF_SATA1 (1 << 14)
#define MFI_PD_DDF_TYPE_INTF_SATA3G (1 << 15)
uint16_t reserved;
} ddf;
struct {
uint32_t reserved;
} non_disk;
uint32_t type;
} QEMU_PACKED;
struct mfi_pd_progress {
uint32_t active;
#define PD_PROGRESS_ACTIVE_REBUILD (1 << 0)
#define PD_PROGRESS_ACTIVE_PATROL (1 << 1)
#define PD_PROGRESS_ACTIVE_CLEAR (1 << 2)
struct mfi_progress rbld;
struct mfi_progress patrol;
struct mfi_progress clear;
struct mfi_progress reserved[4];
} QEMU_PACKED;
struct mfi_pd_info {
union mfi_pd_ref ref;
uint8_t inquiry_data[96];
uint8_t vpd_page83[64];
uint8_t not_supported;
uint8_t scsi_dev_type;
uint8_t connected_port_bitmap;
uint8_t device_speed;
uint32_t media_err_count;
uint32_t other_err_count;
uint32_t pred_fail_count;
uint32_t last_pred_fail_event_seq_num;
uint16_t fw_state;
uint8_t disable_for_removal;
uint8_t link_speed;
union mfi_pd_ddf_type state;
struct {
uint8_t count;
uint8_t is_path_broken;
uint8_t reserved[6];
uint64_t sas_addr[4];
} path_info;
uint64_t raw_size;
uint64_t non_coerced_size;
uint64_t coerced_size;
uint16_t encl_device_id;
uint8_t encl_index;
uint8_t slot_number;
struct mfi_pd_progress prog_info;
uint8_t bad_block_table_full;
uint8_t unusable_in_current_config;
uint8_t vpd_page83_ext[64];
uint8_t reserved[512-358];
} QEMU_PACKED;
struct mfi_pd_address {
uint16_t device_id;
uint16_t encl_device_id;
uint8_t encl_index;
uint8_t slot_number;
uint8_t scsi_dev_type;
uint8_t connect_port_bitmap;
uint64_t sas_addr[2];
} QEMU_PACKED;
#define MFI_MAX_SYS_PDS 240
struct mfi_pd_list {
uint32_t size;
uint32_t count;
struct mfi_pd_address addr[MFI_MAX_SYS_PDS];
} QEMU_PACKED;
union mfi_ld_ref {
struct {
uint8_t target_id;
uint8_t lun_id;
uint16_t seq;
} v;
uint32_t ref;
} QEMU_PACKED;
struct mfi_ld_list {
uint32_t ld_count;
uint32_t reserved1;
struct {
union mfi_ld_ref ld;
uint8_t state;
uint8_t reserved2[3];
uint64_t size;
} ld_list[MFI_MAX_LD];
} QEMU_PACKED;
enum mfi_ld_access {
MFI_LD_ACCESS_RW = 0,
MFI_LD_ACCSSS_RO = 2,
MFI_LD_ACCESS_BLOCKED = 3,
};
#define MFI_LD_ACCESS_MASK 3
enum mfi_ld_state {
MFI_LD_STATE_OFFLINE = 0,
MFI_LD_STATE_PARTIALLY_DEGRADED = 1,
MFI_LD_STATE_DEGRADED = 2,
MFI_LD_STATE_OPTIMAL = 3
};
enum mfi_syspd_state {
MFI_PD_STATE_UNCONFIGURED_GOOD = 0x00,
MFI_PD_STATE_UNCONFIGURED_BAD = 0x01,
MFI_PD_STATE_HOT_SPARE = 0x02,
MFI_PD_STATE_OFFLINE = 0x10,
MFI_PD_STATE_FAILED = 0x11,
MFI_PD_STATE_REBUILD = 0x14,
MFI_PD_STATE_ONLINE = 0x18,
MFI_PD_STATE_COPYBACK = 0x20,
MFI_PD_STATE_SYSTEM = 0x40
};
struct mfi_ld_props {
union mfi_ld_ref ld;
char name[16];
uint8_t default_cache_policy;
uint8_t access_policy;
uint8_t disk_cache_policy;
uint8_t current_cache_policy;
uint8_t no_bgi;
uint8_t reserved[7];
} QEMU_PACKED;
struct mfi_ld_params {
uint8_t primary_raid_level;
uint8_t raid_level_qualifier;
uint8_t secondary_raid_level;
uint8_t stripe_size;
uint8_t num_drives;
uint8_t span_depth;
uint8_t state;
uint8_t init_state;
uint8_t is_consistent;
uint8_t reserved[23];
} QEMU_PACKED;
struct mfi_ld_progress {
uint32_t active;
#define MFI_LD_PROGRESS_CC (1<<0)
#define MFI_LD_PROGRESS_BGI (1<<1)
#define MFI_LD_PROGRESS_FGI (1<<2)
#define MFI_LD_PORGRESS_RECON (1<<3)
struct mfi_progress cc;
struct mfi_progress bgi;
struct mfi_progress fgi;
struct mfi_progress recon;
struct mfi_progress reserved[4];
} QEMU_PACKED;
struct mfi_span {
uint64_t start_block;
uint64_t num_blocks;
uint16_t array_ref;
uint8_t reserved[6];
} QEMU_PACKED;
#define MFI_MAX_SPAN_DEPTH 8
struct mfi_ld_config {
struct mfi_ld_props properties;
struct mfi_ld_params params;
struct mfi_span span[MFI_MAX_SPAN_DEPTH];
} QEMU_PACKED;
struct mfi_ld_info {
struct mfi_ld_config ld_config;
uint64_t size;
struct mfi_ld_progress progress;
uint16_t cluster_owner;
uint8_t reconstruct_active;
uint8_t reserved1[1];
uint8_t vpd_page83[64];
uint8_t reserved2[16];
} QEMU_PACKED;
union mfi_spare_type {
uint8_t flags;
#define MFI_SPARE_IS_DEDICATED (1 << 0)
#define MFI_SPARE_IS_REVERTABLE (1 << 1)
#define MFI_SPARE_IS_ENCL_AFFINITY (1 << 2)
uint8_t type;
} QEMU_PACKED;
#define MFI_MAX_ARRAYS 16
struct mfi_spare {
union mfi_pd_ref ref;
union mfi_spare_type spare_type;
uint8_t reserved[2];
uint8_t array_count;
uint16_t array_refd[MFI_MAX_ARRAYS];
} QEMU_PACKED;
#define MFI_MAX_ROW_SIZE 32
struct mfi_array {
uint64_t size;
uint8_t num_drives;
uint8_t reserved;
uint16_t array_ref;
uint8_t pad[20];
struct {
union mfi_pd_ref ref;
uint16_t fw_state; /* enum mfi_syspd_state */
struct {
uint8_t pd;
uint8_t slot;
} encl;
} pd[MFI_MAX_ROW_SIZE];
} QEMU_PACKED;
struct mfi_config_data {
uint32_t size;
uint16_t array_count;
uint16_t array_size;
uint16_t log_drv_count;
uint16_t log_drv_size;
uint16_t spares_count;
uint16_t spares_size;
uint8_t reserved[16];
/*
struct mfi_array array[];
struct mfi_ld_config ld[];
struct mfi_spare spare[];
*/
} QEMU_PACKED;
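/*
 * Illustrative sketch, not part of the original header: the commented members
 * above describe a variable-length payload that follows the fixed header,
 * assumed here to be laid out as array_count entries of array_size bytes, then
 * log_drv_count entries of log_drv_size bytes, then the spares. A hypothetical
 * helper locating the first logical-drive entry under that assumption:
 */
static inline struct mfi_ld_config *mfi_config_first_ld(struct mfi_config_data *cfg)
{
    uint8_t *p = (uint8_t *)cfg + sizeof(*cfg);       /* skip the fixed header */
    p += (size_t)cfg->array_count * cfg->array_size;  /* skip the array section */
    return (struct mfi_ld_config *)p;
}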
#define MFI_SCSI_MAX_TARGETS 128
#define MFI_SCSI_MAX_LUNS 8
#define MFI_SCSI_INITIATOR_ID 255
#define MFI_SCSI_MAX_CMDS 8
#define MFI_SCSI_MAX_CDB_LEN 16
#endif /* MFI_REG_H */
| {
"pile_set_name": "Github"
} |
53327120110106,300.50,302.00,299.50,300.25,300.199516324062877871,827,22,248265.00,2.50,-0.25
53327120110107,303.90,305.10,297.20,298.10,301.670115792067011579,8118,317,2448958.00,7.90,-5.80
53327120110110,298.00,298.00,283.20,287.05,288.526645768025078369,638,56,184080.00,14.80,-10.95
53327120110111,300.00,302.00,286.05,286.35,291.266666666666666666,3300,169,961180.00,15.95,-13.65
53327120110112,288.00,295.00,286.00,291.05,289.790375741595253790,1517,104,439612.00,9.00,3.05
53327120110113,290.00,294.00,285.50,287.00,287.150837988826815642,358,34,102800.00,8.50,-3.00
53327120110114,291.00,291.00,283.15,285.10,285.770459081836327345,1002,62,286342.00,7.85,-5.90
53327120110117,284.00,288.90,284.00,285.00,285.003558718861209964,1967,191,560602.00,4.90,1.00
53327120110118,283.10,283.10,276.10,281.25,281.368125701459034792,891,82,250699.00,7.00,-1.85
53327120110119,275.00,285.40,275.00,282.70,282.068230277185501066,469,60,132290.00,10.40,7.70
53327120110120,280.10,280.10,271.80,274.55,274.400966183574879227,414,54,113602.00,8.30,-5.55
53327120110121,276.00,279.00,268.75,271.35,272.371158392434988179,423,48,115213.00,10.25,-4.65
53327120110124,274.00,278.00,273.00,277.95,275.758064516129032258,62,10,17097.00,5.00,3.95
53327120110125,276.00,276.50,270.10,270.45,270.790476190476190476,105,13,28433.00,6.40,-5.55
53327120110127,276.75,276.80,269.00,269.10,272.724137931034482758,377,26,102817.00,7.80,-7.65
53327120110128,266.00,271.00,257.00,264.35,264.065656565656565656,1188,86,313710.00,14.00,-1.65
53327120110131,250.10,265.00,250.05,261.20,256.637689287717330342,1783,79,457585.00,14.95,11.10
53327120110201,263.00,263.00,254.05,255.05,255.020488334237655995,46075,76,11750069.00,8.95,-7.95
53327120110202,256.30,258.00,255.00,255.05,255.145121951219512195,2460,39,627657.00,3.00,-1.25
53327120110203,261.00,268.00,255.00,256.20,257.928251121076233183,1338,163,345108.00,13.00,-4.80
53327120110204,259.00,259.00,255.00,256.40,255.137931034482758620,2146,22,547526.00,4.00,-2.60
53327120110207,259.00,265.10,255.10,256.75,257.520884520884520884,407,40,104811.00,10.00,-2.25
53327120110208,255.10,257.00,255.10,255.10,255.164351851851851851,432,27,110231.00,1.90,0.00
53327120110209,255.10,259.00,255.10,255.15,255.179568193786203264,1899,53,484586.00,3.90,0.05
53327120110210,255.10,306.00,255.00,260.35,256.090925689245937557,5477,91,1402610.00,51.00,5.25
53327120110211,275.05,275.05,255.10,259.60,259.547679755226925038,1961,163,508973.00,19.95,-15.45
53327120110214,270.00,292.00,257.60,278.45,275.181912144702842377,1935,117,532477.00,34.40,8.45
53327120110215,291.00,291.00,267.00,268.25,271.627586206896551724,435,52,118158.00,24.00,-22.75
53327120110216,271.00,274.00,265.00,266.60,268.405405405405405405,222,35,59586.00,9.00,-4.40
53327120110217,269.00,269.00,265.05,265.55,265.923076923076923076,130,11,34570.00,3.95,-3.45
53327120110218,268.00,268.00,258.00,261.70,263.552631578947368421,190,39,50075.00,10.00,-6.30
53327120110221,265.00,285.00,265.00,274.15,275.828831041257367387,4072,185,1123175.00,20.00,9.15
53327120110222,277.00,289.00,274.00,277.70,282.243554580362040592,3646,154,1029060.00,15.00,0.70
53327120110223,277.00,277.90,259.00,267.05,264.705955851728446480,4802,272,1271118.00,18.90,-9.95
53327120110224,278.00,278.00,259.30,261.25,264.149193548387096774,248,73,65509.00,18.70,-16.75
53327120110225,266.00,269.00,244.95,248.05,253.933027943197434722,10915,169,2771679.00,24.05,-17.95
53327120110228,255.00,258.95,245.05,246.45,253.535227945529899348,1689,76,428221.00,13.90,-8.55
53327120110301,247.00,252.00,240.05,248.05,244.972088030059044551,1863,99,456383.00,11.95,1.05
53327120110303,244.00,252.00,244.00,246.50,248.988321167883211678,1370,51,341114.00,8.00,2.50
53327120110304,245.00,246.70,240.10,241.95,242.931645569620253164,395,27,95958.00,6.60,-3.05
53327120110307,242.00,256.90,226.00,253.60,241.401186943620178041,3370,298,813522.00,30.90,11.60
53327120110308,231.30,254.95,231.30,247.55,244.145390070921985815,1410,101,344245.00,23.65,16.25
53327120110309,253.00,265.00,245.00,257.70,255.602422907488986784,1816,139,464174.00,20.00,4.70
53327120110310,255.00,264.00,255.00,260.00,260.018332135154565061,2782,44,723371.00,9.00,5.00
53327120110311,260.00,264.85,250.15,262.40,258.994059405940594059,505,64,130792.00,14.70,2.40
53327120110314,259.00,274.70,254.05,267.95,262.496232508073196986,929,100,243859.00,20.65,8.95
53327120110315,269.00,272.90,257.00,264.90,266.328385899814471243,539,85,143551.00,15.90,-4.10
53327120110316,260.05,274.00,260.05,271.25,269.997030410926920382,137056,37,37004713.00,13.95,11.20
53327120110317,274.00,277.00,271.00,272.40,272.679699248120300751,665,58,181332.00,6.00,-1.60
53327120110318,279.95,284.95,274.00,277.50,280.891629297458893871,1338,64,375833.00,10.95,-2.45
53327120110321,275.00,282.90,275.00,279.70,278.556876061120543293,589,21,164070.00,7.90,4.70
53327120110322,271.00,284.00,271.00,276.70,279.812703583061889250,614,59,171805.00,13.00,5.70
53327120110323,275.00,297.70,275.00,287.45,284.589648798521256931,2164,126,615852.00,22.70,12.45
53327120110324,289.00,297.85,280.05,292.95,287.766396213657876943,5916,204,1702426.00,17.80,3.95
53327120110325,292.00,307.70,286.00,299.10,296.731657048639736191,4852,268,1439742.00,21.70,7.10
53327120110328,298.00,304.00,291.00,301.10,295.177409638554216867,3320,161,979989.00,13.00,3.10
53327120110329,288.35,301.00,288.30,294.50,295.188203912270302311,3374,84,995965.00,12.70,6.15
53327120110330,296.00,302.90,295.00,300.10,297.101836393989983305,599,34,177964.00,7.90,4.10
53327120110331,296.00,301.00,293.00,296.80,295.975230296827021494,4885,79,1445839.00,8.00,0.80
53327120110401,295.00,295.00,293.00,293.05,293.903225806451612903,62,9,18222.00,2.00,-1.95
53327120110404,293.00,295.00,288.00,290.35,291.561403508771929824,114,33,33238.00,7.00,-2.65
53327120110405,292.00,303.00,291.00,300.70,297.564275037369207772,1338,59,398141.00,12.00,8.70
53327120110406,305.00,305.00,296.00,298.60,298.159784560143626570,557,37,166075.00,9.00,-6.40
53327120110407,295.00,305.00,295.00,304.25,304.209675090252707581,6925,107,2106652.00,10.00,9.25
53327120110408,305.00,305.00,295.00,304.90,303.836229279009386858,5007,164,1521308.00,10.00,-0.10
53327120110411,301.00,302.00,292.00,294.90,297.418246445497630331,844,99,251021.00,10.00,-6.10
53327120110413,295.00,295.00,276.00,284.10,290.467153284671532846,548,79,159176.00,19.00,-10.90
53327120110415,289.00,289.50,268.05,271.00,276.542105263157894736,1140,107,315258.00,21.45,-18.00
53327120110418,274.00,275.00,258.00,262.45,267.505862646566164154,1791,91,479103.00,17.00,-11.55
53327120110419,263.50,264.50,254.30,255.75,258.956415726912526668,3281,109,849636.00,10.20,-7.75
53327120110420,264.10,270.90,256.55,264.90,262.385815602836879432,2820,135,739928.00,14.35,0.80
53327120110421,265.00,268.00,251.00,260.90,258.062139654067905188,1561,138,402835.00,17.00,-4.10
53327120110425,262.00,266.00,258.05,260.35,260.769607843137254901,612,30,159591.00,7.95,-1.65
53327120110426,260.00,263.90,258.00,259.15,259.849099099099099099,888,48,230746.00,5.90,-0.85
53327120110427,262.00,262.00,258.00,259.35,259.480446927374301675,358,22,92894.00,4.00,-2.65
53327120110428,258.00,262.00,256.00,261.95,258.388254486133768352,613,25,158392.00,6.00,3.95
53327120110429,256.05,260.45,256.00,258.25,257.860000000000000000,950,44,244967.00,4.45,2.20
53327120110502,260.75,265.00,259.10,262.85,261.145390070921985815,282,25,73643.00,5.90,2.10
53327120110503,261.00,261.00,259.00,260.00,259.148483476686283386,4418,31,1144918.00,2.00,-1.00
53327120110504,258.90,268.00,257.10,259.05,261.562112932604735883,2745,154,717988.00,10.90,0.15
53327120110505,258.50,263.00,257.00,261.00,260.912240184757505773,1299,24,338925.00,6.00,2.50
53327120110506,268.00,268.00,260.00,261.00,260.946004319654427645,463,21,120818.00,8.00,-7.00
| {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="utf-8" ?>
<configuration>
<startup>
<supportedRuntime version="v4.0" sku=".NETFramework,Version=v4.6.2" />
</startup>
</configuration> | {
"pile_set_name": "Github"
} |
<!-- -*- indent-tabs-mode: nil -*-
Process this file with ../format.sh to produce asmjs_shmem.html. See comments in that file.
-->
<!doctype html>
<meta charset="utf8">
<title>ECMAScript Shared Memory and Atomics - asm.js addenda</title>
<script src="emu.js"></script>
<link rel="stylesheet" href="emu.css">
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/highlight.js/8.4/styles/github.min.css">
<h1>ECMAScript Shared Memory and Atomics - asm.js addenda</h1>
<p> Revised: 2016-01-22 </p>
<emu-intro id="intro">
<h1>Introduction</h1>
<p> This document defines the changes and amendments to asm.js
semantics to support
<a href="https://github.com/lars-t-hansen/ecmascript_sharedmem">the proposal for ECMAScript shared memory and atomics</a>.
Please refer to that document for full information about the semantics. </p>
<p>This specification is a restatement of
<a href="https://docs.google.com/document/d/19X8Geo_7OMyyUICMvIqzEgSDms0rRMDODdMIkXIm9m0/edit?usp=sharing">an earlier work</a>,
along with later bug fixes. The earlier work is obsolete. </p>
<p>Changelog:</p>
<ul>
<li> 2016-01-22 -- Removed the section on SharedTypedArray in Firefox, that code is gone.
<li> 2015-11-05 -- Noted that the old SharedTypedArray views are now deprecated for asm.js code in Firefox.
<li> 2015-08-31 -- Correct return types on atomic operations. (For load, exchange, compareExchange, add, sub, and, or, and xor the return type is Int, not Signed. For store it is the type of the _value_ argument, not Signed). Remove non-throwing behavior on out-of-range accesses; the operations throw, as they do for non-asm.js code. Move mention of the old SharedTypedArray views to the Firefox section. Clarify wording throughout.
<li> 2015-08-28 -- initial translation from the original proposal.
</ul>
</emu-intro>
<emu-clause id="terminology">
<h1>Terminology</h1>
<p> An "integer-typed array" is one of the ~TypedArray~ views ~Int8Array~, ~Uint8Array~, ~Int16Array~, ~Uint16Array~, ~Int32Array~, and ~Uint32Array~. </p>
<p> A "float-typed array" is one of the TypedArray views ~Float32Array~ and ~Float64Array~. </p>
</emu-clause>
<emu-clause id="views">
<h1>Views</h1>
<p> The heap memory passed to the asm.js module at link time must be
a ~SharedArrayBuffer~ if and only if the module references the
~Atomics~ object (see below). If this constraint fails then a
soft link error ensues and execution falls back to non-asm.js, cf
the asm.js spec section 7. </p>
</emu-clause>
<emu-clause id="atomics">
<h1>Atomics</h1>
<p> There is a new known stdlib object ~Atomics~. This object
provides a number of known, static methods, a subset of the
methods provided in the full specification. The atomic operations
have the same dynamic semantics in asm.js as in full JS. </p>
<emu-note>
<p>The following Atomics names are not available as intrinsics in asm.js:</p>
<ul>
<li> Atomics.futexWait()
<li> Atomics.futexWake()
<li> Atomics.futexWakeOrRequeue()
<li> Atomics.OK
<li> Atomics.NOTEQUAL
<li> Atomics.TIMEDOUT
</ul>
<p> The futex methods can be accessed through the FFI (with the
heap passed implicitly) and the operation result values can be
expanded into constant values, as they have known values.</p>
</emu-note>
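<emu-note>
<p> The following module sketch is illustrative only and is not part of the
original addenda; the names ~Module~, ~get~, and ~put~ are arbitrary. It shows
the intended import and call pattern for the ~Atomics~ stdlib object on an
~Int32Array~ view (element byte size 4, hence indices of the form i>>2). The
heap linked to such a module must be a ~SharedArrayBuffer~. </p>
<pre>
function Module(stdlib, ffi, heap) {
    "use asm";
    var i32 = new stdlib.Int32Array(heap);
    var atomic_load = stdlib.Atomics.load;
    var atomic_store = stdlib.Atomics.store;
    function get(i) {
        i = i|0;
        return atomic_load(i32, i>>2)|0;
    }
    function put(i, v) {
        i = i|0;
        v = v|0;
        atomic_store(i32, i>>2, v|0);
    }
    return { get: get, put: put };
}
</pre>
</emu-note>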
<emu-clause id="atomics.load">
<h1>Atomics.load(view, index)</h1>
<emu-clause id="atomics.load.static">
<h1>Static constraints</h1>
<p> The _view_ must name an integer-typed array mapped onto
shared memory. </p>
<p> The _index_ must be an expression of static type Intish. If
the element byte size of _view_ is not 1 then _index_ must have
the form _C_, where _C_ is a constant, or the form _E>>K_, where
_E_ is some expression and _K_ is a constant that is the
log-base-2 of the element byte size of _view_. </p>
<p> The result type of Atomics.load is Int. </p>
<emu-note>
<p> Treating atomic accesses as "syntax" rather than as
"calls" -- ie, requiring a shift for the index expression --
fits in with how they will be used and allows for
simplifications in code generation. </p>
</emu-note>
</emu-clause>
</emu-clause>
<emu-clause id="atomics.store">
<h1> Atomics.store(view, index, value) </h1>
<emu-clause id="atomics.store.static">
<h1>Static constraints</h1>
<p> The _view_ and _index_ arguments are constrained as for Atomics.load. </p>
<p> The _value_ must be an expression of static type Intish. </p>
<p> The result type of Atomics.store is the actual type of _value_. </p>
</emu-clause>
</emu-clause>
<emu-clause id="atomics.exchange">
<h1> Atomics.exchange(view, index, value) </h1>
<emu-clause id="atomics.exchange.static">
<h1>Static constraints</h1>
<p> The _view_, _index_, and _value_ arguments are constrained as for Atomics.store. </p>
<p> The result type of Atomics.exchange is Int. </p>
</emu-clause>
</emu-clause>
<emu-clause id="atomics.add">
<h1> Atomics.add(view, index, value) </h1>
<p> As for Atomics.exchange. </p>
</emu-clause>
<emu-clause id="atomics.sub">
<h1> Atomics.sub(view, index, value) </h1>
<p> As for Atomics.exchange. </p>
</emu-clause>
<emu-clause id="atomics.and">
<h1> Atomics.and(view, index, value) </h1>
<p> As for Atomics.exchange. </p>
</emu-clause>
<emu-clause id="atomics.or">
<h1> Atomics.or(view, index, value) </h1>
<p> As for Atomics.exchange. </p>
</emu-clause>
<emu-clause id="atomics.xor">
<h1> Atomics.xor(view, index, value) </h1>
<p> As for Atomics.exchange. </p>
</emu-clause>
<emu-clause id="atomics.compareExchange">
<h1> Atomics.compareExchange(view, index, expected, replacement) </h1>
<emu-clause id="atomics.compareExchange.static">
<h1>Static constraints</h1>
<p> The _view_ and _index_ arguments are constrained as for Atomics.load. </p>
<p> The _expected_ and _replacement_ arguments must be
expressions of static type Intish. </p>
<p> The result type of Atomics.compareExchange is Int. </p>
</emu-clause>
</emu-clause>
<emu-clause id="atomics.fence">
<h1> Atomics.fence() </h1>
<p> Implements a full memory barrier. </p>
<emu-note>
<p> Atomics.fence() is no longer a part of the Shared Memory and
Atomics specification and we do not expect to reintroduce it.
It is implemented in Firefox, for now, but will likely be removed. </p>
</emu-note>
<emu-clause id="atomics.fence.static">
<h1>Static constraints</h1>
<p> The result type of Atomics.fence is void. </p>
</emu-clause>
</emu-clause>
<emu-clause id="atomics.isLockFree">
<h1> Atomics.isLockFree(size) </h1>
<emu-clause id="atomics.isLockFree.static">
<h1>Static constraints</h1>
<p> The _size_ argument must be an integer constant. </p>
<p> The result type of Atomics.isLockFree is Int, a boolean value. </p>
<emu-note>
<p> Again, treating Atomics.isLockFree as syntax rather than
as a call fits in with how it will be used and provides
guarantees that it will be resolved at compile time. </p>
</emu-note>
</emu-clause>
</emu-clause>
</emu-clause>
| {
"pile_set_name": "Github"
} |
package stream
import (
"context"
"fmt"
"io"
"math/rand"
"sync"
"time"
"github.com/golang/glog"
"github.com/livepeer/joy4/av"
)
type BasicRTMPVideoStream struct {
appData AppData // opaque app-supplied data
ch chan *av.Packet
listeners map[string]av.MuxCloser
listnersLock *sync.Mutex
dirty bool // set after listeners has been updated; reset after read
header []av.CodecData
EOF chan struct{}
closed bool
closeLock *sync.Mutex
RTMPTimeout time.Duration
}
//NewBasicRTMPVideoStream creates a new BasicRTMPVideoStream. The default RTMPTimeout is set to 10 milliseconds because we assume all RTMP streams are local.
func NewBasicRTMPVideoStream(data AppData) *BasicRTMPVideoStream {
ch := make(chan *av.Packet)
eof := make(chan struct{})
listeners := make(map[string]av.MuxCloser)
lLock := &sync.Mutex{}
cLock := &sync.Mutex{}
s := &BasicRTMPVideoStream{appData: data, listeners: listeners, listnersLock: lLock, ch: ch, EOF: eof, closeLock: cLock, closed: false}
//Automatically start a worker that reads packets. There is no buffering of the video packets.
go func(strm *BasicRTMPVideoStream) {
var cache map[string]av.MuxCloser
for {
select {
case pkt := <-strm.ch:
strm.listnersLock.Lock()
if strm.dirty {
cache = make(map[string]av.MuxCloser)
for d, l := range strm.listeners {
cache[d] = l
}
strm.dirty = false
}
strm.listnersLock.Unlock()
for dstid, l := range cache {
if err := l.WritePacket(*pkt); err != nil {
glog.Infof("RTMP stream got error: %v", err)
go strm.deleteListener(dstid)
}
}
case <-strm.EOF:
return
}
}
}(s)
return s
}
func (s *BasicRTMPVideoStream) GetStreamID() string {
if s.appData == nil {
return ""
}
return s.appData.StreamID()
}
func (s *BasicRTMPVideoStream) AppData() AppData {
return s.appData
}
func (s *BasicRTMPVideoStream) GetStreamFormat() VideoFormat {
return RTMP
}
//ReadRTMPFromStream reads the content from the RTMP stream out into the dst.
func (s *BasicRTMPVideoStream) ReadRTMPFromStream(ctx context.Context, dst av.MuxCloser) (eof chan struct{}, err error) {
// probably not the best named lock to use but lower risk of deadlock
s.closeLock.Lock()
hdr := s.header
s.closeLock.Unlock()
if err := dst.WriteHeader(hdr); err != nil {
return nil, err
}
dstid := randString()
s.listnersLock.Lock()
s.listeners[dstid] = dst
s.dirty = true
s.listnersLock.Unlock()
eof = make(chan struct{})
go func(ctx context.Context, eof chan struct{}, dstid string, dst av.MuxCloser) {
select {
case <-s.EOF:
dst.WriteTrailer()
s.deleteListener(dstid)
eof <- struct{}{}
return
case <-ctx.Done():
dst.WriteTrailer()
s.deleteListener(dstid)
return
}
}(ctx, eof, dstid, dst)
return eof, nil
}
//WriteRTMPToStream writes a video stream from src into the stream.
func (s *BasicRTMPVideoStream) WriteRTMPToStream(ctx context.Context, src av.DemuxCloser) (eof chan struct{}, err error) {
//Set header in case we want to use it.
h, err := src.Streams()
if err != nil {
return nil, err
}
// probably not the best named lock to use but lower risk of deadlock
s.closeLock.Lock()
s.header = h
s.closeLock.Unlock()
eof = make(chan struct{})
go func(ch chan *av.Packet) {
for {
packet, err := src.ReadPacket()
if err != nil {
if err != io.EOF {
glog.Errorf("Error reading packet from RTMP: %v", err)
}
s.Close()
return
}
ch <- &packet
}
}(s.ch)
go func(strmEOF chan struct{}, eof chan struct{}) {
select {
case <-strmEOF:
src.Close()
eof <- struct{}{}
}
}(s.EOF, eof)
return eof, nil
}
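
// pipeRTMP is an illustrative sketch and not part of the original file: it shows
// the intended wiring of the two halves above. WriteRTMPToStream ingests packets
// from src (recording the codec header), ReadRTMPFromStream attaches dst as a
// listener, and the stream shuts down when the source reaches EOF or the caller
// cancels ctx. The src and dst values are assumed to come from the caller, for
// example joy4 RTMP connections.
func pipeRTMP(ctx context.Context, data AppData, src av.DemuxCloser, dst av.MuxCloser) error {
	strm := NewBasicRTMPVideoStream(data)

	// Start ingesting packets from the source.
	wEOF, err := strm.WriteRTMPToStream(ctx, src)
	if err != nil {
		return err
	}

	// Attach dst as a listener; packets are copied to it from now on.
	rEOF, err := strm.ReadRTMPFromStream(ctx, dst)
	if err != nil {
		return err
	}

	select {
	case <-rEOF:
		// The source reached EOF and the stream closed itself.
	case <-ctx.Done():
		// Caller cancelled: close the stream so the ingest goroutine stops too.
		strm.Close()
	}
	<-wEOF // the ingest goroutine signals after closing the source
	return nil
}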
func (s *BasicRTMPVideoStream) Close() {
s.closeLock.Lock()
defer s.closeLock.Unlock()
if s.closed {
return
}
s.closed = true
glog.V(2).Infof("Closing RTMP %v", s.appData.StreamID())
close(s.EOF)
}
func (s *BasicRTMPVideoStream) deleteListener(dstid string) {
s.listnersLock.Lock()
defer s.listnersLock.Unlock()
delete(s.listeners, dstid)
s.dirty = true
}
func (s BasicRTMPVideoStream) String() string {
return fmt.Sprintf("StreamID: %v, Type: %v", s.GetStreamID(), s.GetStreamFormat())
}
func (s BasicRTMPVideoStream) Height() int {
for _, cd := range s.header {
if cd.Type().IsVideo() {
return cd.(av.VideoCodecData).Height()
}
}
return 0
}
func (s BasicRTMPVideoStream) Width() int {
for _, cd := range s.header {
if cd.Type().IsVideo() {
return cd.(av.VideoCodecData).Width()
}
}
return 0
}
func randString() string {
rand.Seed(time.Now().UnixNano())
x := make([]byte, 10, 10)
for i := 0; i < len(x); i++ {
x[i] = byte(rand.Uint32())
}
return string(x)
}
| {
"pile_set_name": "Github"
} |
--
-- PacketFence SQL schema upgrade from 6.1.0 to 6.2.0
--
--
-- Setting the major/minor/sub-minor version of the DB
--
SET @MAJOR_VERSION = 6;
SET @MINOR_VERSION = 2;
SET @SUBMINOR_VERSION = 0;
--
-- The VERSION_INT to ensure proper ordering of the version in queries
--
SET @VERSION_INT = @MAJOR_VERSION << 16 | @MINOR_VERSION << 8 | @SUBMINOR_VERSION;
--
-- Add 'callingstationid' index to radacct table
--
ALTER TABLE radacct ADD KEY `callingstationid` (`callingstationid`);
--
-- Updating to current version
--
INSERT INTO pf_version (id, version) VALUES (@VERSION_INT, CONCAT_WS('.', @MAJOR_VERSION, @MINOR_VERSION, @SUBMINOR_VERSION));
| {
"pile_set_name": "Github"
} |
cmake_minimum_required(VERSION 2.8)
project(ORB_SLAM2)
IF(NOT CMAKE_BUILD_TYPE)
SET(CMAKE_BUILD_TYPE Release)
ENDIF()
MESSAGE("Build type: " ${CMAKE_BUILD_TYPE})
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -Wall -O3 -march=native ")
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wall -O3 -march=native")
# Check C++11 or C++0x support
include(CheckCXXCompilerFlag)
CHECK_CXX_COMPILER_FLAG("-std=c++11" COMPILER_SUPPORTS_CXX11)
CHECK_CXX_COMPILER_FLAG("-std=c++0x" COMPILER_SUPPORTS_CXX0X)
if(COMPILER_SUPPORTS_CXX11)
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11")
add_definitions(-DCOMPILEDWITHC11)
message(STATUS "Using flag -std=c++11.")
elseif(COMPILER_SUPPORTS_CXX0X)
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++0x")
add_definitions(-DCOMPILEDWITHC0X)
message(STATUS "Using flag -std=c++0x.")
else()
message(FATAL_ERROR "The compiler ${CMAKE_CXX_COMPILER} has no C++11 support. Please use a different C++ compiler.")
endif()
LIST(APPEND CMAKE_MODULE_PATH ${PROJECT_SOURCE_DIR}/cmake_modules)
find_package(OpenCV 2.4.3 REQUIRED)
find_package(Eigen3 3.1.0 REQUIRED)
find_package(Pangolin REQUIRED)
find_package(Cholmod REQUIRED)
include_directories(
${PROJECT_SOURCE_DIR}
${PROJECT_SOURCE_DIR}/include
${PROJECT_SOURCE_DIR}/src
${EIGEN3_INCLUDE_DIR}
${Pangolin_INCLUDE_DIRS}
${CHOLMOD_INCLUDE_DIR}
)
set(CMAKE_LIBRARY_OUTPUT_DIRECTORY ${PROJECT_SOURCE_DIR}/lib)
add_library(${PROJECT_NAME} SHARED
src/System.cc
src/Tracking.cc
src/LocalMapping.cc
src/LoopClosing.cc
src/ORBextractor.cc
src/ORBmatcher.cc
src/FrameDrawer.cc
src/Converter.cc
src/MapPoint.cc
src/KeyFrame.cc
src/Map.cc
src/MapDrawer.cc
src/Optimizer.cc
src/PnPsolver.cc
src/Frame.cc
src/KeyFrameDatabase.cc
src/Sim3Solver.cc
src/Initializer.cc
src/Viewer.cc
include/Frame.h
include/KeyFrame.h
include/Tracking.h
include/LocalMapping.h
src/IMU/configparam.h
src/IMU/configparam.cpp
src/IMU/imudata.h
src/IMU/imudata.cpp
src/IMU/IMUPreintegrator.h
src/IMU/IMUPreintegrator.cpp
src/IMU/so3.cpp
src/IMU/so3.h
src/IMU/NavState.h
src/IMU/NavState.cpp
src/IMU/g2otypes.h
src/IMU/g2otypes.cpp
src/IMU/RK4OnManifold.h
src/IMU/RK4OnManifold.cpp
)
target_link_libraries(${PROJECT_NAME}
${OpenCV_LIBS}
${EIGEN3_LIBS}
${Pangolin_LIBRARIES}
${PROJECT_SOURCE_DIR}/Thirdparty/DBoW2/lib/libDBoW2.so
${PROJECT_SOURCE_DIR}/Thirdparty/g2o/lib/libg2o.so
cholmod
${CHOLMOD_LIBRARIES}
${BLAS_LIBRARIES}
${LAPACK_LIBRARIES}
)
## Build examples
#set(CMAKE_RUNTIME_OUTPUT_DIRECTORY ${PROJECT_SOURCE_DIR}/Examples/RGB-D)
#add_executable(rgbd_tum
#Examples/RGB-D/rgbd_tum.cc)
#target_link_libraries(rgbd_tum ${PROJECT_NAME})
#set(CMAKE_RUNTIME_OUTPUT_DIRECTORY ${PROJECT_SOURCE_DIR}/Examples/Stereo)
#add_executable(stereo_kitti
#Examples/Stereo/stereo_kitti.cc)
#target_link_libraries(stereo_kitti ${PROJECT_NAME})
#add_executable(stereo_euroc
#Examples/Stereo/stereo_euroc.cc)
#target_link_libraries(stereo_euroc ${PROJECT_NAME})
#set(CMAKE_RUNTIME_OUTPUT_DIRECTORY ${PROJECT_SOURCE_DIR}/Examples/Monocular)
#add_executable(mono_tum
#Examples/Monocular/mono_tum.cc)
#target_link_libraries(mono_tum ${PROJECT_NAME})
#add_executable(mono_kitti
#Examples/Monocular/mono_kitti.cc)
#target_link_libraries(mono_kitti ${PROJECT_NAME})
#add_executable(mono_euroc
#Examples/Monocular/mono_euroc.cc)
#target_link_libraries(mono_euroc ${PROJECT_NAME})
## Build tools
#set(CMAKE_RUNTIME_OUTPUT_DIRECTORY ${PROJECT_SOURCE_DIR}/tools)
#add_executable(bin_vocabulary
#tools/bin_vocabulary.cc)
#target_link_libraries(bin_vocabulary ${PROJECT_NAME})
| {
"pile_set_name": "Github"
} |
# Copyright (C) 1998-2018 Free Software Foundation, Inc.
# This file is part of the GNU C Library.
# The GNU C Library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
# The GNU C Library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
# You should have received a copy of the GNU Lesser General Public
# License along with the GNU C Library; if not, see
# <http://www.gnu.org/licenses/>.
#
# Makefile for streams.
#
subdir := streams
include ../Makeconfig
headers = stropts.h sys/stropts.h bits/stropts.h bits/xtitypes.h
routines = isastream getmsg getpmsg putmsg putpmsg fattach fdetach
include ../Rules
| {
"pile_set_name": "Github"
} |
/**
* @file
* TCP API (to be used from TCPIP thread)\n
* See also @ref tcp_raw
*/
/*
* Copyright (c) 2001-2004 Swedish Institute of Computer Science.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT
* SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
* OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
* IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY
* OF SUCH DAMAGE.
*
* This file is part of the lwIP TCP/IP stack.
*
* Author: Adam Dunkels <[email protected]>
*
*/
#ifndef LWIP_HDR_TCP_H
#define LWIP_HDR_TCP_H
#include "lwip/opt.h"
#if LWIP_TCP /* don't build if not configured for use in lwipopts.h */
#include "lwip/mem.h"
#include "lwip/pbuf.h"
#include "lwip/ip.h"
#include "lwip/icmp.h"
#include "lwip/err.h"
#include "lwip/ip6.h"
#include "lwip/ip6_addr.h"
#ifdef __cplusplus
extern "C" {
#endif
struct tcp_pcb;
/** Function prototype for tcp accept callback functions. Called when a new
* connection can be accepted on a listening pcb.
*
* @param arg Additional argument to pass to the callback function (@see tcp_arg())
* @param newpcb The new connection pcb
* @param err An error code if there has been an error accepting.
* Only return ERR_ABRT if you have called tcp_abort from within the
* callback function!
*/
typedef err_t (*tcp_accept_fn)(void *arg, struct tcp_pcb *newpcb, err_t err);
/** Function prototype for tcp receive callback functions. Called when data has
* been received.
*
* @param arg Additional argument to pass to the callback function (@see tcp_arg())
* @param tpcb The connection pcb which received data
* @param p The received data (or NULL when the connection has been closed!)
* @param err An error code if there has been an error receiving
* Only return ERR_ABRT if you have called tcp_abort from within the
* callback function!
*/
typedef err_t (*tcp_recv_fn)(void *arg, struct tcp_pcb *tpcb,
struct pbuf *p, err_t err);
/** Function prototype for tcp sent callback functions. Called when sent data has
* been acknowledged by the remote side. Use it to free corresponding resources.
* This also means that the pcb has now space available to send new data.
*
* @param arg Additional argument to pass to the callback function (@see tcp_arg())
* @param tpcb The connection pcb for which data has been acknowledged
* @param len The amount of bytes acknowledged
* @return ERR_OK: try to send some data by calling tcp_output
* Only return ERR_ABRT if you have called tcp_abort from within the
* callback function!
*/
typedef err_t (*tcp_sent_fn)(void *arg, struct tcp_pcb *tpcb,
u16_t len);
/** Function prototype for tcp poll callback functions. Called periodically as
* specified by @see tcp_poll.
*
* @param arg Additional argument to pass to the callback function (@see tcp_arg())
* @param tpcb tcp pcb
* @return ERR_OK: try to send some data by calling tcp_output
* Only return ERR_ABRT if you have called tcp_abort from within the
* callback function!
*/
typedef err_t (*tcp_poll_fn)(void *arg, struct tcp_pcb *tpcb);
/** Function prototype for tcp error callback functions. Called when the pcb
* receives a RST or is unexpectedly closed for any other reason.
*
* @note The corresponding pcb is already freed when this callback is called!
*
* @param arg Additional argument to pass to the callback function (@see tcp_arg())
* @param err Error code to indicate why the pcb has been closed
* ERR_ABRT: aborted through tcp_abort or by a TCP timer
* ERR_RST: the connection was reset by the remote host
*/
typedef void (*tcp_err_fn)(void *arg, err_t err);
/** Function prototype for tcp connected callback functions. Called when a pcb
* is connected to the remote side after initiating a connection attempt by
* calling tcp_connect().
*
* @param arg Additional argument to pass to the callback function (@see tcp_arg())
* @param tpcb The connection pcb which is connected
* @param err An unused error code, always ERR_OK currently ;-) @todo!
* Only return ERR_ABRT if you have called tcp_abort from within the
* callback function!
*
* @note When a connection attempt fails, the error callback is currently called!
*/
typedef err_t (*tcp_connected_fn)(void *arg, struct tcp_pcb *tpcb, err_t err);
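
/* Illustrative sketch (not part of the original header): typical raw-API wiring
 * for a minimal TCP echo server using the callback types defined above and the
 * functions declared below. Error handling, send-buffer checks and chained-pbuf
 * handling are omitted; the port number 7 and the function names are arbitrary.
 *
 *   static err_t echo_recv(void *arg, struct tcp_pcb *tpcb, struct pbuf *p, err_t err)
 *   {
 *     if (p == NULL) {                      // remote side closed the connection
 *       return tcp_close(tpcb);
 *     }
 *     tcp_recved(tpcb, p->tot_len);         // advertise more receive window
 *     tcp_write(tpcb, p->payload, p->len, TCP_WRITE_FLAG_COPY);
 *     pbuf_free(p);
 *     return ERR_OK;
 *   }
 *
 *   static err_t echo_accept(void *arg, struct tcp_pcb *newpcb, err_t err)
 *   {
 *     tcp_recv(newpcb, echo_recv);          // register the receive callback
 *     return ERR_OK;
 *   }
 *
 *   void echo_init(void)
 *   {
 *     struct tcp_pcb *pcb = tcp_new();
 *     tcp_bind(pcb, IP_ADDR_ANY, 7);
 *     pcb = tcp_listen(pcb);                // returns a smaller listening pcb
 *     tcp_accept(pcb, echo_accept);
 *   }
 */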
#if LWIP_WND_SCALE
#define RCV_WND_SCALE(pcb, wnd) (((wnd) >> (pcb)->rcv_scale))
#define SND_WND_SCALE(pcb, wnd) (((wnd) << (pcb)->snd_scale))
#define TCPWND16(x) ((u16_t)LWIP_MIN((x), 0xFFFF))
#define TCP_WND_MAX(pcb) ((tcpwnd_size_t)(((pcb)->flags & TF_WND_SCALE) ? TCP_WND : TCPWND16(TCP_WND)))
typedef u32_t tcpwnd_size_t;
#else
#define RCV_WND_SCALE(pcb, wnd) (wnd)
#define SND_WND_SCALE(pcb, wnd) (wnd)
#define TCPWND16(x) (x)
#define TCP_WND_MAX(pcb) TCP_WND
typedef u16_t tcpwnd_size_t;
#endif
#if LWIP_WND_SCALE || TCP_LISTEN_BACKLOG || LWIP_TCP_TIMESTAMPS
typedef u16_t tcpflags_t;
#else
typedef u8_t tcpflags_t;
#endif
enum tcp_state {
CLOSED = 0,
LISTEN = 1,
SYN_SENT = 2,
SYN_RCVD = 3,
ESTABLISHED = 4,
FIN_WAIT_1 = 5,
FIN_WAIT_2 = 6,
CLOSE_WAIT = 7,
CLOSING = 8,
LAST_ACK = 9,
TIME_WAIT = 10
};
/**
* members common to struct tcp_pcb and struct tcp_listen_pcb
*/
#define TCP_PCB_COMMON(type) \
type *next; /* for the linked list */ \
void *callback_arg; \
enum tcp_state state; /* TCP state */ \
u8_t prio; \
/* ports are in host byte order */ \
u16_t local_port
/** the TCP protocol control block for listening pcbs */
struct tcp_pcb_listen {
/** Common members of all PCB types */
IP_PCB;
/** Protocol specific PCB members */
TCP_PCB_COMMON(struct tcp_pcb_listen);
#if LWIP_CALLBACK_API
/* Function to call when a listener has been connected. */
tcp_accept_fn accept;
#endif /* LWIP_CALLBACK_API */
#if TCP_LISTEN_BACKLOG
u8_t backlog;
u8_t accepts_pending;
#endif /* TCP_LISTEN_BACKLOG */
};
/** the TCP protocol control block */
struct tcp_pcb {
/** common PCB members */
IP_PCB;
/** protocol specific PCB members */
TCP_PCB_COMMON(struct tcp_pcb);
/* ports are in host byte order */
u16_t remote_port;
tcpflags_t flags;
#define TF_ACK_DELAY 0x01U /* Delayed ACK. */
#define TF_ACK_NOW 0x02U /* Immediate ACK. */
#define TF_INFR 0x04U /* In fast recovery. */
#define TF_CLOSEPEND 0x08U /* If this is set, tcp_close failed to enqueue the FIN (retried in tcp_tmr) */
#define TF_RXCLOSED 0x10U /* rx closed by tcp_shutdown */
#define TF_FIN 0x20U /* Connection was closed locally (FIN segment enqueued). */
#define TF_NODELAY 0x40U /* Disable Nagle algorithm */
#define TF_NAGLEMEMERR 0x80U /* nagle enabled, memerr, try to output to prevent delayed ACK to happen */
#if LWIP_WND_SCALE
#define TF_WND_SCALE 0x0100U /* Window Scale option enabled */
#endif
#if TCP_LISTEN_BACKLOG
#define TF_BACKLOGPEND 0x0200U /* If this is set, a connection pcb has increased the backlog on its listener */
#endif
#if LWIP_TCP_TIMESTAMPS
#define TF_TIMESTAMP 0x0400U /* Timestamp option enabled */
#endif
/* the rest of the fields are in host byte order
as we have to do some math with them */
/* Timers */
u8_t polltmr, pollinterval;
u8_t last_timer;
u32_t tmr;
/* receiver variables */
u32_t rcv_nxt; /* next seqno expected */
tcpwnd_size_t rcv_wnd; /* receiver window available */
tcpwnd_size_t rcv_ann_wnd; /* receiver window to announce */
u32_t rcv_ann_right_edge; /* announced right edge of window */
/* Retransmission timer. */
s16_t rtime;
u16_t mss; /* maximum segment size */
/* RTT (round trip time) estimation variables */
u32_t rttest; /* RTT estimate in 500ms ticks */
u32_t rtseq; /* sequence number being timed */
s16_t sa, sv; /* @todo document this */
s16_t rto; /* retransmission time-out */
u8_t nrtx; /* number of retransmissions */
/* fast retransmit/recovery */
u8_t dupacks;
u32_t lastack; /* Highest acknowledged seqno. */
/* congestion avoidance/control variables */
tcpwnd_size_t cwnd;
tcpwnd_size_t ssthresh;
/* sender variables */
u32_t snd_nxt; /* next new seqno to be sent */
u32_t snd_wl1, snd_wl2; /* Sequence and acknowledgement numbers of last
window update. */
u32_t snd_lbb; /* Sequence number of next byte to be buffered. */
tcpwnd_size_t snd_wnd; /* sender window */
tcpwnd_size_t snd_wnd_max; /* the maximum sender window announced by the remote host */
tcpwnd_size_t snd_buf; /* Available buffer space for sending (in bytes). */
#define TCP_SNDQUEUELEN_OVERFLOW (0xffffU-3)
u16_t snd_queuelen; /* Number of pbufs currently in the send buffer. */
#if TCP_OVERSIZE
/* Extra bytes available at the end of the last pbuf in unsent. */
u16_t unsent_oversize;
#endif /* TCP_OVERSIZE */
/* These are ordered by sequence number: */
struct tcp_seg *unsent; /* Unsent (queued) segments. */
struct tcp_seg *unacked; /* Sent but unacknowledged segments. */
#if TCP_QUEUE_OOSEQ
struct tcp_seg *ooseq; /* Received out of sequence segments. */
#endif /* TCP_QUEUE_OOSEQ */
struct pbuf *refused_data; /* Data previously received but not yet taken by upper layer */
#if LWIP_CALLBACK_API || TCP_LISTEN_BACKLOG
struct tcp_pcb_listen* listener;
#endif /* LWIP_CALLBACK_API || TCP_LISTEN_BACKLOG */
#if LWIP_CALLBACK_API
/* Function to be called when more send buffer space is available. */
tcp_sent_fn sent;
/* Function to be called when (in-sequence) data has arrived. */
tcp_recv_fn recv;
/* Function to be called when a connection has been set up. */
tcp_connected_fn connected;
/* Function which is called periodically. */
tcp_poll_fn poll;
/* Function to be called whenever a fatal error occurs. */
tcp_err_fn errf;
#endif /* LWIP_CALLBACK_API */
#if LWIP_TCP_TIMESTAMPS
u32_t ts_lastacksent;
u32_t ts_recent;
#endif /* LWIP_TCP_TIMESTAMPS */
/* idle time before KEEPALIVE is sent */
u32_t keep_idle;
#if LWIP_TCP_KEEPALIVE
u32_t keep_intvl;
u32_t keep_cnt;
#endif /* LWIP_TCP_KEEPALIVE */
/* Persist timer counter */
u8_t persist_cnt;
/* Persist timer back-off */
u8_t persist_backoff;
/* KEEPALIVE counter */
u8_t keep_cnt_sent;
#if LWIP_WND_SCALE
u8_t snd_scale;
u8_t rcv_scale;
#endif
};
#if LWIP_EVENT_API
enum lwip_event {
LWIP_EVENT_ACCEPT,
LWIP_EVENT_SENT,
LWIP_EVENT_RECV,
LWIP_EVENT_CONNECTED,
LWIP_EVENT_POLL,
LWIP_EVENT_ERR
};
err_t lwip_tcp_event(void *arg, struct tcp_pcb *pcb,
enum lwip_event,
struct pbuf *p,
u16_t size,
err_t err);
#endif /* LWIP_EVENT_API */
/* Application program's interface: */
struct tcp_pcb * tcp_new (void);
struct tcp_pcb * tcp_new_ip_type (u8_t type);
void tcp_arg (struct tcp_pcb *pcb, void *arg);
#if LWIP_CALLBACK_API
void tcp_recv (struct tcp_pcb *pcb, tcp_recv_fn recv);
void tcp_sent (struct tcp_pcb *pcb, tcp_sent_fn sent);
void tcp_err (struct tcp_pcb *pcb, tcp_err_fn err);
void tcp_accept (struct tcp_pcb *pcb, tcp_accept_fn accept);
#endif /* LWIP_CALLBACK_API */
void tcp_poll (struct tcp_pcb *pcb, tcp_poll_fn poll, u8_t interval);
#if LWIP_TCP_TIMESTAMPS
#define tcp_mss(pcb) (((pcb)->flags & TF_TIMESTAMP) ? ((pcb)->mss - 12) : (pcb)->mss)
#else /* LWIP_TCP_TIMESTAMPS */
#define tcp_mss(pcb) ((pcb)->mss)
#endif /* LWIP_TCP_TIMESTAMPS */
#define tcp_sndbuf(pcb) (TCPWND16((pcb)->snd_buf))
#define tcp_sndqueuelen(pcb) ((pcb)->snd_queuelen)
/** @ingroup tcp_raw */
#define tcp_nagle_disable(pcb) ((pcb)->flags |= TF_NODELAY)
/** @ingroup tcp_raw */
#define tcp_nagle_enable(pcb) ((pcb)->flags = (tcpflags_t)((pcb)->flags & ~TF_NODELAY))
/** @ingroup tcp_raw */
#define tcp_nagle_disabled(pcb) (((pcb)->flags & TF_NODELAY) != 0)
#if TCP_LISTEN_BACKLOG
#define tcp_backlog_set(pcb, new_backlog) do { \
LWIP_ASSERT("pcb->state == LISTEN (called for wrong pcb?)", (pcb)->state == LISTEN); \
((struct tcp_pcb_listen *)(pcb))->backlog = ((new_backlog) ? (new_backlog) : 1); } while(0)
void tcp_backlog_delayed(struct tcp_pcb* pcb);
void tcp_backlog_accepted(struct tcp_pcb* pcb);
#else /* TCP_LISTEN_BACKLOG */
#define tcp_backlog_set(pcb, new_backlog)
#define tcp_backlog_delayed(pcb)
#define tcp_backlog_accepted(pcb)
#endif /* TCP_LISTEN_BACKLOG */
#define tcp_accepted(pcb) /* compatibility define, not needed any more */
void tcp_recved (struct tcp_pcb *pcb, u16_t len);
err_t tcp_bind (struct tcp_pcb *pcb, const ip_addr_t *ipaddr,
u16_t port);
err_t tcp_connect (struct tcp_pcb *pcb, const ip_addr_t *ipaddr,
u16_t port, tcp_connected_fn connected);
struct tcp_pcb * tcp_listen_with_backlog_and_err(struct tcp_pcb *pcb, u8_t backlog, err_t *err);
struct tcp_pcb * tcp_listen_with_backlog(struct tcp_pcb *pcb, u8_t backlog);
/** @ingroup tcp_raw */
#define tcp_listen(pcb) tcp_listen_with_backlog(pcb, TCP_DEFAULT_LISTEN_BACKLOG)
void tcp_abort (struct tcp_pcb *pcb);
err_t tcp_close (struct tcp_pcb *pcb);
err_t tcp_shutdown(struct tcp_pcb *pcb, int shut_rx, int shut_tx);
/* Flags for "apiflags" parameter in tcp_write */
#define TCP_WRITE_FLAG_COPY 0x01
#define TCP_WRITE_FLAG_MORE 0x02
err_t tcp_write (struct tcp_pcb *pcb, const void *dataptr, u16_t len,
u8_t apiflags);
void tcp_setprio (struct tcp_pcb *pcb, u8_t prio);
#define TCP_PRIO_MIN 1
#define TCP_PRIO_NORMAL 64
#define TCP_PRIO_MAX 127
err_t tcp_output (struct tcp_pcb *pcb);
const char* tcp_debug_state_str(enum tcp_state s);
/* for compatibility with older implementation */
#define tcp_new_ip6() tcp_new_ip_type(IPADDR_TYPE_V6)
#ifdef __cplusplus
}
#endif
#endif /* LWIP_TCP */
#endif /* LWIP_HDR_TCP_H */
| {
"pile_set_name": "Github"
} |
<!DOCTYPE html>
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
<title>The source code</title>
<link href="../resources/prettify/prettify.css" type="text/css" rel="stylesheet" />
<script type="text/javascript" src="../resources/prettify/prettify.js"></script>
<style type="text/css">
.highlight { display: block; background-color: #ddd; }
</style>
<script type="text/javascript">
function highlight() {
document.getElementById(location.hash.replace(/#/, "")).className = "highlight";
}
</script>
</head>
<body onload="prettyPrint(); highlight();">
<pre class="prettyprint lang-js"><span id='Ext-chart-axis-Numeric'>/**
</span> * @class Ext.chart.axis.Numeric
* @extends Ext.chart.axis.Axis
*
* An axis to handle numeric values. This axis is used for quantitative data as
* opposed to the category axis. You can set mininum and maximum values to the
* axis so that the values are bound to that. If no values are set, then the
* scale will auto-adjust to the values.
*
* @example
* var store = Ext.create('Ext.data.JsonStore', {
* fields: ['name', 'data1', 'data2', 'data3', 'data4', 'data5'],
* data: [
* {'name':'metric one', 'data1':10, 'data2':12, 'data3':14, 'data4':8, 'data5':13},
* {'name':'metric two', 'data1':7, 'data2':8, 'data3':16, 'data4':10, 'data5':3},
* {'name':'metric three', 'data1':5, 'data2':2, 'data3':14, 'data4':12, 'data5':7},
* {'name':'metric four', 'data1':2, 'data2':14, 'data3':6, 'data4':1, 'data5':23},
* {'name':'metric five', 'data1':27, 'data2':38, 'data3':36, 'data4':13, 'data5':33}
* ]
* });
*
* Ext.create('Ext.chart.Chart', {
* renderTo: Ext.getBody(),
* width: 500,
* height: 300,
* store: store,
* axes: [{
* type: 'Numeric',
* position: 'left',
* fields: ['data1', 'data2', 'data3', 'data4', 'data5'],
* title: 'Sample Values',
* grid: {
* odd: {
* opacity: 1,
* fill: '#ddd',
* stroke: '#bbb',
* 'stroke-width': 1
* }
* },
* minimum: 0,
* adjustMinimumByMajorUnit: 0
* }, {
* type: 'Category',
* position: 'bottom',
* fields: ['name'],
* title: 'Sample Metrics',
* grid: true,
* label: {
* rotate: {
* degrees: 315
* }
* }
* }],
* series: [{
* type: 'area',
* highlight: false,
* axis: 'left',
* xField: 'name',
* yField: ['data1', 'data2', 'data3', 'data4', 'data5'],
* style: {
* opacity: 0.93
* }
* }]
* });
*
* In this example we create an axis of Numeric type. We set a minimum value so that
* even if all series have values greater than zero, the grid starts at zero. We bind
* the axis onto the left part of the surface by setting `position` to `left`.
* We bind three different store fields to this axis by setting `fields` to an array.
 * We set the title of the axis to _Sample Values_ by using the `title` property.
* We use a `grid` configuration to set odd background rows to a certain style and even rows
* to be transparent/ignored.
*/
Ext.define('Ext.chart.axis.Numeric', {
/* Begin Definitions */
extend: 'Ext.chart.axis.Axis',
alternateClassName: 'Ext.chart.NumericAxis',
/* End Definitions */
type: 'numeric',
alias: 'axis.numeric',
constructor: function(config) {
var me = this,
hasLabel = !!(config.label && config.label.renderer),
label;
me.callParent([config]);
label = me.label;
if (me.roundToDecimal === false) {
return;
}
if (!hasLabel) {
label.renderer = function(v) {
return me.roundToDecimal(v, me.decimals);
};
}
},
roundToDecimal: function(v, dec) {
var val = Math.pow(10, dec || 0);
return Math.floor(v * val) / val;
},
<span id='Ext-chart-axis-Numeric-property-minimum'> /**
</span> * The minimum value drawn by the axis. If not set explicitly, the axis
* minimum will be calculated automatically.
*
* @property {Number} minimum
*/
minimum: NaN,
<span id='Ext-chart-axis-Numeric-property-maximum'> /**
</span> * The maximum value drawn by the axis. If not set explicitly, the axis
* maximum will be calculated automatically.
*
* @property {Number} maximum
*/
maximum: NaN,
<span id='Ext-chart-axis-Numeric-property-decimals'> /**
</span> * The number of decimals to round the value to.
*
* @property {Number} decimals
*/
decimals: 2,
<span id='Ext-chart-axis-Numeric-property-scale'> /**
</span> * The scaling algorithm to use on this axis. May be "linear" or
* "logarithmic". Currently only linear scale is implemented.
*
* @property {String} scale
* @private
*/
scale: "linear",
<span id='Ext-chart-axis-Numeric-property-position'> /**
</span> * Indicates the position of the axis relative to the chart
*
* @property {String} position
*/
position: 'left',
<span id='Ext-chart-axis-Numeric-property-adjustMaximumByMajorUnit'> /**
</span> * Indicates whether to extend maximum beyond data's maximum to the nearest
* majorUnit.
*
* @property {Boolean} adjustMaximumByMajorUnit
*/
adjustMaximumByMajorUnit: false,
<span id='Ext-chart-axis-Numeric-property-adjustMinimumByMajorUnit'> /**
</span> * Indicates whether to extend the minimum beyond data's minimum to the
* nearest majorUnit.
*
* @property {Boolean} adjustMinimumByMajorUnit
*/
adjustMinimumByMajorUnit: false,
// @private apply data.
applyData: function() {
this.callParent();
return this.calcEnds();
}
});
</pre>
</body>
</html>
| {
"pile_set_name": "Github"
} |
<metadata>
<!--
This sample removes the class: android.support.v4.content.AsyncTaskLoader.LoadTask:
<remove-node path="/api/package[@name='android.support.v4.content']/class[@name='AsyncTaskLoader.LoadTask']" />
This sample removes the method: android.support.v4.content.CursorLoader.loadInBackground:
<remove-node path="/api/package[@name='android.support.v4.content']/class[@name='CursorLoader']/method[@name='loadInBackground']" />
-->
<remove-node path="/api/package[starts-with(@name, 'com.actionbarsherlock.internal')]" />
</metadata>
| {
"pile_set_name": "Github"
} |
package auth
import (
"crypto/rand"
"encoding/base64"
"errors"
"fmt"
"time"
"github.com/gammazero/nexus/v3/wamp"
"github.com/gammazero/nexus/v3/wamp/crsign"
)
// CRAuthenticator is a challenge-response authenticator.
type CRAuthenticator struct {
keyStore KeyStore
timeout time.Duration
}
// NewCRAuthenticator creates a new CRAuthenticator with the given key store
// and the maximum time to wait for a client to respond to a CHALLENGE message.
func NewCRAuthenticator(keyStore KeyStore, timeout time.Duration) *CRAuthenticator {
return &CRAuthenticator{
keyStore: keyStore,
timeout: timeout,
}
}
func (cr *CRAuthenticator) AuthMethod() string { return "wampcra" }
func (cr *CRAuthenticator) Authenticate(sid wamp.ID, details wamp.Dict, client wamp.Peer) (*wamp.Welcome, error) {
authid, _ := wamp.AsString(details["authid"])
if authid == "" {
return nil, errors.New("missing authid")
}
authrole, err := cr.keyStore.AuthRole(authid)
if err != nil {
// Do not error here since that leaks authid info.
authrole = "user"
}
ks, ok := cr.keyStore.(BypassKeyStore)
if ok {
if ks.AlreadyAuth(authid, details) {
// Create welcome details containing auth info.
welcome := &wamp.Welcome{
Details: wamp.Dict{
"authid": authid,
"authrole": authrole,
"authmethod": cr.AuthMethod(),
"authprovider": cr.keyStore.Provider(),
},
}
if err = ks.OnWelcome(authid, welcome, details); err != nil {
return nil, err
}
return welcome, nil
}
}
// Get the key and authrole needed for signing the challenge string.
key, err := cr.keyStore.AuthKey(authid, cr.AuthMethod())
if err != nil {
// Do not error here since that leaks authid info.
keyStr, _ := nonce()
if keyStr == "" {
keyStr = wamp.NowISO8601()
}
key = []byte(keyStr)
}
// Create the JSON encoded challenge string.
chStr, err := cr.makeChallengeStr(sid, authid, authrole)
if err != nil {
return nil, err
}
extra := wamp.Dict{"challenge": chStr}
// If key was created using PBKDF2, then salting info should be present.
salt, keylen, iters := cr.keyStore.PasswordInfo(authid)
if salt != "" {
extra["salt"] = salt
extra["keylen"] = keylen
extra["iterations"] = iters
}
// Challenge response needed. Send CHALLENGE message to client.
err = client.Send(&wamp.Challenge{
AuthMethod: cr.AuthMethod(),
Extra: extra,
})
if err != nil {
return nil, err
}
// Read AUTHENTICATE response from client.
msg, err := wamp.RecvTimeout(client, cr.timeout)
if err != nil {
return nil, err
}
authRsp, ok := msg.(*wamp.Authenticate)
if !ok {
return nil, fmt.Errorf("unexpected %v message received from client %v",
msg.MessageType(), client)
}
// Check signature.
if !crsign.VerifySignature(authRsp.Signature, chStr, key) {
return nil, errors.New("invalid signature")
}
// Create welcome message containing auth info.
welcome := &wamp.Welcome{
Details: wamp.Dict{
"authid": authid,
"authrole": authrole,
"authmethod": cr.AuthMethod(),
"authprovider": cr.keyStore.Provider(),
},
}
if ks != nil {
// Tell the keystore that the client was authenticated, and provide the
// transport details if available.
if err = ks.OnWelcome(authid, welcome, details); err != nil {
return nil, err
}
}
return welcome, nil
}
func (cr *CRAuthenticator) makeChallengeStr(session wamp.ID, authid, authrole string) (string, error) {
nonce, err := nonce()
if err != nil {
return "", fmt.Errorf("failed to get nonce: %s", err)
}
return fmt.Sprintf(
"{ \"nonce\":\"%s\", \"authprovider\":\"%s\", \"authid\":\"%s\", \"timestamp\":\"%s\", \"authrole\":\"%s\", \"authmethod\":\"%s\", \"session\":%d }",
nonce, cr.keyStore.Provider(), authid, wamp.NowISO8601(), authrole,
cr.AuthMethod(), int(session)), nil
}
// nonce generates 16 random bytes as a base64 encoded string.
func nonce() (string, error) {
b := make([]byte, 16)
_, err := rand.Read(b)
if err != nil {
return "", err
}
return base64.StdEncoding.EncodeToString(b), nil
}
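
// The types and functions below are an illustrative sketch and not part of the
// original file. staticKeyStore implements the method set of KeyStore that is
// actually used above (AuthKey, AuthRole, PasswordInfo, Provider); if the real
// KeyStore interface declares additional methods, this sketch would need to be
// extended accordingly. Secrets are stored in clear text here purely for brevity.
type staticKeyStore struct {
	authid string
	secret []byte
}

func (s *staticKeyStore) AuthKey(authid, authmethod string) ([]byte, error) {
	if authid != s.authid || authmethod != "wampcra" {
		return nil, errors.New("no key for authid/authmethod")
	}
	return s.secret, nil
}

func (s *staticKeyStore) AuthRole(authid string) (string, error) { return "user", nil }

// PasswordInfo returns empty salting info, meaning the raw secret is used
// directly rather than a PBKDF2-derived key.
func (s *staticKeyStore) PasswordInfo(authid string) (string, int, int) { return "", 0, 0 }

func (s *staticKeyStore) Provider() string { return "static" }

// newExampleCRAuthenticator shows the intended wiring; the 5 second CHALLENGE
// timeout is an arbitrary choice for this sketch.
func newExampleCRAuthenticator() *CRAuthenticator {
	ks := &staticKeyStore{authid: "alice", secret: []byte("alicesecret")}
	return NewCRAuthenticator(ks, 5*time.Second)
}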
| {
"pile_set_name": "Github"
} |
// -*- Mode: Java -*-
//
// PropositionError.java
/*
+---------------------------- BEGIN LICENSE BLOCK ---------------------------+
| |
| Version: MPL 1.1/GPL 2.0/LGPL 2.1 |
| |
| The contents of this file are subject to the Mozilla Public License |
| Version 1.1 (the "License"); you may not use this file except in |
| compliance with the License. You may obtain a copy of the License at |
| http://www.mozilla.org/MPL/ |
| |
| Software distributed under the License is distributed on an "AS IS" basis, |
| WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License |
| for the specific language governing rights and limitations under the |
| License. |
| |
| The Original Code is the PowerLoom KR&R System. |
| |
| The Initial Developer of the Original Code is |
| UNIVERSITY OF SOUTHERN CALIFORNIA, INFORMATION SCIENCES INSTITUTE |
| 4676 Admiralty Way, Marina Del Rey, California 90292, U.S.A. |
| |
| Portions created by the Initial Developer are Copyright (C) 1997-2017 |
| the Initial Developer. All Rights Reserved. |
| |
| Contributor(s): |
| |
| Alternatively, the contents of this file may be used under the terms of |
| either the GNU General Public License Version 2 or later (the "GPL"), or |
| the GNU Lesser General Public License Version 2.1 or later (the "LGPL"), |
| in which case the provisions of the GPL or the LGPL are applicable instead |
| of those above. If you wish to allow use of your version of this file only |
| under the terms of either the GPL or the LGPL, and not to allow others to |
| use your version of this file under the terms of the MPL, indicate your |
| decision by deleting the provisions above and replace them with the notice |
| and other provisions required by the GPL or the LGPL. If you do not delete |
| the provisions above, a recipient may use your version of this file under |
| the terms of any one of the MPL, the GPL or the LGPL. |
| |
+----------------------------- END LICENSE BLOCK ----------------------------+
*/
package edu.isi.powerloom.logic;
import edu.isi.stella.javalib.Native;
import edu.isi.stella.javalib.StellaSpecialVariable;
import edu.isi.stella.*;
public class PropositionError extends LogicException {
public PropositionError (String message) {
super(message);
}
public static PropositionError newPropositionError(String message) {
{ PropositionError self = null;
self = new PropositionError(message);
return (self);
}
}
}
| {
"pile_set_name": "Github"
} |
a <=> b - Returns the same result as the EQUAL(=) operator for non-null operands, but returns TRUE if both are NULL, FALSE if one of them is NULL
| {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="UTF-8"?>
<form xmlns="http://www.intellij.com/uidesigner/form/" version="1" bind-to-class="org.jetbrains.plugins.scala.compiler.ScalaCompileServerForm">
<grid id="27dc6" binding="myContentPanel" layout-manager="GridLayoutManager" row-count="8" column-count="4" same-size-horizontally="false" same-size-vertically="false" hgap="-1" vgap="-1">
<margin top="0" left="0" bottom="0" right="0"/>
<constraints>
<xy x="20" y="20" width="1317" height="400"/>
</constraints>
<properties/>
<border type="none"/>
<children>
<grid id="2e578" binding="myCompilationServerPanel" layout-manager="GridLayoutManager" row-count="4" column-count="2" same-size-horizontally="false" same-size-vertically="false" hgap="-1" vgap="-1">
<margin top="0" left="0" bottom="0" right="0"/>
<constraints>
<grid row="1" column="0" row-span="1" col-span="4" vsize-policy="3" hsize-policy="3" anchor="0" fill="3" indent="1" use-parent-layout="false"/>
</constraints>
<properties/>
<border type="none"/>
<children>
<component id="4e54b" class="javax.swing.JLabel">
<constraints>
<grid row="2" column="0" row-span="1" col-span="1" vsize-policy="0" hsize-policy="0" anchor="8" fill="0" indent="0" use-parent-layout="false"/>
</constraints>
<properties>
<enabled value="true"/>
<text resource-bundle="messages/ScalaBundle" key="jvm.options"/>
</properties>
</component>
<component id="c2f5a" class="com.intellij.ui.RawCommandLineEditor" binding="myCompilationServerJvmParameters">
<constraints>
<grid row="2" column="1" row-span="1" col-span="1" vsize-policy="3" hsize-policy="3" anchor="8" fill="0" indent="0" use-parent-layout="false">
<minimum-size width="250" height="-1"/>
<preferred-size width="544" height="27"/>
</grid>
</constraints>
<properties>
<dialogCaption resource-bundle="messages/ScalaBundle" key="compile.server.jvm.command.line.parameters"/>
<enabled value="true"/>
</properties>
</component>
<component id="b6151" class="javax.swing.JLabel">
<constraints>
<grid row="1" column="0" row-span="1" col-span="1" vsize-policy="0" hsize-policy="0" anchor="8" fill="0" indent="0" use-parent-layout="false"/>
</constraints>
<properties>
<enabled value="true"/>
<labelFor value="9ea43"/>
<text resource-bundle="messages/ScalaBundle" key="jvm.maximum.heap.size.mb"/>
</properties>
</component>
<component id="91057" class="javax.swing.JTextField" binding="myCompilationServerMaximumHeapSize">
<constraints>
<grid row="1" column="1" row-span="1" col-span="1" vsize-policy="0" hsize-policy="6" anchor="8" fill="0" indent="0" use-parent-layout="false"/>
</constraints>
<properties>
<columns value="5"/>
<enabled value="true"/>
</properties>
</component>
<component id="c4ed" class="com.intellij.openapi.ui.ex.MultiLineLabel" binding="myNote">
<constraints>
<grid row="3" column="0" row-span="1" col-span="2" vsize-policy="0" hsize-policy="0" anchor="8" fill="0" indent="0" use-parent-layout="false"/>
</constraints>
<properties>
<text resource-bundle="messages/ScalaBundle" key="compile.server.description"/>
</properties>
</component>
<component id="979cd" class="javax.swing.JLabel">
<constraints>
<grid row="0" column="0" row-span="1" col-span="1" vsize-policy="0" hsize-policy="0" anchor="8" fill="0" indent="0" use-parent-layout="false"/>
</constraints>
<properties>
<text resource-bundle="messages/ScalaBundle" key="jdk"/>
</properties>
</component>
<grid id="76924" binding="mySdkPanel" layout-manager="BorderLayout" hgap="0" vgap="0">
<constraints>
<grid row="0" column="1" row-span="1" col-span="1" vsize-policy="3" hsize-policy="7" anchor="8" fill="2" indent="0" use-parent-layout="false"/>
</constraints>
<properties>
<enabled value="false"/>
</properties>
<border type="none"/>
<children/>
</grid>
</children>
</grid>
<component id="2bd37" class="javax.swing.JCheckBox" binding="myEnableCompileServer">
<constraints>
<grid row="0" column="0" row-span="1" col-span="4" vsize-policy="0" hsize-policy="3" anchor="8" fill="0" indent="0" use-parent-layout="false"/>
</constraints>
<properties>
<text resource-bundle="messages/ScalaBundle" key="compile.server.use.for.scala"/>
</properties>
</component>
<grid id="32076" layout-manager="GridLayoutManager" row-count="1" column-count="2" same-size-horizontally="false" same-size-vertically="false" hgap="-1" vgap="-1">
<margin top="0" left="0" bottom="0" right="0"/>
<constraints>
<grid row="6" column="0" row-span="1" col-span="1" vsize-policy="3" hsize-policy="3" anchor="0" fill="3" indent="1" use-parent-layout="false"/>
</constraints>
<properties/>
<border type="none"/>
<children>
<component id="79de2" class="com.intellij.openapi.ui.ex.MultiLineLabel" binding="myProjectHomeNote">
<constraints>
<grid row="0" column="0" row-span="1" col-span="2" vsize-policy="0" hsize-policy="0" anchor="8" fill="0" indent="0" use-parent-layout="false"/>
</constraints>
<properties>
<text resource-bundle="messages/ScalaBundle" key="compile.server.new.project.restart"/>
</properties>
</component>
</children>
</grid>
<vspacer id="3ca2b">
<constraints>
<grid row="7" column="0" row-span="1" col-span="1" vsize-policy="6" hsize-policy="1" anchor="0" fill="2" indent="0" use-parent-layout="false"/>
</constraints>
</vspacer>
<component id="d852a" class="javax.swing.JSeparator">
<constraints>
<grid row="3" column="0" row-span="1" col-span="1" vsize-policy="0" hsize-policy="6" anchor="0" fill="3" indent="0" use-parent-layout="false"/>
</constraints>
<properties/>
</component>
<component id="bfc2f" class="javax.swing.JCheckBox" binding="myProjectHomeChb">
<constraints>
<grid row="5" column="0" row-span="1" col-span="3" vsize-policy="0" hsize-policy="3" anchor="8" fill="0" indent="0" use-parent-layout="false"/>
</constraints>
<properties>
<text resource-bundle="messages/ScalaBundle" key="compile.server.use.project.home"/>
</properties>
</component>
<component id="94b5e" class="javax.swing.JLabel">
<constraints>
<grid row="4" column="0" row-span="1" col-span="1" vsize-policy="0" hsize-policy="0" anchor="8" fill="0" indent="0" use-parent-layout="false"/>
</constraints>
<properties>
<font/>
<text resource-bundle="messages/ScalaBundle" key="advanced.settings"/>
</properties>
</component>
<grid id="618e2" layout-manager="GridLayoutManager" row-count="1" column-count="4" same-size-horizontally="false" same-size-vertically="false" hgap="-1" vgap="-1">
<margin top="0" left="0" bottom="0" right="0"/>
<constraints>
<grid row="2" column="0" row-span="1" col-span="1" vsize-policy="3" hsize-policy="3" anchor="0" fill="3" indent="0" use-parent-layout="false"/>
</constraints>
<properties/>
<border type="none"/>
<children>
<component id="85068" class="javax.swing.JCheckBox" binding="myShutdownServerCheckBox" default-binding="true">
<constraints>
<grid row="0" column="0" row-span="1" col-span="1" vsize-policy="0" hsize-policy="0" anchor="8" fill="0" indent="0" use-parent-layout="false"/>
</constraints>
<properties>
<text resource-bundle="messages/ScalaBundle" key="compile.server.shutdown.if.idle.for"/>
</properties>
</component>
<hspacer id="5e9c4">
<constraints>
<grid row="0" column="3" row-span="1" col-span="1" vsize-policy="1" hsize-policy="6" anchor="0" fill="1" indent="0" use-parent-layout="false"/>
</constraints>
</hspacer>
<component id="fd89f" class="javax.swing.JSpinner" binding="myShutdownDelay">
<constraints>
<grid row="0" column="1" row-span="1" col-span="1" vsize-policy="0" hsize-policy="0" anchor="8" fill="1" indent="0" use-parent-layout="false">
<preferred-size width="100" height="-1"/>
<maximum-size width="100" height="-1"/>
</grid>
</constraints>
<properties/>
</component>
<component id="3a4b1" class="javax.swing.JLabel">
<constraints>
<grid row="0" column="2" row-span="1" col-span="1" vsize-policy="0" hsize-policy="0" anchor="8" fill="0" indent="0" use-parent-layout="false"/>
</constraints>
<properties>
<text resource-bundle="messages/ScalaBundle" key="minutes"/>
</properties>
</component>
</children>
</grid>
</children>
</grid>
</form>
| {
"pile_set_name": "Github"
} |
export enum Inputs {
Name = 'name',
Path = 'path'
}
export enum Outputs {
DownloadPath = 'download-path'
}
| {
"pile_set_name": "Github"
} |
{
"created_at": "2015-02-27T22:29:22.886463",
"description": "Django Assets Managed Nicely",
"fork": false,
"full_name": "funkybob/django-amn",
"language": "Python",
"updated_at": "2015-02-27T23:44:30.273026"
} | {
"pile_set_name": "Github"
} |
// Copyright (c) 2014 The mathutil Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package mathutil
import (
"math"
)
// Approximation type determines approximation methods used by e.g. Envelope.
type Approximation int
// Specific approximation method tags
const (
_ Approximation = iota
Linear // As named
Sinusoidal // Smooth for all derivations
)
// Envelope is an utility for defining simple curves using a small (usually)
// set of data points. Envelope returns a value defined by x, points and
// approximation. The value of x must be in [0,1) otherwise the result is
// undefined or the function may panic. Points are interpreted as dividing the
// [0,1) interval in len(points)-1 sections, so len(points) must be > 1 or the
// function may panic. According to the left and right points closing/adjacent
// to the section the resulting value is interpolated using the chosen
// approximation method. Unsupported values of approximation are silently
// interpreted as 'Linear'.
func Envelope(x float64, points []float64, approximation Approximation) float64 {
step := 1 / float64(len(points)-1)
fslot := math.Floor(x / step)
mod := x - fslot*step
slot := int(fslot)
l, r := points[slot], points[slot+1]
rmod := mod / step
switch approximation {
case Sinusoidal:
k := (math.Sin(math.Pi*(rmod-0.5)) + 1) / 2
return l + (r-l)*k
case Linear:
fallthrough
default:
return l + (r-l)*rmod
}
}
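
// envelopeExample is an illustrative helper, not part of the original API. It
// evaluates a three point envelope that rises from 0 to 1 over [0,0.5) and falls
// back to 0 over [0.5,1), showing how the approximation method changes the shape.
func envelopeExample() (linear, smooth float64) {
	points := []float64{0, 1, 0}
	linear = Envelope(0.125, points, Linear)     // 0.25: a quarter of the way up the first section
	smooth = Envelope(0.125, points, Sinusoidal) // ~0.146: slower start, zero slope at the section edges
	return linear, smooth
}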
| {
"pile_set_name": "Github"
} |
<?php
/**
* Zend Framework
*
* LICENSE
*
* This source file is subject to the new BSD license that is bundled
* with this package in the file LICENSE.txt.
* It is also available through the world-wide-web at this URL:
* http://framework.zend.com/license/new-bsd
* If you did not receive a copy of the license and are unable to
* obtain it through the world-wide-web, please send an email
* to [email protected] so we can send you a copy immediately.
*
* @category Zend
* @package Zend_File
* @subpackage UnitTests
* @copyright Copyright (c) 2005-2011 Zend Technologies USA Inc. (http://www.zend.com)
* @license http://framework.zend.com/license/new-bsd New BSD License
* @version $Id: AbstractTest.php 23775 2011-03-01 17:25:24Z ralph $
*/
// Call Zend_File_Transfer_Adapter_AbstractTest::main() if this source file is executed directly.
if (!defined("PHPUnit_MAIN_METHOD")) {
define("PHPUnit_MAIN_METHOD", "Zend_File_Transfer_Adapter_AbstractTest::main");
}
require_once 'Zend/File/Transfer/Adapter/Abstract.php';
require_once 'Zend/Filter/BaseName.php';
require_once 'Zend/Filter/StringToLower.php';
require_once 'Zend/Filter/StringToUpper.php';
require_once 'Zend/Loader/PluginLoader.php';
require_once 'Zend/Validate/File/Count.php';
require_once 'Zend/Validate/File/Extension.php';
/**
* Test class for Zend_File_Transfer_Adapter_Abstract
*
* @category Zend
* @package Zend_File
* @subpackage UnitTests
* @copyright Copyright (c) 2005-2011 Zend Technologies USA Inc. (http://www.zend.com)
* @license http://framework.zend.com/license/new-bsd New BSD License
* @group Zend_File
*/
class Zend_File_Transfer_Adapter_AbstractTest extends PHPUnit_Framework_TestCase
{
/**
* Runs the test methods of this class.
*
* @return void
*/
public static function main()
{
$suite = new PHPUnit_Framework_TestSuite("Zend_File_Transfer_Adapter_AbstractTest");
$result = PHPUnit_TextUI_TestRunner::run($suite);
}
/**
* Sets up the fixture, for example, open a network connection.
* This method is called before a test is executed.
*
* @return void
*/
public function setUp()
{
$this->adapter = new Zend_File_Transfer_Adapter_AbstractTest_MockAdapter();
}
/**
* Tears down the fixture, for example, close a network connection.
* This method is called after a test is executed.
*
* @return void
*/
public function tearDown()
{
}
/**
* @expectedException Zend_File_Transfer_Exception
*/
public function testAdapterShouldThrowExceptionWhenRetrievingPluginLoaderOfInvalidType()
{
$this->adapter->getPluginLoader('bogus');
}
public function testAdapterShouldHavePluginLoaderForValidators()
{
$loader = $this->adapter->getPluginLoader('validate');
$this->assertTrue($loader instanceof Zend_Loader_PluginLoader);
}
public function testAdapterShouldAllowAddingCustomPluginLoader()
{
$loader = new Zend_Loader_PluginLoader();
$this->adapter->setPluginLoader($loader, 'filter');
$this->assertSame($loader, $this->adapter->getPluginLoader('filter'));
}
/**
* @expectedException Zend_File_Transfer_Exception
*/
public function testAddingInvalidPluginLoaderTypeToAdapterShouldRaiseException()
{
$loader = new Zend_Loader_PluginLoader();
$this->adapter->setPluginLoader($loader, 'bogus');
}
public function testAdapterShouldProxyAddingPluginLoaderPrefixPath()
{
$loader = $this->adapter->getPluginLoader('validate');
$this->adapter->addPrefixPath('Foo_Valid', 'Foo/Valid/', 'validate');
$paths = $loader->getPaths('Foo_Valid');
$this->assertTrue(is_array($paths));
}
public function testPassingNoTypeWhenAddingPrefixPathToAdapterShouldGeneratePathsForAllTypes()
{
$this->adapter->addPrefixPath('Foo', 'Foo');
$validateLoader = $this->adapter->getPluginLoader('validate');
$filterLoader = $this->adapter->getPluginLoader('filter');
$paths = $validateLoader->getPaths('Foo_Validate');
$this->assertTrue(is_array($paths));
$paths = $filterLoader->getPaths('Foo_Filter');
$this->assertTrue(is_array($paths));
}
/**
* @expectedException Zend_File_Transfer_Exception
*/
public function testPassingInvalidTypeWhenAddingPrefixPathToAdapterShouldThrowException()
{
$this->adapter->addPrefixPath('Foo', 'Foo', 'bogus');
}
public function testAdapterShouldProxyAddingMultiplePluginLoaderPrefixPaths()
{
$validatorLoader = $this->adapter->getPluginLoader('validate');
$filterLoader = $this->adapter->getPluginLoader('filter');
$this->adapter->addPrefixPaths(array(
'validate' => array('prefix' => 'Foo_Valid', 'path' => 'Foo/Valid/'),
'filter' => array(
'Foo_Filter' => 'Foo/Filter/',
'Baz_Filter' => array(
'Baz/Filter/',
'My/Baz/Filter/',
),
),
array('type' => 'filter', 'prefix' => 'Bar_Filter', 'path' => 'Bar/Filter/'),
));
$paths = $validatorLoader->getPaths('Foo_Valid');
$this->assertTrue(is_array($paths));
$paths = $filterLoader->getPaths('Foo_Filter');
$this->assertTrue(is_array($paths));
$paths = $filterLoader->getPaths('Bar_Filter');
$this->assertTrue(is_array($paths));
$paths = $filterLoader->getPaths('Baz_Filter');
$this->assertTrue(is_array($paths));
$this->assertEquals(2, count($paths));
}
public function testValidatorPluginLoaderShouldRegisterPathsForBaseAndFileValidatorsByDefault()
{
$loader = $this->adapter->getPluginLoader('validate');
$paths = $loader->getPaths('Zend_Validate');
$this->assertTrue(is_array($paths));
$paths = $loader->getPaths('Zend_Validate_File');
$this->assertTrue(is_array($paths));
}
public function testAdapterShouldAllowAddingValidatorInstance()
{
$validator = new Zend_Validate_File_Count(array('min' => 1, 'max' => 1));
$this->adapter->addValidator($validator);
$test = $this->adapter->getValidator('Zend_Validate_File_Count');
$this->assertSame($validator, $test);
}
public function testAdapterShouldAllowAddingValidatorViaPluginLoader()
{
$this->adapter->addValidator('Count', false, array('min' => 1, 'max' => 1));
$test = $this->adapter->getValidator('Count');
$this->assertTrue($test instanceof Zend_Validate_File_Count);
}
/**
* @expectedException Zend_File_Transfer_Exception
*/
public function testAdapterShouldRaiseExceptionWhenAddingInvalidValidatorType()
{
$this->adapter->addValidator(new Zend_Filter_BaseName);
}
public function testAdapterShouldAllowAddingMultipleValidatorsAtOnceUsingBothInstancesAndPluginLoader()
{
$validators = array(
'count' => array('min' => 1, 'max' => 1),
'Exists' => 'C:\temp',
array('validator' => 'Upload', 'options' => array(realpath(__FILE__))),
new Zend_Validate_File_Extension('jpg'),
);
$this->adapter->addValidators($validators);
$test = $this->adapter->getValidators();
$this->assertTrue(is_array($test));
$this->assertEquals(4, count($test), var_export($test, 1));
$count = array_shift($test);
$this->assertTrue($count instanceof Zend_Validate_File_Count);
$exists = array_shift($test);
$this->assertTrue($exists instanceof Zend_Validate_File_Exists);
$size = array_shift($test);
$this->assertTrue($size instanceof Zend_Validate_File_Upload);
$ext = array_shift($test);
$this->assertTrue($ext instanceof Zend_Validate_File_Extension);
$orig = array_pop($validators);
$this->assertSame($orig, $ext);
}
public function testGetValidatorShouldReturnNullWhenNoMatchingIdentifierExists()
{
$this->assertNull($this->adapter->getValidator('Alpha'));
}
public function testAdapterShouldAllowPullingValidatorsByFile()
{
$this->adapter->addValidator('Alpha', false, false, 'foo');
$validators = $this->adapter->getValidators('foo');
$this->assertEquals(1, count($validators));
$validator = array_shift($validators);
$this->assertTrue($validator instanceof Zend_Validate_Alpha);
}
public function testCallingSetValidatorsOnAdapterShouldOverwriteExistingValidators()
{
$this->testAdapterShouldAllowAddingMultipleValidatorsAtOnceUsingBothInstancesAndPluginLoader();
$validators = array(
new Zend_Validate_File_Count(1),
new Zend_Validate_File_Extension('jpg'),
);
$this->adapter->setValidators($validators);
$test = $this->adapter->getValidators();
$this->assertSame($validators, array_values($test));
}
public function testAdapterShouldAllowRetrievingValidatorInstancesByClassName()
{
$this->testAdapterShouldAllowAddingMultipleValidatorsAtOnceUsingBothInstancesAndPluginLoader();
$ext = $this->adapter->getValidator('Zend_Validate_File_Extension');
$this->assertTrue($ext instanceof Zend_Validate_File_Extension);
}
public function testAdapterShouldAllowRetrievingValidatorInstancesByPluginName()
{
$this->testAdapterShouldAllowAddingMultipleValidatorsAtOnceUsingBothInstancesAndPluginLoader();
$count = $this->adapter->getValidator('Count');
$this->assertTrue($count instanceof Zend_Validate_File_Count);
}
public function testAdapterShouldAllowRetrievingAllValidatorsAtOnce()
{
$this->testAdapterShouldAllowAddingMultipleValidatorsAtOnceUsingBothInstancesAndPluginLoader();
$validators = $this->adapter->getValidators();
$this->assertTrue(is_array($validators));
$this->assertEquals(4, count($validators));
foreach ($validators as $validator) {
$this->assertTrue($validator instanceof Zend_Validate_Interface);
}
}
public function testAdapterShouldAllowRemovingValidatorInstancesByClassName()
{
$this->testAdapterShouldAllowAddingMultipleValidatorsAtOnceUsingBothInstancesAndPluginLoader();
$this->assertTrue($this->adapter->hasValidator('Zend_Validate_File_Extension'));
$this->adapter->removeValidator('Zend_Validate_File_Extension');
$this->assertFalse($this->adapter->hasValidator('Zend_Validate_File_Extension'));
}
public function testAdapterShouldAllowRemovingValidatorInstancesByPluginName()
{
$this->testAdapterShouldAllowAddingMultipleValidatorsAtOnceUsingBothInstancesAndPluginLoader();
$this->assertTrue($this->adapter->hasValidator('Count'));
$this->adapter->removeValidator('Count');
$this->assertFalse($this->adapter->hasValidator('Count'));
}
public function testRemovingNonexistentValidatorShouldDoNothing()
{
$this->testAdapterShouldAllowAddingMultipleValidatorsAtOnceUsingBothInstancesAndPluginLoader();
$validators = $this->adapter->getValidators();
$this->assertFalse($this->adapter->hasValidator('Alpha'));
$this->adapter->removeValidator('Alpha');
$this->assertFalse($this->adapter->hasValidator('Alpha'));
$test = $this->adapter->getValidators();
$this->assertSame($validators, $test);
}
public function testAdapterShouldAllowRemovingAllValidatorsAtOnce()
{
$this->testAdapterShouldAllowAddingMultipleValidatorsAtOnceUsingBothInstancesAndPluginLoader();
$this->adapter->clearValidators();
$validators = $this->adapter->getValidators();
$this->assertTrue(is_array($validators));
$this->assertEquals(0, count($validators));
}
public function testValidationShouldReturnTrueForValidTransfer()
{
$this->adapter->addValidator('Count', false, array(1, 3), 'foo');
$this->assertTrue($this->adapter->isValid('foo'));
}
public function testValidationShouldReturnTrueForValidTransferOfMultipleFiles()
{
$this->assertTrue($this->adapter->isValid(null));
}
public function testValidationShouldReturnFalseForInvalidTransfer()
{
$this->adapter->addValidator('Extension', false, 'png', 'foo');
$this->assertFalse($this->adapter->isValid('foo'));
}
public function testValidationShouldThrowExceptionForNonexistentFile()
{
$this->assertFalse($this->adapter->isValid('bogus'));
}
public function testErrorMessagesShouldBeEmptyByDefault()
{
$messages = $this->adapter->getMessages();
$this->assertTrue(is_array($messages));
$this->assertEquals(0, count($messages));
}
public function testErrorMessagesShouldBePopulatedAfterInvalidTransfer()
{
$this->testValidationShouldReturnFalseForInvalidTransfer();
$messages = $this->adapter->getMessages();
$this->assertTrue(is_array($messages));
$this->assertFalse(empty($messages));
}
public function testErrorCodesShouldBeNullByDefault()
{
$errors = $this->adapter->getErrors();
$this->assertTrue(is_array($errors));
$this->assertEquals(0, count($errors));
}
public function testErrorCodesShouldBePopulatedAfterInvalidTransfer()
{
$this->testValidationShouldReturnFalseForInvalidTransfer();
$errors = $this->adapter->getErrors();
$this->assertTrue(is_array($errors));
$this->assertFalse(empty($errors));
}
public function testAdapterShouldHavePluginLoaderForFilters()
{
$loader = $this->adapter->getPluginLoader('filter');
$this->assertTrue($loader instanceof Zend_Loader_PluginLoader);
}
public function testFilterPluginLoaderShouldRegisterPathsForBaseAndFileFiltersByDefault()
{
$loader = $this->adapter->getPluginLoader('filter');
$paths = $loader->getPaths('Zend_Filter');
$this->assertTrue(is_array($paths));
$paths = $loader->getPaths('Zend_Filter_File');
$this->assertTrue(is_array($paths));
}
public function testAdapterShouldAllowAddingFilterInstance()
{
$filter = new Zend_Filter_StringToLower();
$this->adapter->addFilter($filter);
$test = $this->adapter->getFilter('Zend_Filter_StringToLower');
$this->assertSame($filter, $test);
}
public function testAdapterShouldAllowAddingFilterViaPluginLoader()
{
$this->adapter->addFilter('StringTrim');
$test = $this->adapter->getFilter('StringTrim');
$this->assertTrue($test instanceof Zend_Filter_StringTrim);
}
/**
* @expectedException Zend_File_Transfer_Exception
*/
public function testAdapterShouldRaiseExceptionWhenAddingInvalidFilterType()
{
$this->adapter->addFilter(new Zend_Validate_File_Extension('jpg'));
}
public function testAdapterShouldAllowAddingMultipleFiltersAtOnceUsingBothInstancesAndPluginLoader()
{
$filters = array(
'Word_SeparatorToCamelCase' => array('separator' => ' '),
array('filter' => 'Alpha', 'options' => array(true)),
new Zend_Filter_BaseName(),
);
$this->adapter->addFilters($filters);
$test = $this->adapter->getFilters();
$this->assertTrue(is_array($test));
$this->assertEquals(3, count($test), var_export($test, 1));
$count = array_shift($test);
$this->assertTrue($count instanceof Zend_Filter_Word_SeparatorToCamelCase);
$size = array_shift($test);
$this->assertTrue($size instanceof Zend_Filter_Alpha);
$ext = array_shift($test);
$orig = array_pop($filters);
$this->assertSame($orig, $ext);
}
public function testGetFilterShouldReturnNullWhenNoMatchingIdentifierExists()
{
$this->assertNull($this->adapter->getFilter('Alpha'));
}
public function testAdapterShouldAllowPullingFiltersByFile()
{
$this->adapter->addFilter('Alpha', false, 'foo');
$filters = $this->adapter->getFilters('foo');
$this->assertEquals(1, count($filters));
$filter = array_shift($filters);
$this->assertTrue($filter instanceof Zend_Filter_Alpha);
}
public function testCallingSetFiltersOnAdapterShouldOverwriteExistingFilters()
{
$this->testAdapterShouldAllowAddingMultipleFiltersAtOnceUsingBothInstancesAndPluginLoader();
$filters = array(
new Zend_Filter_StringToUpper(),
new Zend_Filter_Alpha(),
);
$this->adapter->setFilters($filters);
$test = $this->adapter->getFilters();
$this->assertSame($filters, array_values($test));
}
public function testAdapterShouldAllowRetrievingFilterInstancesByClassName()
{
$this->testAdapterShouldAllowAddingMultipleFiltersAtOnceUsingBothInstancesAndPluginLoader();
$ext = $this->adapter->getFilter('Zend_Filter_BaseName');
$this->assertTrue($ext instanceof Zend_Filter_BaseName);
}
public function testAdapterShouldAllowRetrievingFilterInstancesByPluginName()
{
$this->testAdapterShouldAllowAddingMultipleFiltersAtOnceUsingBothInstancesAndPluginLoader();
$count = $this->adapter->getFilter('Alpha');
$this->assertTrue($count instanceof Zend_Filter_Alpha);
}
public function testAdapterShouldAllowRetrievingAllFiltersAtOnce()
{
$this->testAdapterShouldAllowAddingMultipleFiltersAtOnceUsingBothInstancesAndPluginLoader();
$filters = $this->adapter->getFilters();
$this->assertTrue(is_array($filters));
$this->assertEquals(3, count($filters));
foreach ($filters as $filter) {
$this->assertTrue($filter instanceof Zend_Filter_Interface);
}
}
public function testAdapterShouldAllowRemovingFilterInstancesByClassName()
{
$this->testAdapterShouldAllowAddingMultipleFiltersAtOnceUsingBothInstancesAndPluginLoader();
$this->assertTrue($this->adapter->hasFilter('Zend_Filter_BaseName'));
$this->adapter->removeFilter('Zend_Filter_BaseName');
$this->assertFalse($this->adapter->hasFilter('Zend_Filter_BaseName'));
}
public function testAdapterShouldAllowRemovingFilterInstancesByPluginName()
{
$this->testAdapterShouldAllowAddingMultipleFiltersAtOnceUsingBothInstancesAndPluginLoader();
$this->assertTrue($this->adapter->hasFilter('Alpha'));
$this->adapter->removeFilter('Alpha');
$this->assertFalse($this->adapter->hasFilter('Alpha'));
}
public function testRemovingNonexistentFilterShouldDoNothing()
{
$this->testAdapterShouldAllowAddingMultipleFiltersAtOnceUsingBothInstancesAndPluginLoader();
$filters = $this->adapter->getFilters();
$this->assertFalse($this->adapter->hasFilter('Int'));
$this->adapter->removeFilter('Int');
$this->assertFalse($this->adapter->hasFilter('Int'));
$test = $this->adapter->getFilters();
$this->assertSame($filters, $test);
}
public function testAdapterShouldAllowRemovingAllFiltersAtOnce()
{
$this->testAdapterShouldAllowAddingMultipleFiltersAtOnceUsingBothInstancesAndPluginLoader();
$this->adapter->clearFilters();
$filters = $this->adapter->getFilters();
$this->assertTrue(is_array($filters));
$this->assertEquals(0, count($filters));
}
public function testTransferDestinationShouldBeMutable()
{
$directory = dirname(__FILE__);
$this->adapter->setDestination($directory);
$destinations = $this->adapter->getDestination();
$this->assertTrue(is_array($destinations));
foreach ($destinations as $file => $destination) {
$this->assertEquals($directory, $destination);
}
$newdirectory = dirname(__FILE__)
. DIRECTORY_SEPARATOR . '..'
. DIRECTORY_SEPARATOR . '..'
. DIRECTORY_SEPARATOR . '..'
. DIRECTORY_SEPARATOR . '_files';
$this->adapter->setDestination($newdirectory, 'foo');
$this->assertEquals($newdirectory, $this->adapter->getDestination('foo'));
$this->assertEquals($directory, $this->adapter->getDestination('bar'));
}
public function testAdapterShouldAllowRetrievingDestinationsForAnArrayOfSpecifiedFiles()
{
$this->adapter->setDestination(dirname(__FILE__));
$destinations = $this->adapter->getDestination(array('bar', 'baz'));
$this->assertTrue(is_array($destinations));
$directory = dirname(__FILE__);
foreach ($destinations as $file => $destination) {
$this->assertTrue(in_array($file, array('bar', 'baz')));
$this->assertEquals($directory, $destination);
}
}
public function testSettingAndRetrievingOptions()
{
$this->assertEquals(
array(
'bar' => array('ignoreNoFile' => false, 'useByteString' => true),
'baz' => array('ignoreNoFile' => false, 'useByteString' => true),
'foo' => array('ignoreNoFile' => false, 'useByteString' => true, 'detectInfos' => true),
'file_0_' => array('ignoreNoFile' => false, 'useByteString' => true),
'file_1_' => array('ignoreNoFile' => false, 'useByteString' => true),
), $this->adapter->getOptions());
$this->adapter->setOptions(array('ignoreNoFile' => true));
$this->assertEquals(
array(
'bar' => array('ignoreNoFile' => true, 'useByteString' => true),
'baz' => array('ignoreNoFile' => true, 'useByteString' => true),
'foo' => array('ignoreNoFile' => true, 'useByteString' => true, 'detectInfos' => true),
'file_0_' => array('ignoreNoFile' => true, 'useByteString' => true),
'file_1_' => array('ignoreNoFile' => true, 'useByteString' => true),
), $this->adapter->getOptions());
$this->adapter->setOptions(array('ignoreNoFile' => false), 'foo');
$this->assertEquals(
array(
'bar' => array('ignoreNoFile' => true, 'useByteString' => true),
'baz' => array('ignoreNoFile' => true, 'useByteString' => true),
'foo' => array('ignoreNoFile' => false, 'useByteString' => true, 'detectInfos' => true),
'file_0_' => array('ignoreNoFile' => true, 'useByteString' => true),
'file_1_' => array('ignoreNoFile' => true, 'useByteString' => true),
), $this->adapter->getOptions());
}
public function testGetAllAdditionalFileInfos()
{
$files = $this->adapter->getFileInfo();
$this->assertEquals(5, count($files));
$this->assertEquals('baz.text', $files['baz']['name']);
}
public function testGetAdditionalFileInfosForSingleFile()
{
$files = $this->adapter->getFileInfo('baz');
$this->assertEquals(1, count($files));
$this->assertEquals('baz.text', $files['baz']['name']);
}
/**
* @expectedException Zend_File_Transfer_Exception
*/
public function testGetAdditionalFileInfosForUnknownFile()
{
$files = $this->adapter->getFileInfo('unknown');
}
/**
* @expectedException Zend_File_Transfer_Exception
*/
public function testGetUnknownOption()
{
$this->adapter->setOptions(array('unknownOption' => 'unknown'));
}
/**
* @expectedException Zend_File_Transfer_Exception
*/
public function testGetFileIsNotImplemented()
{
$this->adapter->getFile();
}
/**
* @expectedException Zend_File_Transfer_Exception
*/
public function testAddFileIsNotImplemented()
{
$this->adapter->addFile('foo');
}
/**
* @expectedException Zend_File_Transfer_Exception
*/
public function testGetTypeIsNotImplemented()
{
$this->adapter->getType();
}
/**
* @expectedException Zend_File_Transfer_Exception
*/
public function testAddTypeIsNotImplemented()
{
$this->adapter->addType('foo');
}
public function testAdapterShouldAllowRetrievingFileName()
{
$path = dirname(__FILE__)
. DIRECTORY_SEPARATOR . '..'
. DIRECTORY_SEPARATOR . '..'
. DIRECTORY_SEPARATOR . '..'
. DIRECTORY_SEPARATOR . '_files';
$this->adapter->setDestination($path);
$this->assertEquals($path . DIRECTORY_SEPARATOR . 'foo.jpg', $this->adapter->getFileName('foo'));
}
public function testAdapterShouldAllowRetrievingFileNameWithoutPath()
{
$path = dirname(__FILE__)
. DIRECTORY_SEPARATOR . '..'
. DIRECTORY_SEPARATOR . '..'
. DIRECTORY_SEPARATOR . '..'
. DIRECTORY_SEPARATOR . '_files';
$this->adapter->setDestination($path);
$this->assertEquals('foo.jpg', $this->adapter->getFileName('foo', false));
}
public function testAdapterShouldAllowRetrievingAllFileNames()
{
$path = dirname(__FILE__)
. DIRECTORY_SEPARATOR . '..'
. DIRECTORY_SEPARATOR . '..'
. DIRECTORY_SEPARATOR . '..'
. DIRECTORY_SEPARATOR . '_files';
$this->adapter->setDestination($path);
$files = $this->adapter->getFileName();
$this->assertTrue(is_array($files));
$this->assertEquals($path . DIRECTORY_SEPARATOR . 'bar.png', $files['bar']);
}
public function testAdapterShouldAllowRetrievingAllFileNamesWithoutPath()
{
$path = dirname(__FILE__)
. DIRECTORY_SEPARATOR . '..'
. DIRECTORY_SEPARATOR . '..'
. DIRECTORY_SEPARATOR . '..'
. DIRECTORY_SEPARATOR . '_files';
$this->adapter->setDestination($path);
$files = $this->adapter->getFileName(null, false);
$this->assertTrue(is_array($files));
$this->assertEquals('bar.png', $files['bar']);
}
public function testExceptionForUnknownHashValue()
{
try {
$this->adapter->getHash('foo', 'unknown_hash');
$this->fail();
} catch (Zend_Exception $e) {
$this->assertContains('Unknown hash algorithm', $e->getMessage());
}
}
public function testIgnoreHashValue()
{
$this->adapter->addInvalidFile();
$return = $this->adapter->getHash('crc32', 'test');
$this->assertEquals(array(), $return);
}
public function testEmptyTempDirectoryDetection()
{
$this->adapter->_tmpDir = "";
$this->assertTrue(empty($this->adapter->_tmpDir), "Empty temporary directory");
}
public function testTempDirectoryDetection()
{
$this->adapter->getTmpDir();
$this->assertTrue(!empty($this->adapter->_tmpDir), "Temporary directory filled");
}
public function testTemporaryDirectoryAccessDetection()
{
$this->adapter->_tmpDir = ".";
$path = "/NoPath/To/File";
$this->assertFalse($this->adapter->isPathWriteable($path));
$this->assertTrue($this->adapter->isPathWriteable($this->adapter->_tmpDir));
}
public function testFileSizeButNoFileFound()
{
try {
$this->assertEquals(10, $this->adapter->getFileSize());
$this->fail();
} catch (Zend_File_Transfer_Exception $e) {
$this->assertContains('does not exist', $e->getMessage());
}
}
public function testIgnoreFileSize()
{
$this->adapter->addInvalidFile();
$return = $this->adapter->getFileSize('test');
$this->assertEquals(array(), $return);
}
public function testFileSizeByTmpName()
{
$options = $this->adapter->getOptions();
$this->assertTrue($options['baz']['useByteString']);
$this->assertEquals('1.14kB', $this->adapter->getFileSize('baz.text'));
$this->adapter->setOptions(array('useByteString' => false));
$options = $this->adapter->getOptions();
$this->assertFalse($options['baz']['useByteString']);
$this->assertEquals(1172, $this->adapter->getFileSize('baz.text'));
}
public function testMimeTypeButNoFileFound()
{
try {
$this->assertEquals('image/jpeg', $this->adapter->getMimeType());
$this->fail();
} catch (Zend_File_Transfer_Exception $e) {
$this->assertContains('does not exist', $e->getMessage());
}
}
public function testIgnoreMimeType()
{
$this->adapter->addInvalidFile();
$return = $this->adapter->getMimeType('test');
$this->assertEquals(array(), $return);
}
public function testMimeTypeByTmpName()
{
$this->assertEquals('text/plain', $this->adapter->getMimeType('baz.text'));
}
public function testSetOwnErrorMessage()
{
$this->adapter->addValidator('Count', false, array('min' => 5, 'max' => 5, 'messages' => array(Zend_Validate_File_Count::TOO_FEW => 'Zu wenige')));
$this->assertFalse($this->adapter->isValid('foo'));
$message = $this->adapter->getMessages();
$this->assertContains('Zu wenige', $message);
try {
$this->assertEquals('image/jpeg', $this->adapter->getMimeType());
$this->fail();
} catch (Zend_File_Transfer_Exception $e) {
$this->assertContains('does not exist', $e->getMessage());
}
}
public function testTransferDestinationAtNonExistingElement()
{
$directory = dirname(__FILE__);
$this->adapter->setDestination($directory, 'nonexisting');
$this->assertEquals($directory, $this->adapter->getDestination('nonexisting'));
try {
$this->assertTrue(is_string($this->adapter->getDestination('reallynonexisting')));
$this->fail();
} catch(Exception $e) {
$this->assertContains('not find', $e->getMessage());
}
}
/**
* @ZF-7376
*/
public function testSettingMagicFile()
{
$this->adapter->setOptions(array('magicFile' => 'test/file'));
$this->assertEquals(
array(
'bar' => array('magicFile' => 'test/file', 'ignoreNoFile' => false, 'useByteString' => true),
), $this->adapter->getOptions('bar'));
}
/**
* @ZF-8693
*/
public function testAdapterShouldAllowAddingMultipleValidatorsAtOnceUsingBothInstancesAndPluginLoaderForDifferentFiles()
{
$validators = array(
array('MimeType', true, array('image/jpeg')), // no files
array('FilesSize', true, array('max' => '1MB', 'messages' => 'файл больше 1MБ')), // no files
array('Count', true, array('min' => 1, 'max' => '1', 'messages' => 'файл не 1'), 'bar'), // 'bar' from config
array('MimeType', true, array('image/jpeg'), 'bar'), // 'bar' from config
);
$this->adapter->addValidators($validators, 'foo'); // set validators to 'foo'
$test = $this->adapter->getValidators();
$this->assertEquals(3, count($test));
//test files specific validators
$test = $this->adapter->getValidators('foo');
$this->assertEquals(2, count($test));
$mimeType = array_shift($test);
$this->assertTrue($mimeType instanceof Zend_Validate_File_MimeType);
$filesSize = array_shift($test);
$this->assertTrue($filesSize instanceof Zend_Validate_File_FilesSize);
$test = $this->adapter->getValidators('bar');
$this->assertEquals(2, count($test));
$filesSize = array_shift($test);
$this->assertTrue($filesSize instanceof Zend_Validate_File_Count);
$mimeType = array_shift($test);
$this->assertTrue($mimeType instanceof Zend_Validate_File_MimeType);
$test = $this->adapter->getValidators('baz');
$this->assertEquals(0, count($test));
}
/**
* @ZF-9132
*/
public function testSettingAndRetrievingDetectInfosOption()
{
$this->assertEquals(array(
'foo' => array(
'ignoreNoFile' => false,
'useByteString' => true,
'detectInfos' => true))
, $this->adapter->getOptions('foo'));
$this->adapter->setOptions(array('detectInfos' => false));
$this->assertEquals(array(
'foo' => array(
'ignoreNoFile' => false,
'useByteString' => true,
'detectInfos' => false))
, $this->adapter->getOptions('foo'));
}
}
class Zend_File_Transfer_Adapter_AbstractTest_MockAdapter extends Zend_File_Transfer_Adapter_Abstract
{
public $received = false;
public $_tmpDir;
public function __construct()
{
$testfile = dirname(__FILE__) . '/_files/test.txt';
$this->_files = array(
'foo' => array(
'name' => 'foo.jpg',
'type' => 'image/jpeg',
'size' => 126976,
'tmp_name' => '/tmp/489127ba5c89c',
'options' => array('ignoreNoFile' => false, 'useByteString' => true, 'detectInfos' => true),
'validated' => false,
'received' => false,
'filtered' => false,
),
'bar' => array(
'name' => 'bar.png',
'type' => 'image/png',
'size' => 91136,
'tmp_name' => '/tmp/489128284b51f',
'options' => array('ignoreNoFile' => false, 'useByteString' => true),
'validated' => false,
'received' => false,
'filtered' => false,
),
'baz' => array(
'name' => 'baz.text',
'type' => 'text/plain',
'size' => 1172,
'tmp_name' => $testfile,
'options' => array('ignoreNoFile' => false, 'useByteString' => true),
'validated' => false,
'received' => false,
'filtered' => false,
),
'file_0_' => array(
'name' => 'foo.jpg',
'type' => 'image/jpeg',
'size' => 126976,
'tmp_name' => '/tmp/489127ba5c89c',
'options' => array('ignoreNoFile' => false, 'useByteString' => true),
'validated' => false,
'received' => false,
'filtered' => false,
),
'file_1_' => array(
'name' => 'baz.text',
'type' => 'text/plain',
'size' => 1172,
'tmp_name' => $testfile,
'options' => array('ignoreNoFile' => false, 'useByteString' => true),
'validated' => false,
'received' => false,
'filtered' => false,
),
'file' => array(
'name' => 'foo.jpg',
'multifiles' => array(0 => 'file_0_', 1 => 'file_1_')
),
);
}
public function send($options = null)
{
return;
}
public function receive($options = null)
{
$this->received = true;
return;
}
public function isSent($file = null)
{
return false;
}
public function isReceived($file = null)
{
return $this->received;
}
public function isUploaded($files = null)
{
return true;
}
public function isFiltered($files = null)
{
return true;
}
public static function getProgress()
{
return;
}
public function getTmpDir()
{
$this->_tmpDir = parent::_getTmpDir();
}
public function isPathWriteable($path)
{
return parent::_isPathWriteable($path);
}
public function addInvalidFile()
{
$this->_files += array(
'test' => array(
'name' => 'test.txt',
'type' => 'image/jpeg',
'size' => 0,
'tmp_name' => '',
'options' => array('ignoreNoFile' => true, 'useByteString' => true),
'validated' => false,
'received' => false,
'filtered' => false,
)
);
}
}
// Call Zend_File_Transfer_Adapter_AbstractTest::main() if this source file is executed directly.
if (PHPUnit_MAIN_METHOD == "Zend_File_Transfer_Adapter_AbstractTest::main") {
Zend_File_Transfer_Adapter_AbstractTest::main();
}
| {
"pile_set_name": "Github"
} |
package logrus
import (
"bytes"
"fmt"
"os"
"runtime"
"sort"
"strings"
"sync"
"time"
)
const (
red = 31
yellow = 33
blue = 36
gray = 37
)
var baseTimestamp time.Time
func init() {
baseTimestamp = time.Now()
}
// TextFormatter formats logs into text
type TextFormatter struct {
// Set to true to bypass checking for a TTY before outputting colors.
ForceColors bool
// Force disabling colors.
DisableColors bool
// Override coloring based on CLICOLOR and CLICOLOR_FORCE. - https://bixense.com/clicolors/
EnvironmentOverrideColors bool
// Disable timestamp logging. Useful when output is redirected to a logging
// system that already adds timestamps.
DisableTimestamp bool
// Enable logging the full timestamp when a TTY is attached instead of just
// the time passed since beginning of execution.
FullTimestamp bool
// TimestampFormat to use for display when a full timestamp is printed
TimestampFormat string
// The fields are sorted by default for a consistent output. For applications
// that log extremely frequently and don't use the JSON formatter this may not
// be desired.
DisableSorting bool
// The keys sorting function; when uninitialized it uses sort.Strings.
SortingFunc func([]string)
// Disables the truncation of the level text to 4 characters.
DisableLevelTruncation bool
// QuoteEmptyFields will wrap empty fields in quotes if true
QuoteEmptyFields bool
// Whether the logger's out is to a terminal
isTerminal bool
// FieldMap allows users to customize the names of keys for default fields.
// As an example:
// formatter := &TextFormatter{
// FieldMap: FieldMap{
// FieldKeyTime: "@timestamp",
// FieldKeyLevel: "@level",
// FieldKeyMsg: "@message"}}
FieldMap FieldMap
// CallerPrettyfier can be set by the user to modify the content
// of the function and file keys in the data when ReportCaller is
// activated. If any of the returned values is the empty string the
// corresponding key will be removed from fields.
CallerPrettyfier func(*runtime.Frame) (function string, file string)
terminalInitOnce sync.Once
}
func (f *TextFormatter) init(entry *Entry) {
if entry.Logger != nil {
f.isTerminal = checkIfTerminal(entry.Logger.Out)
}
}
func (f *TextFormatter) isColored() bool {
isColored := f.ForceColors || (f.isTerminal && (runtime.GOOS != "windows"))
if f.EnvironmentOverrideColors {
if force, ok := os.LookupEnv("CLICOLOR_FORCE"); ok && force != "0" {
isColored = true
} else if ok && force == "0" {
isColored = false
} else if os.Getenv("CLICOLOR") == "0" {
isColored = false
}
}
return isColored && !f.DisableColors
}
// Format renders a single log entry
func (f *TextFormatter) Format(entry *Entry) ([]byte, error) {
data := make(Fields)
for k, v := range entry.Data {
data[k] = v
}
prefixFieldClashes(data, f.FieldMap, entry.HasCaller())
keys := make([]string, 0, len(data))
for k := range data {
keys = append(keys, k)
}
var funcVal, fileVal string
fixedKeys := make([]string, 0, 4+len(data))
if !f.DisableTimestamp {
fixedKeys = append(fixedKeys, f.FieldMap.resolve(FieldKeyTime))
}
fixedKeys = append(fixedKeys, f.FieldMap.resolve(FieldKeyLevel))
if entry.Message != "" {
fixedKeys = append(fixedKeys, f.FieldMap.resolve(FieldKeyMsg))
}
if entry.err != "" {
fixedKeys = append(fixedKeys, f.FieldMap.resolve(FieldKeyLogrusError))
}
if entry.HasCaller() {
if f.CallerPrettyfier != nil {
funcVal, fileVal = f.CallerPrettyfier(entry.Caller)
} else {
funcVal = entry.Caller.Function
fileVal = fmt.Sprintf("%s:%d", entry.Caller.File, entry.Caller.Line)
}
if funcVal != "" {
fixedKeys = append(fixedKeys, f.FieldMap.resolve(FieldKeyFunc))
}
if fileVal != "" {
fixedKeys = append(fixedKeys, f.FieldMap.resolve(FieldKeyFile))
}
}
if !f.DisableSorting {
if f.SortingFunc == nil {
sort.Strings(keys)
fixedKeys = append(fixedKeys, keys...)
} else {
if !f.isColored() {
fixedKeys = append(fixedKeys, keys...)
f.SortingFunc(fixedKeys)
} else {
f.SortingFunc(keys)
}
}
} else {
fixedKeys = append(fixedKeys, keys...)
}
var b *bytes.Buffer
if entry.Buffer != nil {
b = entry.Buffer
} else {
b = &bytes.Buffer{}
}
f.terminalInitOnce.Do(func() { f.init(entry) })
timestampFormat := f.TimestampFormat
if timestampFormat == "" {
timestampFormat = defaultTimestampFormat
}
if f.isColored() {
f.printColored(b, entry, keys, data, timestampFormat)
} else {
for _, key := range fixedKeys {
var value interface{}
switch {
case key == f.FieldMap.resolve(FieldKeyTime):
value = entry.Time.Format(timestampFormat)
case key == f.FieldMap.resolve(FieldKeyLevel):
value = entry.Level.String()
case key == f.FieldMap.resolve(FieldKeyMsg):
value = entry.Message
case key == f.FieldMap.resolve(FieldKeyLogrusError):
value = entry.err
case key == f.FieldMap.resolve(FieldKeyFunc) && entry.HasCaller():
value = funcVal
case key == f.FieldMap.resolve(FieldKeyFile) && entry.HasCaller():
value = fileVal
default:
value = data[key]
}
f.appendKeyValue(b, key, value)
}
}
b.WriteByte('\n')
return b.Bytes(), nil
}
func (f *TextFormatter) printColored(b *bytes.Buffer, entry *Entry, keys []string, data Fields, timestampFormat string) {
var levelColor int
switch entry.Level {
case DebugLevel, TraceLevel:
levelColor = gray
case WarnLevel:
levelColor = yellow
case ErrorLevel, FatalLevel, PanicLevel:
levelColor = red
default:
levelColor = blue
}
levelText := strings.ToUpper(entry.Level.String())
if !f.DisableLevelTruncation {
levelText = levelText[0:4]
}
// Remove a single newline if it already exists in the message to keep
// the behavior of logrus text_formatter the same as the stdlib log package
entry.Message = strings.TrimSuffix(entry.Message, "\n")
caller := ""
if entry.HasCaller() {
funcVal := fmt.Sprintf("%s()", entry.Caller.Function)
fileVal := fmt.Sprintf("%s:%d", entry.Caller.File, entry.Caller.Line)
if f.CallerPrettyfier != nil {
funcVal, fileVal = f.CallerPrettyfier(entry.Caller)
}
if fileVal == "" {
caller = funcVal
} else if funcVal == "" {
caller = fileVal
} else {
caller = fileVal + " " + funcVal
}
}
if f.DisableTimestamp {
fmt.Fprintf(b, "\x1b[%dm%s\x1b[0m%s %-44s ", levelColor, levelText, caller, entry.Message)
} else if !f.FullTimestamp {
fmt.Fprintf(b, "\x1b[%dm%s\x1b[0m[%04d]%s %-44s ", levelColor, levelText, int(entry.Time.Sub(baseTimestamp)/time.Second), caller, entry.Message)
} else {
fmt.Fprintf(b, "\x1b[%dm%s\x1b[0m[%s]%s %-44s ", levelColor, levelText, entry.Time.Format(timestampFormat), caller, entry.Message)
}
for _, k := range keys {
v := data[k]
fmt.Fprintf(b, " \x1b[%dm%s\x1b[0m=", levelColor, k)
f.appendValue(b, v)
}
}
func (f *TextFormatter) needsQuoting(text string) bool {
if f.QuoteEmptyFields && len(text) == 0 {
return true
}
for _, ch := range text {
if !((ch >= 'a' && ch <= 'z') ||
(ch >= 'A' && ch <= 'Z') ||
(ch >= '0' && ch <= '9') ||
ch == '-' || ch == '.' || ch == '_' || ch == '/' || ch == '@' || ch == '^' || ch == '+') {
return true
}
}
return false
}
func (f *TextFormatter) appendKeyValue(b *bytes.Buffer, key string, value interface{}) {
if b.Len() > 0 {
b.WriteByte(' ')
}
b.WriteString(key)
b.WriteByte('=')
f.appendValue(b, value)
}
func (f *TextFormatter) appendValue(b *bytes.Buffer, value interface{}) {
stringVal, ok := value.(string)
if !ok {
stringVal = fmt.Sprint(value)
}
if !f.needsQuoting(stringVal) {
b.WriteString(stringVal)
} else {
b.WriteString(fmt.Sprintf("%q", stringVal))
}
}
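// Hypothetical configuration sketch (not part of the original logrus sources):
// wiring up a TextFormatter with a few of the options documented above. The
// timestamp format, field names and message are illustrative only.
func textFormatterUsageSketch() {
	logger := New()
	logger.Formatter = &TextFormatter{
		FullTimestamp:    true,
		TimestampFormat:  time.RFC3339, // overrides the default stamp format
		DisableColors:    true,         // force plain output even on a TTY
		QuoteEmptyFields: true,
	}
	logger.WithFields(Fields{"component": "example", "attempt": 1}).Info("formatter configured")
}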
| {
"pile_set_name": "Github"
} |
---
title: "CreateExpInstance Utility | Microsoft Docs"
ms.date: 11/15/2016
ms.prod: "visual-studio-dev14"
ms.technology: "vs-ide-sdk"
ms.topic: conceptual
helpviewer_keywords:
- "experimental builds"
- "experimental hive"
- "experimental instance"
- "createexpinstance"
- "createexpinst"
ms.assetid: 03779774-9401-49ae-997c-0c3ab25ed0d5
caps.latest.revision: 13
ms.author: gregvanl
manager: jillfra
---
# CreateExpInstance Utility
[!INCLUDE[vs2017banner](../../includes/vs2017banner.md)]
Use the CreateExpInstance utility to create, reset, or delete an experimental instance of Visual Studio. You can use the experimental instance to debug and test Visual Studio extensions without changing the underlying product.
## Syntax
```
CreateExpInstance.exe [/Create | /Reset | /Clean] /VSInstance=VsInstance /RootSuffix=Suffix
```
#### Parameters
/Create
Creates the experimental instance.
/Reset
Deletes the experimental instance, and then creates a new one.
/Clean
Deletes the experimental instance.
/VSInstance
The name of the directory that contains the base Visual Studio instance to copy.
/RootSuffix
The suffix to append to the name of the experimental instance directory.
## Remarks
When you are working on a Visual Studio extension, you can press F5 to open the default experimental instance and install the current extension. If no experimental instance is available, Visual Studio creates one that has the default settings.
The default location of the experimental instance depends on the Visual Studio version number. For example, for Visual Studio 2015, the location is %localappdata%\Microsoft\VisualStudio\14.0Exp\. All the files in that directory are considered part of the instance. Any additional experimental instances will not be loaded by Visual Studio unless their directory is renamed to the default location.
Visual Studio does not access the system registry when it opens the experimental instance. This differs from earlier versions of Visual Studio, which used an experimental version of the registry hive.
The CreateExpInstance utility replaces the VsRegEx utility.
The following example resets the default experimental instance of Visual Studio.
**CreateExpInstance.exe /Reset /VSInstance=14.0 /RootSuffix=Exp**
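The same pattern can be used to create or remove an instance. For example, a command along the following lines creates a separate instance under a custom suffix (the suffix shown is only an illustration; as noted above, Visual Studio will not load it until its directory is renamed to the default location).
**CreateExpInstance.exe /Create /VSInstance=14.0 /RootSuffix=MyTestExp**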
## See Also
[Releasing a Product](../../misc/releasing-a-visual-studio-integration-product.md)
| {
"pile_set_name": "Github"
} |
//===----------------------------------------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
// <deque>
// void pop_front()
// Erasing items from the beginning or the end of a deque shall not invalidate iterators
// to items that were not erased.
#include <deque>
#include <cassert>
#include "test_macros.h"
template <typename C>
void test(C c)
{
typename C::iterator it1 = c.begin() + 1;
typename C::iterator it2 = c.end() - 1;
c.pop_front();
typename C::iterator it3 = c.begin();
typename C::iterator it4 = c.end() - 1;
assert( it1 == it3);
assert( *it1 == *it3);
assert(&*it1 == &*it3);
assert( it2 == it4);
assert( *it2 == *it4);
assert(&*it2 == &*it4);
}
int main(int, char**)
{
std::deque<int> queue;
for (int i = 0; i < 20; ++i)
queue.push_back(i);
while (queue.size() > 1)
{
test(queue);
queue.pop_back();
}
return 0;
}
| {
"pile_set_name": "Github"
} |
require 'package'
class Zstd < Package
description 'Zstandard - Fast real-time compression algorithm'
homepage 'http://www.zstd.net'
version '1.3.4'
compatibility 'all'
source_url 'https://github.com/facebook/zstd/archive/v1.3.4.tar.gz'
source_sha256 '92e41b6e8dd26bbd46248e8aa1d86f1551bc221a796277ae9362954f26d605a9'
binary_url ({
aarch64: 'https://dl.bintray.com/chromebrew/chromebrew/zstd-1.3.4-chromeos-armv7l.tar.xz',
armv7l: 'https://dl.bintray.com/chromebrew/chromebrew/zstd-1.3.4-chromeos-armv7l.tar.xz',
i686: 'https://dl.bintray.com/chromebrew/chromebrew/zstd-1.3.4-chromeos-i686.tar.xz',
x86_64: 'https://dl.bintray.com/chromebrew/chromebrew/zstd-1.3.4-chromeos-x86_64.tar.xz',
})
binary_sha256 ({
aarch64: 'b07a39cc1d1261a2d7e6259f04e551823f41a1436bd4fe67729b4a569495362c',
armv7l: 'b07a39cc1d1261a2d7e6259f04e551823f41a1436bd4fe67729b4a569495362c',
i686: '4647ecc2d3f2a1d528acacdaf2c9ba1ca0afa09e74c04b81215092816058b75e',
x86_64: 'ad08a04fc5d985a006851254d6f6e20064f75db92081572e1d536969abf849dc',
})
def self.build
system 'make'
end
def self.install
system 'make',
"DESTDIR=#{CREW_DEST_DIR}",
"PREFIX=#{CREW_PREFIX}",
"LIBDIR=#{CREW_LIB_PREFIX}",
'install'
end
end
| {
"pile_set_name": "Github"
} |
/*
* AAC decoder wrapper
* Copyright (c) 2012 Martin Storsjo
*
* This file is part of FFmpeg.
*
* Permission to use, copy, modify, and/or distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
#include <fdk-aac/aacdecoder_lib.h>
#include "libavutil/channel_layout.h"
#include "libavutil/common.h"
#include "libavutil/opt.h"
#include "avcodec.h"
#include "internal.h"
/* The version macro was introduced at the same time as the setting enum was
 * changed, so this check should suffice. */
#ifndef AACDECODER_LIB_VL0
#define AAC_PCM_MAX_OUTPUT_CHANNELS AAC_PCM_OUTPUT_CHANNELS
#endif
enum ConcealMethod {
CONCEAL_METHOD_SPECTRAL_MUTING = 0,
CONCEAL_METHOD_NOISE_SUBSTITUTION = 1,
CONCEAL_METHOD_ENERGY_INTERPOLATION = 2,
CONCEAL_METHOD_NB,
};
typedef struct FDKAACDecContext {
const AVClass *class;
HANDLE_AACDECODER handle;
uint8_t *decoder_buffer;
int decoder_buffer_size;
uint8_t *anc_buffer;
int conceal_method;
int drc_level;
int drc_boost;
int drc_heavy;
int drc_cut;
int level_limit;
} FDKAACDecContext;
#define DMX_ANC_BUFFSIZE 128
#define DECODER_MAX_CHANNELS 8
#define DECODER_BUFFSIZE 2048 * sizeof(INT_PCM)
#define OFFSET(x) offsetof(FDKAACDecContext, x)
#define AD AV_OPT_FLAG_AUDIO_PARAM | AV_OPT_FLAG_DECODING_PARAM
static const AVOption fdk_aac_dec_options[] = {
{ "conceal", "Error concealment method", OFFSET(conceal_method), AV_OPT_TYPE_INT, { .i64 = CONCEAL_METHOD_NOISE_SUBSTITUTION }, CONCEAL_METHOD_SPECTRAL_MUTING, CONCEAL_METHOD_NB - 1, AD, "conceal" },
{ "spectral", "Spectral muting", 0, AV_OPT_TYPE_CONST, { .i64 = CONCEAL_METHOD_SPECTRAL_MUTING }, INT_MIN, INT_MAX, AD, "conceal" },
{ "noise", "Noise Substitution", 0, AV_OPT_TYPE_CONST, { .i64 = CONCEAL_METHOD_NOISE_SUBSTITUTION }, INT_MIN, INT_MAX, AD, "conceal" },
{ "energy", "Energy Interpolation", 0, AV_OPT_TYPE_CONST, { .i64 = CONCEAL_METHOD_ENERGY_INTERPOLATION }, INT_MIN, INT_MAX, AD, "conceal" },
{ "drc_boost", "Dynamic Range Control: boost, where [0] is none and [127] is max boost",
OFFSET(drc_boost), AV_OPT_TYPE_INT, { .i64 = -1 }, -1, 127, AD, NULL },
{ "drc_cut", "Dynamic Range Control: attenuation factor, where [0] is none and [127] is max compression",
OFFSET(drc_cut), AV_OPT_TYPE_INT, { .i64 = -1 }, -1, 127, AD, NULL },
{ "drc_level", "Dynamic Range Control: reference level, quantized to 0.25dB steps where [0] is 0dB and [127] is -31.75dB",
OFFSET(drc_level), AV_OPT_TYPE_INT, { .i64 = -1}, -1, 127, AD, NULL },
{ "drc_heavy", "Dynamic Range Control: heavy compression, where [1] is on (RF mode) and [0] is off",
OFFSET(drc_heavy), AV_OPT_TYPE_INT, { .i64 = -1}, -1, 1, AD, NULL },
#ifdef AACDECODER_LIB_VL0
{ "level_limit", "Signal level limiting", OFFSET(level_limit), AV_OPT_TYPE_INT, { .i64 = 0 }, -1, 1, AD },
#endif
{ NULL }
};
static const AVClass fdk_aac_dec_class = {
.class_name = "libfdk-aac decoder",
.item_name = av_default_item_name,
.option = fdk_aac_dec_options,
.version = LIBAVUTIL_VERSION_INT,
};
static int get_stream_info(AVCodecContext *avctx)
{
FDKAACDecContext *s = avctx->priv_data;
CStreamInfo *info = aacDecoder_GetStreamInfo(s->handle);
int channel_counts[0x24] = { 0 };
int i, ch_error = 0;
uint64_t ch_layout = 0;
if (!info) {
av_log(avctx, AV_LOG_ERROR, "Unable to get stream info\n");
return AVERROR_UNKNOWN;
}
if (info->sampleRate <= 0) {
av_log(avctx, AV_LOG_ERROR, "Stream info not initialized\n");
return AVERROR_UNKNOWN;
}
avctx->sample_rate = info->sampleRate;
avctx->frame_size = info->frameSize;
for (i = 0; i < info->numChannels; i++) {
AUDIO_CHANNEL_TYPE ctype = info->pChannelType[i];
if (ctype <= ACT_NONE || ctype >= FF_ARRAY_ELEMS(channel_counts)) {
av_log(avctx, AV_LOG_WARNING, "unknown channel type\n");
break;
}
channel_counts[ctype]++;
}
av_log(avctx, AV_LOG_DEBUG,
"%d channels - front:%d side:%d back:%d lfe:%d top:%d\n",
info->numChannels,
channel_counts[ACT_FRONT], channel_counts[ACT_SIDE],
channel_counts[ACT_BACK], channel_counts[ACT_LFE],
channel_counts[ACT_FRONT_TOP] + channel_counts[ACT_SIDE_TOP] +
channel_counts[ACT_BACK_TOP] + channel_counts[ACT_TOP]);
switch (channel_counts[ACT_FRONT]) {
case 4:
ch_layout |= AV_CH_LAYOUT_STEREO | AV_CH_FRONT_LEFT_OF_CENTER |
AV_CH_FRONT_RIGHT_OF_CENTER;
break;
case 3:
ch_layout |= AV_CH_LAYOUT_STEREO | AV_CH_FRONT_CENTER;
break;
case 2:
ch_layout |= AV_CH_LAYOUT_STEREO;
break;
case 1:
ch_layout |= AV_CH_FRONT_CENTER;
break;
default:
av_log(avctx, AV_LOG_WARNING,
"unsupported number of front channels: %d\n",
channel_counts[ACT_FRONT]);
ch_error = 1;
break;
}
if (channel_counts[ACT_SIDE] > 0) {
if (channel_counts[ACT_SIDE] == 2) {
ch_layout |= AV_CH_SIDE_LEFT | AV_CH_SIDE_RIGHT;
} else {
av_log(avctx, AV_LOG_WARNING,
"unsupported number of side channels: %d\n",
channel_counts[ACT_SIDE]);
ch_error = 1;
}
}
if (channel_counts[ACT_BACK] > 0) {
switch (channel_counts[ACT_BACK]) {
case 3:
ch_layout |= AV_CH_BACK_LEFT | AV_CH_BACK_RIGHT | AV_CH_BACK_CENTER;
break;
case 2:
ch_layout |= AV_CH_BACK_LEFT | AV_CH_BACK_RIGHT;
break;
case 1:
ch_layout |= AV_CH_BACK_CENTER;
break;
default:
av_log(avctx, AV_LOG_WARNING,
"unsupported number of back channels: %d\n",
channel_counts[ACT_BACK]);
ch_error = 1;
break;
}
}
if (channel_counts[ACT_LFE] > 0) {
if (channel_counts[ACT_LFE] == 1) {
ch_layout |= AV_CH_LOW_FREQUENCY;
} else {
av_log(avctx, AV_LOG_WARNING,
"unsupported number of LFE channels: %d\n",
channel_counts[ACT_LFE]);
ch_error = 1;
}
}
if (!ch_error &&
av_get_channel_layout_nb_channels(ch_layout) != info->numChannels) {
av_log(avctx, AV_LOG_WARNING, "unsupported channel configuration\n");
ch_error = 1;
}
if (ch_error)
avctx->channel_layout = 0;
else
avctx->channel_layout = ch_layout;
avctx->channels = info->numChannels;
return 0;
}
static av_cold int fdk_aac_decode_close(AVCodecContext *avctx)
{
FDKAACDecContext *s = avctx->priv_data;
if (s->handle)
aacDecoder_Close(s->handle);
av_freep(&s->decoder_buffer);
av_freep(&s->anc_buffer);
return 0;
}
static av_cold int fdk_aac_decode_init(AVCodecContext *avctx)
{
FDKAACDecContext *s = avctx->priv_data;
AAC_DECODER_ERROR err;
s->handle = aacDecoder_Open(avctx->extradata_size ? TT_MP4_RAW : TT_MP4_ADTS, 1);
if (!s->handle) {
av_log(avctx, AV_LOG_ERROR, "Error opening decoder\n");
return AVERROR_UNKNOWN;
}
if (avctx->extradata_size) {
if ((err = aacDecoder_ConfigRaw(s->handle, &avctx->extradata,
&avctx->extradata_size)) != AAC_DEC_OK) {
av_log(avctx, AV_LOG_ERROR, "Unable to set extradata\n");
return AVERROR_INVALIDDATA;
}
}
if ((err = aacDecoder_SetParam(s->handle, AAC_CONCEAL_METHOD,
s->conceal_method)) != AAC_DEC_OK) {
av_log(avctx, AV_LOG_ERROR, "Unable to set error concealment method\n");
return AVERROR_UNKNOWN;
}
if (avctx->request_channel_layout > 0 &&
avctx->request_channel_layout != AV_CH_LAYOUT_NATIVE) {
int downmix_channels = -1;
switch (avctx->request_channel_layout) {
case AV_CH_LAYOUT_STEREO:
case AV_CH_LAYOUT_STEREO_DOWNMIX:
downmix_channels = 2;
break;
case AV_CH_LAYOUT_MONO:
downmix_channels = 1;
break;
default:
av_log(avctx, AV_LOG_WARNING, "Invalid request_channel_layout\n");
break;
}
if (downmix_channels != -1) {
if (aacDecoder_SetParam(s->handle, AAC_PCM_MAX_OUTPUT_CHANNELS,
downmix_channels) != AAC_DEC_OK) {
av_log(avctx, AV_LOG_WARNING, "Unable to set output channels in the decoder\n");
} else {
s->anc_buffer = av_malloc(DMX_ANC_BUFFSIZE);
if (!s->anc_buffer) {
av_log(avctx, AV_LOG_ERROR, "Unable to allocate ancillary buffer for the decoder\n");
return AVERROR(ENOMEM);
}
if (aacDecoder_AncDataInit(s->handle, s->anc_buffer, DMX_ANC_BUFFSIZE)) {
av_log(avctx, AV_LOG_ERROR, "Unable to register downmix ancillary buffer in the decoder\n");
return AVERROR_UNKNOWN;
}
}
}
}
if (s->drc_boost != -1) {
if (aacDecoder_SetParam(s->handle, AAC_DRC_BOOST_FACTOR, s->drc_boost) != AAC_DEC_OK) {
av_log(avctx, AV_LOG_ERROR, "Unable to set DRC boost factor in the decoder\n");
return AVERROR_UNKNOWN;
}
}
if (s->drc_cut != -1) {
if (aacDecoder_SetParam(s->handle, AAC_DRC_ATTENUATION_FACTOR, s->drc_cut) != AAC_DEC_OK) {
av_log(avctx, AV_LOG_ERROR, "Unable to set DRC attenuation factor in the decoder\n");
return AVERROR_UNKNOWN;
}
}
if (s->drc_level != -1) {
if (aacDecoder_SetParam(s->handle, AAC_DRC_REFERENCE_LEVEL, s->drc_level) != AAC_DEC_OK) {
av_log(avctx, AV_LOG_ERROR, "Unable to set DRC reference level in the decoder\n");
return AVERROR_UNKNOWN;
}
}
if (s->drc_heavy != -1) {
if (aacDecoder_SetParam(s->handle, AAC_DRC_HEAVY_COMPRESSION, s->drc_heavy) != AAC_DEC_OK) {
av_log(avctx, AV_LOG_ERROR, "Unable to set DRC heavy compression in the decoder\n");
return AVERROR_UNKNOWN;
}
}
#ifdef AACDECODER_LIB_VL0
if (aacDecoder_SetParam(s->handle, AAC_PCM_LIMITER_ENABLE, s->level_limit) != AAC_DEC_OK) {
av_log(avctx, AV_LOG_ERROR, "Unable to set in signal level limiting in the decoder\n");
return AVERROR_UNKNOWN;
}
#endif
avctx->sample_fmt = AV_SAMPLE_FMT_S16;
s->decoder_buffer_size = DECODER_BUFFSIZE * DECODER_MAX_CHANNELS;
s->decoder_buffer = av_malloc(s->decoder_buffer_size);
if (!s->decoder_buffer)
return AVERROR(ENOMEM);
return 0;
}
static int fdk_aac_decode_frame(AVCodecContext *avctx, void *data,
int *got_frame_ptr, AVPacket *avpkt)
{
FDKAACDecContext *s = avctx->priv_data;
AVFrame *frame = data;
int ret;
AAC_DECODER_ERROR err;
UINT valid = avpkt->size;
err = aacDecoder_Fill(s->handle, &avpkt->data, &avpkt->size, &valid);
if (err != AAC_DEC_OK) {
av_log(avctx, AV_LOG_ERROR, "aacDecoder_Fill() failed: %x\n", err);
return AVERROR_INVALIDDATA;
}
err = aacDecoder_DecodeFrame(s->handle, (INT_PCM *) s->decoder_buffer, s->decoder_buffer_size / sizeof(INT_PCM), 0);
if (err == AAC_DEC_NOT_ENOUGH_BITS) {
ret = avpkt->size - valid;
goto end;
}
if (err != AAC_DEC_OK) {
av_log(avctx, AV_LOG_ERROR,
"aacDecoder_DecodeFrame() failed: %x\n", err);
ret = AVERROR_UNKNOWN;
goto end;
}
if ((ret = get_stream_info(avctx)) < 0)
goto end;
frame->nb_samples = avctx->frame_size;
if ((ret = ff_get_buffer(avctx, frame, 0)) < 0)
goto end;
memcpy(frame->extended_data[0], s->decoder_buffer,
avctx->channels * avctx->frame_size *
av_get_bytes_per_sample(avctx->sample_fmt));
*got_frame_ptr = 1;
ret = avpkt->size - valid;
end:
return ret;
}
static av_cold void fdk_aac_decode_flush(AVCodecContext *avctx)
{
FDKAACDecContext *s = avctx->priv_data;
AAC_DECODER_ERROR err;
if (!s->handle)
return;
if ((err = aacDecoder_SetParam(s->handle,
AAC_TPDEC_CLEAR_BUFFER, 1)) != AAC_DEC_OK)
av_log(avctx, AV_LOG_WARNING, "failed to clear buffer when flushing\n");
}
AVCodec ff_libfdk_aac_decoder = {
.name = "libfdk_aac",
.long_name = NULL_IF_CONFIG_SMALL("Fraunhofer FDK AAC"),
.type = AVMEDIA_TYPE_AUDIO,
.id = AV_CODEC_ID_AAC,
.priv_data_size = sizeof(FDKAACDecContext),
.init = fdk_aac_decode_init,
.decode = fdk_aac_decode_frame,
.close = fdk_aac_decode_close,
.flush = fdk_aac_decode_flush,
.capabilities = AV_CODEC_CAP_DR1 | AV_CODEC_CAP_CHANNEL_CONF,
.priv_class = &fdk_aac_dec_class,
.caps_internal = FF_CODEC_CAP_INIT_THREADSAFE |
FF_CODEC_CAP_INIT_CLEANUP,
.wrapper_name = "libfdk",
};
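/*
 * Hypothetical usage sketch, not part of FFmpeg: an application that wants
 * this wrapper specifically (rather than whichever AAC decoder is the
 * default) can look it up by name. avcodec_find_decoder_by_name() and
 * avcodec_find_decoder() are real libavcodec API; the function below and its
 * fallback behaviour are illustrative only.
 */
static av_unused AVCodec *sketch_find_fdk_aac_decoder(void)
{
    AVCodec *dec = avcodec_find_decoder_by_name("libfdk_aac");
    return dec ? dec : avcodec_find_decoder(AV_CODEC_ID_AAC);
}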
| {
"pile_set_name": "Github"
} |
In ptw32_InterlockedCompareExchange.c, I've added a section for
Borland's compiler; it's identical to that for the MS compiler except
that it uses /* ... */ comments instead of ; comments.
[RPJ: need to define HAVE_TASM32 in config.h to use the above.]
The other file is a makefile suitable for use with Borland's compiler
(run "make -fBmakefile" in the directory). It builds a single version
of the library, pthreadBC.dll and the corresponding pthreadBC.lib
import library, which is comparable to the pthreadVC version; I can't
personally see any demand for the versions that include structured or
C++ exception cancellation handling so I haven't attempted to build
those versions of the library. (I imagine a static version might be
of use to some, but we can't legally use that on my commercial
projects so I can't try that out, unfortunately.)
[RPJ: Added tests\Bmakefile as well.]
Borland C++ doesn't define the ENOSYS constant used by pthreads-win32;
rather than make more extensive patches to the pthreads-win32 source I
have defined a mostly-arbitrary constant for it in the makefile. However,
this doesn't make it visible to the application using the library, so if
anyone actually wants to use this constant in their apps (why?) someone
might like to add a separate NEED_BCC_something define to expose it.
The makefile also #defines EDEADLK as EDEADLOCK, _timeb as timeb, and
_ftime as ftime, to deal with the minor differences between the two
RTLs' naming conventions, and sets the compiler flags as required to
get a normal compile of the library.
[RPJ: Moved errno values and _timeb etc to pthread.h, so apps will also
use them.]
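(Illustration only, not copied from Bmakefile or pthread.h: expressed as
preprocessor definitions, the Borland compatibility mappings described above
amount to something like the following, with the ENOSYS value standing in
for the "mostly-arbitrary constant" mentioned earlier.
    #if defined(__BORLANDC__)
    #  define _timeb   timeb
    #  define _ftime   ftime
    #  define EDEADLK  EDEADLOCK
    #  ifndef ENOSYS
    #    define ENOSYS 140    /* placeholder value only */
    #  endif
    #endif
)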
(While I'm on the subject, the reason Borland users should recompile
the library, rather than using the impdef/implib technique suggested
previously on the mailing list, is that a) the errno constants are
different, so the results returned by the pthread_* functions can be
meaningless, and b) the errno variable/pseudo-variable itself is
different in the MS & BCC runtimes, so you can't access the
pthreadVC's errno from a Borland C++-compiled host application
correctly - I imagine there are other potential problems from the RTL
mismatch too.)
[RPJ: Make sure you use the same RTL in both dll and application builds.
The dll and tests Bmakefiles use cw32mti.lib. Having some trouble with
memory read exceptions running the test suite using BCC55.]
Best regards,
Will
--
Will Bryant
Systems Architect, eCOSM Limited
Cell +64 21 655 443, office +64 3 365 4176
http://www.ecosm.com/
| {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="utf-8"?>
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:orientation="vertical" >
<RelativeLayout
android:layout_width="match_parent"
android:layout_height="wrap_content" >
<Button
android:id="@+id/btn_doodle_done"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_alignParentRight="true"
android:text="Done" />
<Button
android:id="@+id/btn_doodle_undo"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_toLeftOf="@id/btn_doodle_done"
android:text="Undo" />
</RelativeLayout>
<RadioGroup
android:id="@+id/rg_doodle_menu_group"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:background="#3C3B43"
android:orientation="horizontal" >
<RadioButton
android:id="@+id/rb_doodle_paint_style"
style="@style/sub_menu_rbtn_style"
android:checked="true"
android:text="@string/subtitle_style" />
<RadioButton
android:id="@+id/rb_doodle_paint_ttf"
style="@style/sub_menu_rbtn_style"
android:text="@string/subtitle_font_style"
android:visibility="gone" />
<RadioButton
android:id="@+id/rb_doodle_paint_color"
style="@style/sub_menu_rbtn_style"
android:text="@string/subtitle_color" />
<RadioButton
android:id="@+id/rb_doodle_paint_size"
style="@style/sub_menu_rbtn_style"
android:text="@string/subtitle_size" />
</RadioGroup>
<RelativeLayout
android:id="@+id/rl_doodle_pain_setting_layout"
android:layout_width="match_parent"
android:layout_height="match_parent" >
<include
android:id="@+id/rl_doodle_paint_style_layout"
android:layout_width="match_parent"
android:layout_height="match_parent"
layout="@layout/layout_doodle_paint_style" />
<include
android:id="@+id/rl_doodle_paint_ttf_layout"
android:layout_width="match_parent"
android:layout_height="match_parent"
layout="@layout/layout_doodle_paint_ttf"
android:visibility="gone" />
<include
android:id="@+id/rl_doodle_paint_color_layout"
android:layout_width="match_parent"
android:layout_height="match_parent"
layout="@layout/layout_doodle_paint_color"
android:visibility="gone" />
<include
android:id="@+id/rl_doodle_paint_size_layout"
android:layout_width="match_parent"
android:layout_height="match_parent"
layout="@layout/layout_doodle_paint_size"
android:visibility="gone" />
</RelativeLayout>
</LinearLayout> | {
"pile_set_name": "Github"
} |
diff --git a/drivers/net/wireless/ath5k/base.c b/drivers/net/wireless/ath5k/base.c
index 217d506..4e7a949 100644
--- a/drivers/net/wireless/ath5k/base.c
+++ b/drivers/net/wireless/ath5k/base.c
@@ -1219,7 +1219,9 @@ ath5k_txbuf_setup(struct ath5k_softc *sc, struct ath5k_buf *bf)
bf->skbaddr = pci_map_single(sc->pdev, skb->data, skb->len,
PCI_DMA_TODEVICE);
- if (info->flags & IEEE80211_TX_CTL_NO_ACK)
+ if (info->flags & IEEE80211_TX_CTL_NO_ACK ||
+ (info->flags & IEEE80211_TX_CTL_INJECTED &&
+ !(ieee80211_has_morefrags(((struct ieee80211_hdr *)skb->data)->frame_control))))
flags |= AR5K_TXDESC_NOACK;
pktlen = skb->len;
| {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="utf-8"?>
<Project ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
<ItemGroup>
<Filter Include="Source Files">
<UniqueIdentifier>{4FC737F1-C7A5-4376-A066-2A32D752A2FF}</UniqueIdentifier>
<Extensions>cpp;c;cc;cxx;def;odl;idl;hpj;bat;asm;asmx</Extensions>
</Filter>
<Filter Include="Header Files">
<UniqueIdentifier>{93995380-89BD-4b04-88EB-625FBE52EBFB}</UniqueIdentifier>
<Extensions>h;hpp;hxx;hm;inl;inc;xsd</Extensions>
</Filter>
<Filter Include="Resource Files">
<UniqueIdentifier>{67DA6AB6-F800-4c08-8B7A-83BB121AAD01}</UniqueIdentifier>
<Extensions>rc;ico;cur;bmp;dlg;rc2;rct;bin;rgs;gif;jpg;jpeg;jpe;resx;tiff;tif;png;wav</Extensions>
</Filter>
</ItemGroup>
<ItemGroup>
<ClCompile Include="..\..\..\progs\demos\CallbackMaker\CallbackMaker.c">
<Filter>Source Files</Filter>
</ClCompile>
</ItemGroup>
</Project> | {
"pile_set_name": "Github"
} |
klog
====
klog is a permanent fork of https://github.com/golang/glog.
## Why was klog created?
The decision to create klog was one that wasn't made lightly, but it was necessary due to some
drawbacks that are present in [glog](https://github.com/golang/glog). Ultimately, the fork was created due to glog not being under active development; this can be seen in the glog README:
> The code in this repo [...] is not itself under development
This makes us unable to solve many use cases without a fork. The factors that contributed to needing feature development are listed below:
* `glog` [presents a lot of "gotchas"](https://github.com/kubernetes/kubernetes/issues/61006) and introduces challenges in containerized environments, all of which aren't well documented.
* `glog` doesn't provide an easy way to test logs, which detracts from the stability of software using it
* A long term goal is to implement a logging interface that allows us to add context, change output format, etc.
Historical context is available here:
* https://github.com/kubernetes/kubernetes/issues/61006
* https://github.com/kubernetes/kubernetes/issues/70264
* https://groups.google.com/forum/#!msg/kubernetes-sig-architecture/wCWiWf3Juzs/hXRVBH90CgAJ
* https://groups.google.com/forum/#!msg/kubernetes-dev/7vnijOMhLS0/1oRiNtigBgAJ
----
How to use klog
===============
- Replace imports for `github.com/golang/glog` with `k8s.io/klog`
- Use `klog.InitFlags(nil)` explicitly for initializing global flags as we no longer use `init()` method to register the flags
- You can now use `log-file` instead of `log-dir` for logging to a single file (See `examples/log_file/usage_log_file.go`)
- If you want to redirect everything logged using klog somewhere else (say syslog!), you can use `klog.SetOutput()` method and supply a `io.Writer`. (See `examples/set_output/usage_set_output.go`)
- For more logging conventions (See [Logging Conventions](https://github.com/kubernetes/community/blob/master/contributors/devel/sig-instrumentation/logging.md))
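A minimal sketch of the first two steps above (the `k8s.io/klog` import and the explicit `klog.InitFlags(nil)` call described in the list; the log calls themselves are just illustrative):
```go
package main

import (
	"flag"

	"k8s.io/klog"
)

func main() {
	// klog no longer registers its flags in an init() function, so do it
	// explicitly before flag.Parse() picks up -v, -logtostderr, etc.
	klog.InitFlags(nil)
	flag.Parse()
	defer klog.Flush()

	klog.Info("application started")
	klog.V(2).Infof("verbose detail: %d workers", 4)
}
```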
### Coexisting with glog
This package can be used side by side with glog. [This example](examples/coexist_glog/coexist_glog.go) shows how to initialize and synchronize flags from the global `flag.CommandLine` FlagSet. In addition, the example makes use of stderr as combined output by setting `alsologtostderr` (or `logtostderr`) to `true`. A hedged sketch of that pattern follows: klog gets its own FlagSet, and the values already parsed into `flag.CommandLine` (where glog registered its flags) are copied across.
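```go
package main

import (
	"flag"

	"github.com/golang/glog"
	"k8s.io/klog"
)

func main() {
	// glog registers its flags on flag.CommandLine in its init().
	flag.Set("alsologtostderr", "true")
	flag.Parse()

	// Give klog a separate FlagSet, then copy the parsed values so both
	// libraries agree on -v, -alsologtostderr, and friends.
	klogFlags := flag.NewFlagSet("klog", flag.ExitOnError)
	klog.InitFlags(klogFlags)
	flag.CommandLine.VisitAll(func(f *flag.Flag) {
		if target := klogFlags.Lookup(f.Name); target != nil {
			target.Value.Set(f.Value.String())
		}
	})

	glog.Info("logged via glog")
	klog.Info("logged via klog")
	glog.Flush()
	klog.Flush()
}
```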
## Community, discussion, contribution, and support
Learn how to engage with the Kubernetes community on the [community page](http://kubernetes.io/community/).
You can reach the maintainers of this project at:
- [Slack](https://kubernetes.slack.com/messages/sig-architecture)
- [Mailing List](https://groups.google.com/forum/#!forum/kubernetes-sig-architecture)
### Code of conduct
Participation in the Kubernetes community is governed by the [Kubernetes Code of Conduct](code-of-conduct.md).
----
glog
====
Leveled execution logs for Go.
This is an efficient pure Go implementation of leveled logs in the
manner of the open source C++ package
https://github.com/google/glog
By binding methods to booleans it is possible to use the log package
without paying the expense of evaluating the arguments to the log.
Through the -vmodule flag, the package also provides fine-grained
control over logging at the file level.
The comment from glog.go introduces the ideas:
Package glog implements logging analogous to the Google-internal
C++ INFO/ERROR/V setup. It provides functions Info, Warning,
Error, Fatal, plus formatting variants such as Infof. It
also provides V-style logging controlled by the -v and
-vmodule=file=2 flags.
Basic examples:
glog.Info("Prepare to repel boarders")
glog.Fatalf("Initialization failed: %s", err)
See the documentation for the V function for an explanation
of these examples:
if glog.V(2) {
glog.Info("Starting transaction...")
}
glog.V(2).Infoln("Processed", nItems, "elements")
The repository contains an open source version of the log package
used inside Google. The master copy of the source lives inside
Google, not here. The code in this repo is for export only and is not itself
under development. Feature requests will be ignored.
Send bug reports to [email protected].
| {
"pile_set_name": "Github"
} |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See LICENSE in the project root for license information.
using Microsoft.MixedReality.Toolkit.Input;
using UnityEngine;
using UnityEngine.EventSystems;
namespace Microsoft.MixedReality.Toolkit.Physics
{
/// <summary>
/// Contains information about which game object has the focus currently.
/// Also contains information about the normal of that point.
/// </summary>
public struct FocusDetails
{
/// <summary>
/// Distance along the ray until a hit, or until the end of the ray if no hit
/// </summary>
public float RayDistance { get; set; }
/// <summary>
/// The hit point of the raycast.
/// </summary>
public Vector3 Point { get; set; }
/// <summary>
/// The normal of the raycast.
/// </summary>
public Vector3 Normal { get; set; }
/// <summary>
/// The object hit by the last raycast.
/// </summary>
public GameObject Object { get; set; }
/// <summary>
/// The last raycast hit info.
/// </summary>
public MixedRealityRaycastHit LastRaycastHit { get; set; }
/// <summary>
/// The last raycast hit info for graphic raycast
/// </summary>
public RaycastResult LastGraphicsRaycastResult { get; set; }
public Vector3 PointLocalSpace { get; set; }
public Vector3 NormalLocalSpace { get; set; }
}
}
| {
"pile_set_name": "Github"
} |
<nav id="my-menu">
<div>
<p>{{ site.title }}</p>
<ul class="pages">
<li><a href="{{ site.baseurl }}/"><i class="fa fa-home"></i> Home</a></li>
<li><a href="{{ site.baseurl }}/posts/"><i class="fa fa-archive"></i> All Posts</a></li>
<li><a href="{{ site.baseurl }}/search/"><i class="fa fa-search"></i> Search</a></li>
</ul>
{% include social_links.html %}
</div>
</nav>
<div class="menu-button" href="#menu"><i class="fa fa-bars"></i></div>
| {
"pile_set_name": "Github"
} |
// Copyright 2015 The etcd Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package etcdserver
import (
"errors"
"fmt"
)
var (
ErrUnknownMethod = errors.New("etcdserver: unknown method")
ErrStopped = errors.New("etcdserver: server stopped")
ErrCanceled = errors.New("etcdserver: request cancelled")
ErrTimeout = errors.New("etcdserver: request timed out")
ErrTimeoutDueToLeaderFail = errors.New("etcdserver: request timed out, possibly due to previous leader failure")
ErrTimeoutDueToConnectionLost = errors.New("etcdserver: request timed out, possibly due to connection lost")
ErrTimeoutLeaderTransfer = errors.New("etcdserver: request timed out, leader transfer took too long")
ErrNotEnoughStartedMembers = errors.New("etcdserver: re-configuration failed due to not enough started members")
ErrNoLeader = errors.New("etcdserver: no leader")
ErrNotLeader = errors.New("etcdserver: not leader")
ErrRequestTooLarge = errors.New("etcdserver: request is too large")
ErrNoSpace = errors.New("etcdserver: no space")
ErrTooManyRequests = errors.New("etcdserver: too many requests")
ErrUnhealthy = errors.New("etcdserver: unhealthy cluster")
ErrKeyNotFound = errors.New("etcdserver: key not found")
ErrCorrupt = errors.New("etcdserver: corrupt cluster")
)
type DiscoveryError struct {
Op string
Err error
}
func (e DiscoveryError) Error() string {
return fmt.Sprintf("failed to %s discovery cluster (%v)", e.Op, e.Err)
}
| {
"pile_set_name": "Github"
} |
@{
ViewBag.Title = "Orders for " + ViewBag.ConferenceName;
}
<h1 class="page__head page__wrapper">Locate registration</h1>
<div class="page__content">
<div class="page__wrapper content-main content-main_register">
<div class="content-main__wrapper">
<div class="content-main__main">
@using (Html.BeginForm()) {
@*<div class="content-main__header"></div>*@
<div class="content">
<div class="form">
<fieldset class="form__fieldset">
<label class="form__label">Email address:<span>*</span></label>
<div class="form__field">
<input type="text" class="" name="email" />
</div>
</fieldset>
<fieldset class="form__fieldset">
<label class="form__label">Order #:<span>*</span></label>
<div class="form__field">
<input type="text" class="" name="accessCode" />
</div>
</fieldset>
<div class="g-clear"></div>
</div>
</div>
<div class="nav content-main__nav">
@*<a class="nav__left" href="#">Back</a>*@
<button id="find" class="nav__right" type="submit">Find</button>
</div>
}
</div>
</div>
</div>
</div>
| {
"pile_set_name": "Github"
} |
module Setup
class FileStoreConfig
include CenitScoped
include RailsAdmin::Models::Setup::FileStoreConfigAdmin
deny :all
build_in_data_type
belongs_to :data_type, class_name: Setup::FileDataType.to_s, inverse_of: nil
field :file_store, type: Module, default: -> { Cenit.default_file_store }
field :public_read, type: Boolean, default: false
attr_readonly :data_type
validates_presence_of :data_type, :file_store
before_save do
start_migration unless @skip_migration_callback
end
def start_migration
if persisted? && (changed_attributes.key?('file_store') || changed_attributes.key?('public_read'))
if Setup::FileStoreMigration.cannot_migrate?(data_type)
errors.add(:file_store, 'can not be updated')
else
msg = { data_type_id: data_type_id }
if changed_attributes.key?('file_store')
msg[:file_store] = file_store.to_s
reset_attribute!('file_store')
end
if msg.key?(:file_store) || changed_attributes.key?('public_read')
msg[:public_read] = public_read.to_s
reset_attribute!('public_read')
end
Setup::FileStoreMigration.process(msg)
end
end
errors.blank?
end
def save(options = {})
@skip_migration_callback = options.delete(:skip_migration)
super
@skip_migration_callback = false
end
class << self
def file_store_enum
Cenit.file_stores.map { |fs| [fs.label, fs] }.to_h
end
end
end
end
| {
"pile_set_name": "Github"
} |
# model settings
model = dict(
type='MaskRCNN',
pretrained='modelzoo://resnet101',
backbone=dict(
type='ResNet',
depth=101,
num_stages=4,
out_indices=(0, 1, 2, 3),
frozen_stages=1,
style='pytorch',
gcb=dict(
ratio=1./16.,
),
stage_with_gcb=(False, True, True, True)),
neck=dict(
type='FPN',
in_channels=[256, 512, 1024, 2048],
out_channels=256,
num_outs=5),
rpn_head=dict(
type='RPNHead',
in_channels=256,
feat_channels=256,
anchor_scales=[8],
anchor_ratios=[0.5, 1.0, 2.0],
anchor_strides=[4, 8, 16, 32, 64],
target_means=[.0, .0, .0, .0],
target_stds=[1.0, 1.0, 1.0, 1.0],
loss_cls=dict(
type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0),
loss_bbox=dict(type='SmoothL1Loss', beta=1.0 / 9.0, loss_weight=1.0)),
bbox_roi_extractor=dict(
type='SingleRoIExtractor',
roi_layer=dict(type='RoIAlign', out_size=7, sample_num=2),
out_channels=256,
featmap_strides=[4, 8, 16, 32]),
bbox_head=dict(
type='SharedFCBBoxHead',
num_fcs=2,
in_channels=256,
fc_out_channels=1024,
roi_feat_size=7,
num_classes=81,
target_means=[0., 0., 0., 0.],
target_stds=[0.1, 0.1, 0.2, 0.2],
reg_class_agnostic=False,
loss_cls=dict(
type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0),
loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0)),
mask_roi_extractor=dict(
type='SingleRoIExtractor',
roi_layer=dict(type='RoIAlign', out_size=14, sample_num=2),
out_channels=256,
featmap_strides=[4, 8, 16, 32]),
mask_head=dict(
type='FCNMaskHead',
num_convs=4,
in_channels=256,
conv_out_channels=256,
num_classes=81,
loss_mask=dict(
type='CrossEntropyLoss', use_mask=True, loss_weight=1.0)))
# model training and testing settings
train_cfg = dict(
rpn=dict(
assigner=dict(
type='MaxIoUAssigner',
pos_iou_thr=0.7,
neg_iou_thr=0.3,
min_pos_iou=0.3,
ignore_iof_thr=-1),
sampler=dict(
type='RandomSampler',
num=256,
pos_fraction=0.5,
neg_pos_ub=-1,
add_gt_as_proposals=False),
allowed_border=0,
pos_weight=-1,
debug=False),
rpn_proposal=dict(
nms_across_levels=False,
nms_pre=2000,
nms_post=2000,
max_num=2000,
nms_thr=0.7,
min_bbox_size=0),
rcnn=dict(
assigner=dict(
type='MaxIoUAssigner',
pos_iou_thr=0.5,
neg_iou_thr=0.5,
min_pos_iou=0.5,
ignore_iof_thr=-1),
sampler=dict(
type='RandomSampler',
num=512,
pos_fraction=0.25,
neg_pos_ub=-1,
add_gt_as_proposals=True),
mask_size=28,
pos_weight=-1,
debug=False))
test_cfg = dict(
rpn=dict(
nms_across_levels=False,
nms_pre=1000,
nms_post=1000,
max_num=1000,
nms_thr=0.7,
min_bbox_size=0),
rcnn=dict(
score_thr=0.05,
nms=dict(type='nms', iou_thr=0.5),
max_per_img=100,
mask_thr_binary=0.5))
# dataset settings
dataset_type = 'CocoDataset'
data_root = 'data/coco/'
img_norm_cfg = dict(
mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True)
data = dict(
imgs_per_gpu=2,
workers_per_gpu=2,
train=dict(
type=dataset_type,
ann_file=data_root + 'annotations/instances_train2017.json',
img_prefix=data_root + 'train2017/',
img_scale=(1333, 800),
img_norm_cfg=img_norm_cfg,
size_divisor=32,
flip_ratio=0.5,
with_mask=True,
with_crowd=True,
with_label=True),
val=dict(
type=dataset_type,
ann_file=data_root + 'annotations/instances_val2017.json',
img_prefix=data_root + 'val2017/',
img_scale=(1333, 800),
img_norm_cfg=img_norm_cfg,
size_divisor=32,
flip_ratio=0,
with_mask=True,
with_crowd=True,
with_label=True),
test=dict(
type=dataset_type,
ann_file=data_root + 'annotations/instances_val2017.json',
img_prefix=data_root + 'val2017/',
img_scale=(1333, 800),
img_norm_cfg=img_norm_cfg,
size_divisor=32,
flip_ratio=0,
with_mask=False,
with_label=False,
test_mode=True))
# optimizer
optimizer = dict(type='SGD', lr=0.02, momentum=0.9, weight_decay=0.0001)
optimizer_config = dict(grad_clip=dict(max_norm=35, norm_type=2))
# learning policy
lr_config = dict(
policy='step',
warmup='linear',
warmup_iters=500,
warmup_ratio=1.0 / 3,
step=[8, 11])
checkpoint_config = dict(interval=1)
# yapf:disable
log_config = dict(
interval=50,
hooks=[
dict(type='TextLoggerHook'),
# dict(type='TensorboardLoggerHook')
])
# yapf:enable
# runtime settings
total_epochs = 12
dist_params = dict(backend='nccl')
log_level = 'INFO'
work_dir = './work_dirs/mask_rcnn_r16_gcb_c3-c5_r101_fpn_1x'
load_from = None
resume_from = None
workflow = [('train', 1)]
| {
"pile_set_name": "Github"
} |
'use strict';
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = _createTester;
var _noop = require('lodash/noop');
var _noop2 = _interopRequireDefault(_noop);
var _breakLoop = require('./breakLoop');
var _breakLoop2 = _interopRequireDefault(_breakLoop);
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
function _createTester(check, getResult) {
return function (eachfn, arr, iteratee, cb) {
cb = cb || _noop2.default;
var testPassed = false;
var testResult;
eachfn(arr, function (value, _, callback) {
iteratee(value, function (err, result) {
if (err) {
callback(err);
} else if (check(result) && !testResult) {
testPassed = true;
testResult = getResult(true, value);
callback(null, _breakLoop2.default);
} else {
callback();
}
});
}, function (err) {
if (err) {
cb(err);
} else {
cb(null, testPassed ? testResult : getResult(false));
}
});
};
}
module.exports = exports['default']; | {
"pile_set_name": "Github"
} |
[](https://travis-ci.org/ZipArchive/ZipArchive)
# SSZipArchive
ZipArchive is a simple utility class for zipping and unzipping files on iOS, macOS and tvOS.
- Unzip zip files;
- Unzip password protected zip files;
- Unzip AES encrypted zip files;
- Create zip files;
- Create password protected zip files;
- Create AES encrypted zip files;
- Choose compression level;
- Append to existing zip files;
- Zip-up NSData instances. (with a filename)
## Installation and Setup
*The main release branch is configured to support Objective-C and Swift 3+.*
SSZipArchive works on Xcode 7-9 and above, iOS 8-11 and above.
### CocoaPods
In your Podfile:
`pod 'SSZipArchive'`
### Carthage
In your Cartfile:
`github "ZipArchive/ZipArchive"`
### Manual
1. Add the `SSZipArchive` and `minizip` folders to your project.
2. Add the `libz` library to your target
SSZipArchive requires ARC.
## Usage
### Objective-C
```objective-c
// Create
[SSZipArchive createZipFileAtPath:zipPath withContentsOfDirectory:sampleDataPath];
// Unzip
[SSZipArchive unzipFileAtPath:zipPath toDestination:unzipPath];
```
### Swift
```swift
// Create
SSZipArchive.createZipFileAtPath(zipPath, withContentsOfDirectory: sampleDataPath)
// Unzip
SSZipArchive.unzipFileAtPath(zipPath, toDestination: unzipPath)
```
## License
SSZipArchive is protected under the [MIT license](https://github.com/samsoffes/ssziparchive/raw/master/LICENSE) and our slightly modified version of [Minizip](https://github.com/nmoinvaz/minizip) 1.2 is licensed under the [Zlib license](http://www.zlib.net/zlib_license.html).
## Acknowledgments
* Big thanks to [aish](http://code.google.com/p/ziparchive) for creating [ZipArchive](http://code.google.com/p/ziparchive). The project that inspired SSZipArchive.
* Thank you [@soffes](https://github.com/soffes) for the actual name of SSZipArchive.
* Thank you [@randomsequence](https://github.com/randomsequence) for implementing the creation support tech.
* Thank you [@johnezang](https://github.com/johnezang) for all his amazing help along the way.
| {
"pile_set_name": "Github"
} |
package org.stepik.android.remote.notification
import io.reactivex.Completable
import io.reactivex.Single
import org.stepic.droid.model.NotificationCategory
import org.stepic.droid.notifications.model.Notification
import org.stepic.droid.notifications.model.NotificationStatuses
import org.stepic.droid.util.PagedList
import org.stepik.android.data.notification.source.NotificationRemoteDataSource
import org.stepik.android.remote.base.mapper.toPagedList
import org.stepik.android.remote.notification.model.NotificationRequest
import org.stepik.android.remote.notification.model.NotificationResponse
import org.stepik.android.remote.notification.model.NotificationStatusesResponse
import org.stepik.android.remote.notification.service.NotificationService
import javax.inject.Inject
class NotificationRemoteDataSourceImpl
@Inject
constructor(
private val notificationService: NotificationService
) : NotificationRemoteDataSource {
override fun putNotifications(vararg notificationIds: Long, isRead: Boolean): Completable =
Completable.concat(notificationIds.map { id ->
val notification = Notification()
notification.isUnread = !isRead
notificationService.putNotification(id, NotificationRequest(notification))
})
override fun getNotifications(notificationCategory: NotificationCategory, page: Int): Single<PagedList<Notification>> =
notificationService
.getNotifications(page, type = getNotificationCategoryString(notificationCategory))
.map { it.toPagedList(NotificationResponse::notifications) }
override fun markNotificationAsRead(notificationCategory: NotificationCategory): Completable =
notificationService
.markNotificationAsRead(getNotificationCategoryString(notificationCategory))
override fun getNotificationStatuses(): Single<List<NotificationStatuses>> =
notificationService
.getNotificationStatuses()
.map(NotificationStatusesResponse::notificationStatuses)
private fun getNotificationCategoryString(notificationCategory: NotificationCategory): String? =
if (notificationCategory === NotificationCategory.all) {
null
} else {
notificationCategory.name
}
} | {
"pile_set_name": "Github"
} |
# Contributing
We would love to have people submit pull requests and help make `grpc-ecosystem/go-grpc-middleware` even better 👍.
Fork, then clone the repo:
```bash
git clone [email protected]:your-username/go-grpc-middleware.git
```
Before checking in please run the following:
```bash
make all
```
This will `vet`, `fmt`, regenerate documentation and run all tests.
Push to your fork and open a pull request. | {
"pile_set_name": "Github"
} |
/***************************************************************************
* ARM Stack Unwinder, [email protected]
 * Updated and adapted, with several bug fixes, in 2018 by Eduardo José Tagle
*
* This program is PUBLIC DOMAIN.
* This means that there is no copyright and anyone is able to take a copy
* for free and use it as they wish, with or without modifications, and in
* any context, commercially or otherwise. The only limitation is that I
* don't guarantee that the software is fit for any purpose or accept any
 * liability for its use or misuse - this software is without warranty.
***************************************************************************
* File Description: Implementation of the interface into the ARM unwinder.
**************************************************************************/
#if defined(__arm__) || defined(__thumb__)
#define MODULE_NAME "UNWINDER"
#include <stdio.h>
#include <string.h>
#include "unwinder.h"
#include "unwarm.h"
#include "unwarmbytab.h"
/* These symbols point to the unwind index and should be provide by the linker script */
extern "C" const UnwTabEntry __exidx_start[];
extern "C" const UnwTabEntry __exidx_end[];
// Detect if unwind information is present or not
static int HasUnwindTableInfo() {
  // > 16 because there are default entries we can't suppress
return ((char*)(&__exidx_end) - (char*)(&__exidx_start)) > 16 ? 1 : 0;
}
UnwResult UnwindStart(UnwindFrame* frame, const UnwindCallbacks *cb, void *data) {
if (HasUnwindTableInfo()) {
/* We have unwind information tables */
return UnwindByTableStart(frame, cb, data);
}
else {
/* We don't have unwind information tables */
UnwState state;
/* Initialize the unwinding state */
UnwInitState(&state, cb, data, frame->pc, frame->sp);
/* Check the Thumb bit */
return (frame->pc & 0x1) ? UnwStartThumb(&state) : UnwStartArm(&state);
}
}
#endif
| {
"pile_set_name": "Github"
} |
/**
* Copyright 2018 The original authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
**/
package io.dekorate;
public interface Logger {
String DEBUG = "DEBUG";
String INFO = "INFO";
String WARN = "WARN";
String ERROR = "ERROR";
void debug(String message);
void info(String message);
void warning(String message);
void error(String message);
}
| {
"pile_set_name": "Github"
} |
/* Class = "NSTextFieldCell"; title = "Bus Speed kHz"; ObjectID = "3Hj-1V-Ezc"; */
"3Hj-1V-Ezc.title" = "Bus Speed kHz";
/* Class = "NSTextField"; ibShadowedToolTip = "<key>Type</key>\n<string>0x0201</string>\nThis result of this value can only be found in Apple's specification and it is used in the window About this Mac, which is displaying the according processor name. Otherwise \"Unknown CPU\" will be displayed. An invocation of CPUID was not possible due to PowerPC and due to Apple's different view of the world SMBIOS table 4 is not used either. Clover knows most values but due to the fact that hardware development does not stand still, you can specify this value. Again, this is purely cosmetic."; ObjectID = "4Pm-s4-81X"; */
"4Pm-s4-81X.ibShadowedToolTip" = "<key>Type</key>\n<string>0x0201</string>\nThis result of this value can only be found in Apple's specification and it is used in the window About this Mac, which is displaying the according processor name. Otherwise \"Unknown CPU\" will be displayed. An invocation of CPUID was not possible due to PowerPC and due to Apple's different view of the world SMBIOS table 4 is not used either. Clover knows most values but due to the fact that hardware development does not stand still, you can specify this value. Again, this is purely cosmetic.";
/* Class = "NSTextField"; ibShadowedToolTip = "<key>SavingMode</key>\n<string>0x1B0</string>\n\nCPU feature which lets system software to set\nEnergy Performance Preference (IA32_ENERGY_PERF_BIAS MSR)."; ObjectID = "6pN-Sk-cjc"; */
"6pN-Sk-cjc.ibShadowedToolTip" = "<key>SavingMode</key>\n<string>0x1B0</string>\n\nCPU feature which lets system software to set\nEnergy Performance Preference (IA32_ENERGY_PERF_BIAS MSR).";
/* Class = "NSButtonCell"; title = "C6"; ObjectID = "9yf-0b-11O"; */
"9yf-0b-11O.title" = "C6";
/* Class = "NSButtonCell"; title = "C4"; ObjectID = "D8E-0o-UqN"; */
"D8E-0o-UqN.title" = "C4";
/* Class = "NSTextFieldCell"; title = "QPI"; ObjectID = "Db0-Q9-RZF"; */
"Db0-Q9-RZF.title" = "QPI";
/* Class = "NSBox"; title = "Box"; ObjectID = "Eu5-vh-xck"; */
"Eu5-vh-xck.title" = "Box";
/* Class = "NSBox"; title = "Skylake settings"; ObjectID = "FVq-NB-BQM"; */
"FVq-NB-BQM.title" = "Skylake settings";
/* Class = "NSTextFieldCell"; title = "Type"; ObjectID = "GIt-bL-uRd"; */
"GIt-bL-uRd.title" = "Type";
/* Class = "NSButtonCell"; title = "C2"; ObjectID = "HIR-bT-8TX"; */
"HIR-bT-8TX.title" = "C2";
/* Class = "CocoaBindingsConnection"; ibShadowedIsNilPlaceholder = "0x"; ObjectID = "HnN-4u-Kif"; */
"HnN-4u-Kif.ibShadowedIsNilPlaceholder" = "0x";
/* Class = "NSTextField"; ibShadowedToolTip = "<key>BusSpeedkHz</key>\n<string>133330</string>\nBus speed in kHz. Very important value for a stable system. It is passed from the boot loader to the kernel. If this value is not correct, the kernel will not start at all. If it is slightly incorrect, clock issues may arise and the system will behave in a strange way.\n\nAn automatic detection was introduced with revision 1060, which relies on the ACPI timer producing much more accurate values than the ones stored in DMI.\n\nDMI stores this value in MHz, which is not accurate in contrast to a value calculated from CPU frequency. You can choose a more accurate value, if needed. For example my DMI has a value of 100 MHz, however manually overriding it to a value of 99790 kHz produced better clocks.\nSome vendors use a different meaning for BusSpeed and FSBSpeed and use values four times bigger in the BIOS. You can distinguish them according to the scale: either it is from 100 MHz to 400 MHz or it matches the formula CPUFrequency=RailFrequency*CPUMultiplier.\nIf ASUS uses a rail frequency of 1600 MHz and a multiplier of 8, the formula does not work - a CPU with 12,8 GHz does not exist; a division into 4 is required.\n\nAttention: UEFI booting will produce an inaccurate value. It is recommended manually specify the value, which is calculated more accurately during a boot with Clover EFI."; ObjectID = "IaF-Qk-1r3"; */
"IaF-Qk-1r3.ibShadowedToolTip" = "<key>BusSpeedkHz</key>\n<string>133330</string>\nBus speed in kHz. Very important value for a stable system. It is passed from the boot loader to the kernel. If this value is not correct, the kernel will not start at all. If it is slightly incorrect, clock issues may arise and the system will behave in a strange way.\n\nAn automatic detection was introduced with revision 1060, which relies on the ACPI timer producing much more accurate values than the ones stored in DMI.\n\nDMI stores this value in MHz, which is not accurate in contrast to a value calculated from CPU frequency. You can choose a more accurate value, if needed. For example my DMI has a value of 100 MHz, however manually overriding it to a value of 99790 kHz produced better clocks.\nSome vendors use a different meaning for BusSpeed and FSBSpeed and use values four times bigger in the BIOS. You can distinguish them according to the scale: either it is from 100 MHz to 400 MHz or it matches the formula CPUFrequency=RailFrequency*CPUMultiplier.\nIf ASUS uses a rail frequency of 1600 MHz and a multiplier of 8, the formula does not work - a CPU with 12,8 GHz does not exist; a division into 4 is required.\n\nAttention: UEFI booting will produce an inaccurate value. It is recommended manually specify the value, which is calculated more accurately during a boot with Clover EFI.";
/* Class = "NSTextField"; ibShadowedToolTip = "<key>Latency</key>\n<string>0x03E9</string>\nThis parameter value represents the C3 entry latency issued when entering C3 state. The critical value is 0x3E8(1000). A lower value will allow SpeedStep, a higher one will not allow it. Real Macs always use 0x3E9, meaning SpeedStep is not turned on. Decide for yourself what you need.\nNotebook users should use 0x00FA to enable power management."; ObjectID = "Idz-qn-n5F"; */
"Idz-qn-n5F.ibShadowedToolTip" = "<key>Latency</key>\n<string>0x03E9</string>\nThis parameter value represents the C3 entry latency issued when entering C3 state. The critical value is 0x3E8(1000). A lower value will allow SpeedStep, a higher one will not allow it. Real Macs always use 0x3E9, meaning SpeedStep is not turned on. Decide for yourself what you need.\nNotebook users should use 0x00FA to enable power management.";
/* Class = "NSTextFieldCell"; placeholderString = "hex or number"; ObjectID = "KCy-ek-uRe"; */
"KCy-ek-uRe.placeholderString" = "hex or number";
/* Class = "NSButton"; ibShadowedToolTip = "Enable C4 state."; ObjectID = "KXW-Lp-WEJ"; */
"KXW-Lp-WEJ.ibShadowedToolTip" = "Enable C4 state.";
/* Class = "NSButtonCell"; title = "HWPEnable"; ObjectID = "NH4-nf-cca"; */
"NH4-nf-cca.title" = "HWPEnable";
/* Class = "NSTextFieldCell"; title = "TDP"; ObjectID = "NlF-Gn-aiH"; */
"NlF-Gn-aiH.title" = "TDP";
/* Class = "NSButton"; ibShadowedToolTip = "Disable Intel Turbo"; ObjectID = "QUJ-Ig-4Pe"; */
"QUJ-Ig-4Pe.ibShadowedToolTip" = "Disable Intel Turbo";
/* Class = "NSTextField"; ibShadowedToolTip = "Set HWP configuration (take the form of: 0xXXXXXXXX). You can use the dedicated converter to set power management and min/max frequency. Any OC may be set in bios first."; ObjectID = "SLR-80-5iW"; */
"SLR-80-5iW.ibShadowedToolTip" = "Set HWP configuration (take the form of: 0xXXXXXXXX). You can use the dedicated converter to set power management and min/max frequency. Any OC may be set in bios first.";
/* Class = "NSButton"; ibShadowedToolTip = "Enable C2 state."; ObjectID = "TG2-vd-PlM"; */
"TG2-vd-PlM.ibShadowedToolTip" = "Enable C2 state.";
/* Class = "NSButton"; ibShadowedToolTip = "A possibility to not inject ARTFrequency introduced by rev 3356 if calculated value is not good somehow"; ObjectID = "VCy-7e-J2B"; */
"VCy-7e-J2B.ibShadowedToolTip" = "A possibility to not inject ARTFrequency introduced by rev 3356 if calculated value is not good somehow";
/* Class = "NSTextField"; ibShadowedToolTip = "<key>QPI</key>\n<string>4800</string>\nSystem Profiler calls it Processor Bus Speed or Bus Speed. Chameleon has an algorithm for calculating this value for Nehalem CPUs (, which is however not correct). Clover has a corrected algorithm according to Intel data sheets. AppleSmbios sources describe two variants: either SMBIOS already contains this value as specified by the vendor, or it is calculated by the formulae BusSpeed*4. After a long argument this value was sourced into the configuration file - write what you want (in MHz). This is a purely cosmetic value. Apparently this value only makes sense for Nehalems, the rest should use the formula stated above - or nothing at all.\n\nNote: Real Mac’s report a hw.busfrequency = 100000000\n\nTo achieve that with Clover here’s what to do:\n1 - Drop SMBIOS table type 132 for Sandy Bridge and newer CPU’s. Clover does this if you set QPI to a string value of 0.\n2 - Set SMBIOS table type 4->ExternalClock to 0 (or 25Mhz as a real Mac). This currently has to be done in the source code and re-compile Clover.\n\nIf you don’t do step 2 then for Sandy Bridge and newer CPU’s, AppleSMBIOS.kext will multiply any non zero values reported by SMBIOS table type 4 -> External Clock by 4"; ObjectID = "Xgc-vE-b48"; */
"Xgc-vE-b48.ibShadowedToolTip" = "<key>QPI</key>\n<string>4800</string>\nSystem Profiler calls it Processor Bus Speed or Bus Speed. Chameleon has an algorithm for calculating this value for Nehalem CPUs (, which is however not correct). Clover has a corrected algorithm according to Intel data sheets. AppleSmbios sources describe two variants: either SMBIOS already contains this value as specified by the vendor, or it is calculated by the formulae BusSpeed*4. After a long argument this value was sourced into the configuration file - write what you want (in MHz). This is a purely cosmetic value. Apparently this value only makes sense for Nehalems, the rest should use the formula stated above - or nothing at all.\n\nNote: Real Mac’s report a hw.busfrequency = 100000000\n\nTo achieve that with Clover here’s what to do:\n1 - Drop SMBIOS table type 132 for Sandy Bridge and newer CPU’s. Clover does this if you set QPI to a string value of 0.\n2 - Set SMBIOS table type 4->ExternalClock to 0 (or 25Mhz as a real Mac). This currently has to be done in the source code and re-compile Clover.\n\nIf you don’t do step 2 then for Sandy Bridge and newer CPU’s, AppleSMBIOS.kext will multiply any non zero values reported by SMBIOS table type 4 -> External Clock by 4";
/* Class = "NSTextField"; ibShadowedToolTip = "<key>TDP</key>\n<integer>55</integer>\n\nThermal Design Power: indicate the designated power in Watt."; ObjectID = "Xqk-hd-kSF"; */
"Xqk-hd-kSF.ibShadowedToolTip" = "<key>TDP</key>\n<integer>55</integer>\n\nThermal Design Power: indicate the designated power in Watt.";
/* Class = "NSTextFieldCell"; title = "SavingMode"; ObjectID = "bcx-iV-afn"; */
"bcx-iV-afn.title" = "SavingMode";
/* Class = "NSTextFieldCell"; title = "HWPValue"; ObjectID = "bkU-us-qph"; */
"bkU-us-qph.title" = "HWPValue";
/* Class = "NSButton"; ibShadowedToolTip = "Enable C6 state."; ObjectID = "cNv-tV-dVt"; */
"cNv-tV-dVt.ibShadowedToolTip" = "Enable C6 state.";
/* Class = "NSTextFieldCell"; title = "Latency"; ObjectID = "e1k-x9-XSj"; */
"e1k-x9-XSj.title" = "Latency";
/* Class = "NSTextFieldCell"; title = "Frequency MHz"; ObjectID = "f1c-ud-01j"; */
"f1c-ud-01j.title" = "Frequency MHz";
/* Class = "CocoaBindingsConnection"; ibShadowedIsNilPlaceholder = "0x"; ObjectID = "fvW-Os-k0F"; */
"fvW-Os-k0F.ibShadowedIsNilPlaceholder" = "0x";
/* Class = "NSButtonCell"; title = "UseARTFrequency"; ObjectID = "q7H-T3-I0k"; */
"q7H-T3-I0k.title" = "UseARTFrequency";
/* Class = "NSTextField"; ibShadowedToolTip = "<key>FrequencyMHz</key>\n<string>3200</string>\nCPU base frequency in MHz. Usually Clover will get it from DMI, however if it is not accurate, you can override it. A wrong value can cause system instability - synchronisation issues, slowness etc. It is recommended not use this parameter at all.\n\nMost probably this parameter will be excluded in coming revisions due to its dangerous and harmful nature."; ObjectID = "rMT-iB-J9F"; */
"rMT-iB-J9F.ibShadowedToolTip" = "<key>FrequencyMHz</key>\n<string>3200</string>\nCPU base frequency in MHz. Usually Clover will get it from DMI, however if it is not accurate, you can override it. A wrong value can cause system instability - synchronisation issues, slowness etc. It is recommended not use this parameter at all.\n\nMost probably this parameter will be excluded in coming revisions due to its dangerous and harmful nature.";
/* Class = "NSButtonCell"; title = "QEMU"; ObjectID = "rSK-Nd-wKc"; */
"rSK-Nd-wKc.title" = "QEMU";
/* Class = "NSButton"; ibShadowedToolTip = "HARDWARE-CONTROLLED PERFORMANCE STATES (Skylake +). When HWP is enabled, the processor autonomously selects performance states as deemed appropriate for the applied workload and with consideration of constraining hints that are programmed by the OS. These OS-provided hints include minimum and maximum performance limits, preference towards energy efficiency or performance, and the specification of a relevant workload history observation time window. The means for the OS to override HWP's autonomous selection of performance state with a specific desired performance target is also provided, however, the effective frequency delivered is subject to the result of energy efficiency and performance optimizations"; ObjectID = "rYB-Nu-ozx"; */
"rYB-Nu-ozx.ibShadowedToolTip" = "HARDWARE-CONTROLLED PERFORMANCE STATES (Skylake +). When HWP is enabled, the processor autonomously selects performance states as deemed appropriate for the applied workload and with consideration of constraining hints that are programmed by the OS. These OS-provided hints include minimum and maximum performance limits, preference towards energy efficiency or performance, and the specification of a relevant workload history observation time window. The means for the OS to override HWP's autonomous selection of performance state with a specific desired performance target is also provided, however, the effective frequency delivered is subject to the result of energy efficiency and performance optimizations";
/* Class = "NSButtonCell"; title = "TurboDisable"; ObjectID = "wqJ-36-Nbh"; */
"wqJ-36-Nbh.title" = "TurboDisable";
/* Class = "CocoaBindingsConnection"; ibShadowedIsNilPlaceholder = "0x"; ObjectID = "yM2-1N-5Fr"; */
"yM2-1N-5Fr.ibShadowedIsNilPlaceholder" = "0x";
/* Class = "NSButton"; ibShadowedToolTip = "Enable QEMU (Quick EMUlator)"; ObjectID = "zID-l3-npz"; */
"zID-l3-npz.ibShadowedToolTip" = "Enable QEMU (Quick EMUlator)";
| {
"pile_set_name": "Github"
} |
import io
import sys
import numpy as np
import tensorflow as tf
keras=tf.contrib.keras
l2=keras.regularizers.l2
ATTENTIONX = 0
ATTENTIONI = 1
ATTENTIONO = 2
def res3d(inputs, weight_decay):
# Res3D Block 1
conv3d_1 = keras.layers.Conv3D(64, (3,7,7), strides=(1,2,2), padding='same',
dilation_rate=(1,1,1), kernel_initializer='he_normal',
kernel_regularizer=l2(weight_decay), use_bias=False,
name='Conv3D_1')(inputs)
conv3d_1 = keras.layers.BatchNormalization(name='BatchNorm_1_0')(conv3d_1)
conv3d_1 = keras.layers.Activation('relu', name='ReLU_1')(conv3d_1)
# Res3D Block 2
conv3d_2a_1 = keras.layers.Conv3D(64, (1,1,1), strides=(1,1,1), padding='same',
dilation_rate=(1,1,1), kernel_initializer='he_normal',
kernel_regularizer=l2(weight_decay), use_bias=False,
name='Conv3D_2a_1')(conv3d_1)
conv3d_2a_1 = keras.layers.BatchNormalization(name='BatchNorm_2a_1')(conv3d_2a_1)
conv3d_2a_a = keras.layers.Conv3D(64, (3,3,3), strides=(1,1,1), padding='same',
dilation_rate=(1,1,1), kernel_initializer='he_normal',
kernel_regularizer=l2(weight_decay), use_bias=False,
name='Conv3D_2a_a')(conv3d_1)
conv3d_2a_a = keras.layers.BatchNormalization(name='BatchNorm_2a_a')(conv3d_2a_a)
conv3d_2a_a = keras.layers.Activation('relu', name='ReLU_2a_a')(conv3d_2a_a)
conv3d_2a_b = keras.layers.Conv3D(64, (3,3,3), strides=(1,1,1), padding='same',
dilation_rate=(1,1,1), kernel_initializer='he_normal',
kernel_regularizer=l2(weight_decay), use_bias=False,
name='Conv3D_2a_b')(conv3d_2a_a)
conv3d_2a_b = keras.layers.BatchNormalization(name='BatchNorm_2a_b')(conv3d_2a_b)
conv3d_2a = keras.layers.Add(name='Add_2a')([conv3d_2a_1, conv3d_2a_b])
conv3d_2a = keras.layers.Activation('relu', name='ReLU_2a')(conv3d_2a)
conv3d_2b_a = keras.layers.Conv3D(64, (3,3,3), strides=(1,1,1), padding='same',
dilation_rate=(1,1,1), kernel_initializer='he_normal',
kernel_regularizer=l2(weight_decay), use_bias=False,
name='Conv3D_2b_a')(conv3d_2a)
conv3d_2b_a = keras.layers.BatchNormalization(name='BatchNorm_2b_a')(conv3d_2b_a)
conv3d_2b_a = keras.layers.Activation('relu', name='ReLU_2b_a')(conv3d_2b_a)
conv3d_2b_b = keras.layers.Conv3D(64, (3,3,3), strides=(1,1,1), padding='same',
dilation_rate=(1,1,1), kernel_initializer='he_normal',
kernel_regularizer=l2(weight_decay), use_bias=False,
name='Conv3D_2b_b')(conv3d_2b_a)
conv3d_2b_b = keras.layers.BatchNormalization(name='BatchNorm_2b_b')(conv3d_2b_b)
conv3d_2b = keras.layers.Add(name='Add_2b')([conv3d_2a, conv3d_2b_b])
conv3d_2b = keras.layers.Activation('relu', name='ReLU_2b')(conv3d_2b)
# Res3D Block 3
conv3d_3a_1 = keras.layers.Conv3D(128, (1,1,1), strides=(2,2,2), padding='same',
dilation_rate=(1,1,1), kernel_initializer='he_normal',
kernel_regularizer=l2(weight_decay), use_bias=False,
name='Conv3D_3a_1')(conv3d_2b)
conv3d_3a_1 = keras.layers.BatchNormalization(name='BatchNorm_3a_1')(conv3d_3a_1)
conv3d_3a_a = keras.layers.Conv3D(128, (3,3,3), strides=(2,2,2), padding='same',
dilation_rate=(1,1,1), kernel_initializer='he_normal',
kernel_regularizer=l2(weight_decay), use_bias=False,
name='Conv3D_3a_a')(conv3d_2b)
conv3d_3a_a = keras.layers.BatchNormalization(name='BatchNorm_3a_a')(conv3d_3a_a)
conv3d_3a_a = keras.layers.Activation('relu', name='ReLU_3a_a')(conv3d_3a_a)
conv3d_3a_b = keras.layers.Conv3D(128, (3,3,3), strides=(1,1,1), padding='same',
dilation_rate=(1,1,1), kernel_initializer='he_normal',
kernel_regularizer=l2(weight_decay), use_bias=False,
name='Conv3D_3a_b')(conv3d_3a_a)
conv3d_3a_b = keras.layers.BatchNormalization(name='BatchNorm_3a_b')(conv3d_3a_b)
conv3d_3a = keras.layers.Add(name='Add_3a')([conv3d_3a_1, conv3d_3a_b])
conv3d_3a = keras.layers.Activation('relu', name='ReLU_3a')(conv3d_3a)
conv3d_3b_a = keras.layers.Conv3D(128, (3,3,3), strides=(1,1,1), padding='same',
dilation_rate=(1,1,1), kernel_initializer='he_normal',
kernel_regularizer=l2(weight_decay), use_bias=False,
name='Conv3D_3b_a')(conv3d_3a)
conv3d_3b_a = keras.layers.BatchNormalization(name='BatchNorm_3b_a')(conv3d_3b_a)
conv3d_3b_a = keras.layers.Activation('relu', name='ReLU_3b_a')(conv3d_3b_a)
conv3d_3b_b = keras.layers.Conv3D(128, (3,3,3), strides=(1,1,1), padding='same',
dilation_rate=(1,1,1), kernel_initializer='he_normal',
kernel_regularizer=l2(weight_decay), use_bias=False,
name='Conv3D_3b_b')(conv3d_3b_a)
conv3d_3b_b = keras.layers.BatchNormalization(name='BatchNorm_3b_b')(conv3d_3b_b)
conv3d_3b = keras.layers.Add(name='Add_3b')([conv3d_3a, conv3d_3b_b])
conv3d_3b = keras.layers.Activation('relu', name='ReLU_3b')(conv3d_3b)
# Res3D Block 4
conv3d_4a_1 = keras.layers.Conv3D(256, (1,1,1), strides=(1,1,1), padding='same',
dilation_rate=(1,1,1), kernel_initializer='he_normal',
kernel_regularizer=l2(weight_decay), use_bias=False,
name='Conv3D_4a_1')(conv3d_3b)
conv3d_4a_1 = keras.layers.BatchNormalization(name='BatchNorm_4a_1')(conv3d_4a_1)
conv3d_4a_a = keras.layers.Conv3D(256, (3,3,3), strides=(1,1,1), padding='same',
dilation_rate=(1,1,1), kernel_initializer='he_normal',
kernel_regularizer=l2(weight_decay), use_bias=False,
name='Conv3D_4a_a')(conv3d_3b)
conv3d_4a_a = keras.layers.BatchNormalization(name='BatchNorm_4a_a')(conv3d_4a_a)
conv3d_4a_a = keras.layers.Activation('relu', name='ReLU_4a_a')(conv3d_4a_a)
conv3d_4a_b = keras.layers.Conv3D(256, (3,3,3), strides=(1,1,1), padding='same',
dilation_rate=(1,1,1), kernel_initializer='he_normal',
kernel_regularizer=l2(weight_decay), use_bias=False,
name='Conv3D_4a_b')(conv3d_4a_a)
conv3d_4a_b = keras.layers.BatchNormalization(name='BatchNorm_4a_b')(conv3d_4a_b)
conv3d_4a = keras.layers.Add(name='Add_4a')([conv3d_4a_1, conv3d_4a_b])
conv3d_4a = keras.layers.Activation('relu', name='ReLU_4a')(conv3d_4a)
conv3d_4b_a = keras.layers.Conv3D(256, (3,3,3), strides=(1,1,1), padding='same',
dilation_rate=(1,1,1), kernel_initializer='he_normal',
kernel_regularizer=l2(weight_decay), use_bias=False,
name='Conv3D_4b_a')(conv3d_4a)
conv3d_4b_a = keras.layers.BatchNormalization(name='BatchNorm_4b_a')(conv3d_4b_a)
conv3d_4b_a = keras.layers.Activation('relu', name='ReLU_4b_a')(conv3d_4b_a)
conv3d_4b_b = keras.layers.Conv3D(256, (3,3,3), strides=(1,1,1), padding='same',
dilation_rate=(1,1,1), kernel_initializer='he_normal',
kernel_regularizer=l2(weight_decay), use_bias=False,
name='Conv3D_4b_b')(conv3d_4b_a)
conv3d_4b_b = keras.layers.BatchNormalization(name='BatchNorm_4b_b')(conv3d_4b_b)
conv3d_4b = keras.layers.Add(name='Add_4b')([conv3d_4a, conv3d_4b_b])
conv3d_4b = keras.layers.Activation('relu', name='ReLU_4b')(conv3d_4b)
return conv3d_4b
def relu6(x):
return keras.activations.relu(x,max_value=6)
def mobilenet(inputs, weight_decay):
conv2d_1a = keras.layers.SeparableConv2D(256, (3,3), strides=(1,1), padding='same',
depthwise_regularizer=l2(weight_decay), pointwise_regularizer=l2(weight_decay),
name='SeparableConv2D_1a')(inputs)
conv2d_1a = keras.layers.BatchNormalization(name='BatchNorm_Conv2d_1a')(conv2d_1a)
conv2d_1a = keras.layers.Activation(relu6, name='ReLU_Conv2d_1a')(conv2d_1a)
conv2d_1b = keras.layers.SeparableConv2D(256, (3,3), strides=(2,2), padding='same',
depthwise_regularizer=l2(weight_decay), pointwise_regularizer=l2(weight_decay),
name='SeparableConv2D_1b')(conv2d_1a)
conv2d_1b = keras.layers.BatchNormalization(name='BatchNorm_Conv2d_1b')(conv2d_1b)
conv2d_1b = keras.layers.Activation(relu6, name='ReLU_Conv2d_1b')(conv2d_1b)
conv2d_2a = keras.layers.SeparableConv2D(512, (3,3), strides=(1,1), padding='same',
depthwise_regularizer=l2(weight_decay), pointwise_regularizer=l2(weight_decay),
name='SeparableConv2D_2a')(conv2d_1b)
conv2d_2a = keras.layers.BatchNormalization(name='BatchNorm_Conv2d_2a')(conv2d_2a)
conv2d_2a = keras.layers.Activation(relu6, name='ReLU_Conv2d_2a')(conv2d_2a)
conv2d_2b = keras.layers.SeparableConv2D(512, (3,3), strides=(1,1), padding='same',
depthwise_regularizer=l2(weight_decay), pointwise_regularizer=l2(weight_decay),
name='SeparableConv2D_2b')(conv2d_2a)
conv2d_2b = keras.layers.BatchNormalization(name='BatchNorm_Conv2d_2b')(conv2d_2b)
conv2d_2b = keras.layers.Activation(relu6, name='ReLU_Conv2d_2b')(conv2d_2b)
conv2d_2c = keras.layers.SeparableConv2D(512, (3,3), strides=(1,1), padding='same',
depthwise_regularizer=l2(weight_decay), pointwise_regularizer=l2(weight_decay),
name='SeparableConv2D_2c')(conv2d_2b)
conv2d_2c = keras.layers.BatchNormalization(name='BatchNorm_Conv2d_2c')(conv2d_2c)
conv2d_2c = keras.layers.Activation(relu6, name='ReLU_Conv2d_2c')(conv2d_2c)
conv2d_2d = keras.layers.SeparableConv2D(512, (3,3), strides=(1,1), padding='same',
depthwise_regularizer=l2(weight_decay), pointwise_regularizer=l2(weight_decay),
name='SeparableConv2D_2d')(conv2d_2c)
conv2d_2d = keras.layers.BatchNormalization(name='BatchNorm_Conv2d_2d')(conv2d_2d)
conv2d_2d = keras.layers.Activation(relu6, name='ReLU_Conv2d_2d')(conv2d_2d)
conv2d_2e = keras.layers.SeparableConv2D(512, (3,3), strides=(1,1), padding='same',
depthwise_regularizer=l2(weight_decay), pointwise_regularizer=l2(weight_decay),
name='SeparableConv2D_2e')(conv2d_2d)
conv2d_2e = keras.layers.BatchNormalization(name='BatchNorm_Conv2d_2e')(conv2d_2e)
conv2d_2e = keras.layers.Activation(relu6, name='ReLU_Conv2d_2e')(conv2d_2e)
conv2d_3a = keras.layers.SeparableConv2D(1024, (3,3), strides=(2,2), padding='same',
depthwise_regularizer=l2(weight_decay), pointwise_regularizer=l2(weight_decay),
name='SeparableConv2D_3a')(conv2d_2e)
conv2d_3a = keras.layers.BatchNormalization(name='BatchNorm_Conv2d_3a')(conv2d_3a)
conv2d_3a = keras.layers.Activation(relu6, name='ReLU_Conv2d_3a')(conv2d_3a)
conv2d_3b = keras.layers.SeparableConv2D(1024, (3,3), strides=(2,2), padding='same',
depthwise_regularizer=l2(weight_decay), pointwise_regularizer=l2(weight_decay),
name='SeparableConv2D_3b')(conv2d_3a)
conv2d_3b = keras.layers.BatchNormalization(name='BatchNorm_Conv2d_3b')(conv2d_3b)
conv2d_3b = keras.layers.Activation(relu6, name='ReLU_Conv2d_3b')(conv2d_3b)
return conv2d_3b
def res3d_aclstm_mobilenet(inputs, seq_len, weight_decay, atten_type):
# Res3D Block
res3d_featmap = res3d(inputs, weight_decay)
# Attention ConvLSTM2D Block
if atten_type==ATTENTIONX:
clstm2d_1 = keras.layers.AttenXConvLSTM2D(256, (3,3), strides=(1,1), padding='same',
kernel_initializer='he_normal', recurrent_initializer='he_normal',
kernel_regularizer=l2(weight_decay), recurrent_regularizer=l2(weight_decay),
return_sequences=True, name='axclstm2d_1')(res3d_featmap)
elif atten_type==ATTENTIONI:
clstm2d_1 = keras.layers.AttenIConvLSTM2D(256, (3,3), strides=(1,1), padding='same',
kernel_initializer='he_normal', recurrent_initializer='he_normal',
kernel_regularizer=l2(weight_decay), recurrent_regularizer=l2(weight_decay),
return_sequences=True, name='aiclstm2d_1')(res3d_featmap)
elif atten_type==ATTENTIONO:
clstm2d_1 = keras.layers.AttenOConvLSTM2D(256, (3,3), strides=(1,1), padding='same',
kernel_initializer='he_normal', recurrent_initializer='he_normal',
kernel_regularizer=l2(weight_decay), recurrent_regularizer=l2(weight_decay),
return_sequences=True, name='aoclstm2d_1')(res3d_featmap)
clstm2d_2 = keras.layers.GatedConvLSTM2D(256, (3,3), strides=(1,1), padding='same',
kernel_initializer='he_normal', recurrent_initializer='he_normal',
kernel_regularizer=l2(weight_decay), recurrent_regularizer=l2(weight_decay),
return_sequences=True, name='gclstm2d_2')(clstm2d_1)
featmap_2d = keras.layers.Reshape((28,28,256), name='clstm_reshape')(clstm2d_2)
# MobileNet
features = mobilenet(featmap_2d, weight_decay)
features = keras.layers.Reshape((seq_len/2,4,4,1024), name='feature_reshape')(features)
gpooling = keras.layers.AveragePooling3D(pool_size=(seq_len/2,4,4), strides=(seq_len/2,4,4),
padding='valid', name='Average_Pooling')(features)
return gpooling
| {
"pile_set_name": "Github"
} |
<testcase>
<info>
<keywords>
Metalink
HTTP
HTTP GET
-J
</keywords>
</info>
#
# Server-side
<reply>
<data nocheck="yes">
HTTP/1.1 200 OK
Date: Thu, 21 Jun 2012 14:50:02 GMT
Server: test-server/fake
Content-Length: 42
Connection: close
Content-Type: text/html
Content-Disposition: filename=name2007; charset=funny; option=strange
Funny-head: yesyes
Something delivered from an HTTP resource
</data>
</reply>
#
# Client-side
<client>
# This relies on the debug feature to allow us to set directory to store the
# -O and -J output in, using the CURL_TESTDIR variable. This test might use
# it upon failure only, successful execution won't actually use it.
<features>
debug
file
Metalink
</features>
<server>
http
</server>
<name>
Metalink local XML file, HTTP resource, using -O -J -D file
</name>
<setenv>
CURL_TESTDIR=%PWD/log
</setenv>
<command option="no-output,no-include">
--metalink file://%PWD/log/test2007.metalink -J -O -D log/heads2007
</command>
# local metalink file written before test command runs
<file name="log/test2007.metalink">
<?xml version="1.0" encoding="utf-8"?>
<metalink version="3.0" xmlns="http://www.metalinker.org/">
<files>
<file name="log/download2007">
<verification>
<hash type="md5">we-only-check-the-strongest-hash-provided</hash>
<hash type="sha256">52899e30f80e3490632d505653204e1fb5b02bda141048704ce9a0ed00b8a3f5</hash>
</verification>
<resources maxconnections="1">
<url type="http" preference="90">http://%HOSTIP:%HTTPPORT/2007</url>
</resources>
</file>
</files>
</metalink>
</file>
<postcheck>
perl %SRCDIR/libtest/notexists.pl log/2007 log/name2007
</postcheck>
</client>
#
# Verify data after the test has been "shot"
<verify>
<strip>
^User-Agent:.*
</strip>
<protocol>
GET /2007 HTTP/1.1
Host: %HOSTIP:%HTTPPORT
Accept: */*
</protocol>
<file1 name="log/download2007">
Something delivered from an HTTP resource
</file1>
<file2 name="log/heads2007">
HTTP/1.1 200 OK
Date: Thu, 21 Jun 2012 14:50:02 GMT
Server: test-server/fake
Content-Length: 42
Connection: close
Content-Type: text/html
Content-Disposition: filename=name2007; charset=funny; option=strange
Funny-head: yesyes
</file2>
<file3 name="log/stdout2007">
</file3>
<file4 name="log/stderr2007">
Metalink: parsing (file://%PWD/log/test2007.metalink) metalink/XML...
Metalink: parsing (file://%PWD/log/test2007.metalink) OK
Metalink: fetching (log/download2007) from (http://%HOSTIP:%HTTPPORT/2007)...
Metalink: fetching (log/download2007) from (http://%HOSTIP:%HTTPPORT/2007) OK
Metalink: validating (log/download2007)...
Metalink: validating (log/download2007) [sha-256] OK
</file4>
<stripfile4>
$_ = '' if (($_ !~ /^Metalink: /) && ($_ !~ /error/i) && ($_ !~ /warn/i))
</stripfile4>
</verify>
</testcase>
| {
"pile_set_name": "Github"
} |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkcloudauth.endpoint import endpoint_data
class DescribeVerifyRecordsRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Cloudauth', '2019-03-07', 'DescribeVerifyRecords','cloudauth')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_StatusList(self):
return self.get_query_params().get('StatusList')
def set_StatusList(self,StatusList):
self.add_query_param('StatusList',StatusList)
def get_StartDate(self):
return self.get_query_params().get('StartDate')
def set_StartDate(self,StartDate):
self.add_query_param('StartDate',StartDate)
def get_PageSize(self):
return self.get_query_params().get('PageSize')
def set_PageSize(self,PageSize):
self.add_query_param('PageSize',PageSize)
def get_TotalCount(self):
return self.get_query_params().get('TotalCount')
def set_TotalCount(self,TotalCount):
self.add_query_param('TotalCount',TotalCount)
def get_CurrentPage(self):
return self.get_query_params().get('CurrentPage')
def set_CurrentPage(self,CurrentPage):
self.add_query_param('CurrentPage',CurrentPage)
def get_QueryId(self):
return self.get_query_params().get('QueryId')
def set_QueryId(self,QueryId):
self.add_query_param('QueryId',QueryId)
def get_BizType(self):
return self.get_query_params().get('BizType')
def set_BizType(self,BizType):
self.add_query_param('BizType',BizType)
def get_IdCardNum(self):
return self.get_query_params().get('IdCardNum')
def set_IdCardNum(self,IdCardNum):
self.add_query_param('IdCardNum',IdCardNum)
def get_EndDate(self):
return self.get_query_params().get('EndDate')
def set_EndDate(self,EndDate):
self.add_query_param('EndDate',EndDate)
def get_BizId(self):
return self.get_query_params().get('BizId')
def set_BizId(self,BizId):
self.add_query_param('BizId',BizId) | {
"pile_set_name": "Github"
} |
{
"created_at": "2015-02-27T22:28:54.684746",
"description": "RTL support for ExtJS 3",
"fork": false,
"full_name": "MeirKriheli/ExtJS-RTL",
"language": "JavaScript",
"updated_at": "2015-02-27T23:43:38.231751"
} | {
"pile_set_name": "Github"
} |
EESchema-LIBRARY Version 2.3
#encoding utf-8
#
# RCLAMP0524PCT-ND
#
DEF RCLAMP0524PCT-ND D 0 40 Y Y 1 F N
F0 "D" -413 539 60 H V L CNN
F1 "RCLAMP0524PCT-ND" -1 -553 60 H V C CNN
F2 "digikey-footprints:UFDFN-10_2.5x1mm" 515 445 60 H I L CNN
F3 "" 200 300 60 H I L CNN
DRAW
C -326 226 8 0 1 0 F
C -325 -100 5 0 1 0 N
C -325 100 5 0 1 0 N
C -324 -224 8 0 1 0 F
C -324 0 6 0 1 0 N
C -50 -226 8 0 1 0 F
C -25 226 8 0 1 0 F
C 74 -376 8 0 1 0 F
C 100 374 8 0 1 0 F
C 274 -226 8 0 1 0 F
C 274 0 8 0 1 0 F
C 274 226 8 0 1 0 F
S -400 500 400 -500 0 1 0 f
P 2 0 1 0 -400 -100 -325 -100 N
P 2 0 1 0 -400 100 -325 100 N
P 2 0 1 0 -225 -225 -325 -225 N
P 2 0 1 0 -225 225 -325 225 N
P 2 0 1 0 -200 -375 200 -375 N
P 2 0 1 0 -200 -350 -200 -400 N
P 2 0 1 0 -200 -225 200 -225 N
P 2 0 1 0 -200 -200 -200 -250 N
P 2 0 1 0 -200 225 200 225 N
P 2 0 1 0 -200 250 -200 200 N
P 2 0 1 0 -200 375 200 375 N
P 2 0 1 0 -200 400 -200 350 N
P 2 0 1 0 -50 0 -325 0 N
P 2 0 1 0 50 -75 25 -100 N
P 2 0 1 0 50 75 50 -75 N
P 2 0 1 0 50 75 75 100 N
P 2 0 1 0 225 -350 225 -400 N
P 2 0 1 0 225 -225 275 -225 N
P 2 0 1 0 225 -200 225 -250 N
P 2 0 1 0 225 225 275 225 N
P 2 0 1 0 225 250 225 200 N
P 2 0 1 0 225 350 225 350 N
P 2 0 1 0 225 400 225 350 N
P 2 0 1 0 275 0 50 0 N
P 2 0 1 0 275 225 275 225 N
P 3 0 1 0 -400 -300 -50 -300 -50 -225 N
P 3 0 1 0 -400 300 -25 300 -25 225 N
P 4 0 1 0 -225 -375 -325 -375 -325 375 -225 375 N
P 4 0 1 0 -225 -350 -225 -400 -200 -375 -225 -350 F
P 4 0 1 0 -225 -200 -225 -250 -200 -225 -225 -200 F
P 4 0 1 0 -225 250 -225 200 -200 225 -225 250 F
P 4 0 1 0 -225 400 -225 350 -200 375 -225 400 F
P 4 0 1 0 -50 75 -50 -75 50 0 -50 75 F
P 4 0 1 0 200 -350 200 -400 225 -375 200 -350 F
P 4 0 1 0 200 -200 200 -250 225 -225 200 -200 F
P 4 0 1 0 200 250 200 200 225 225 200 250 F
P 4 0 1 0 200 400 200 350 225 375 200 400 F
P 4 0 1 0 225 375 275 375 275 -375 225 -375 N
P 5 0 1 0 75 -375 75 -450 -350 -450 -350 -400 -400 -400 N
P 5 0 1 0 100 375 100 450 -350 450 -350 400 -400 400 N
X ~ 1 -500 400 100 R 50 50 1 1 I
X ~ 2 -500 300 100 R 50 50 1 1 I
X ~ 3 -500 100 100 R 50 50 1 1 W
X ~ 4 -500 -300 100 R 50 50 1 1 I
X ~ 5 -500 -400 100 R 50 50 1 1 I
X ~ 6 500 -400 100 L 50 50 1 1 O
X ~ 7 500 -300 100 L 50 50 1 1 O
X ~ 8 -500 -100 100 R 50 50 1 1 W
X ~ 9 500 300 100 L 50 50 1 1 O
X ~ 10 500 400 100 L 50 50 1 1 O
ENDDRAW
ENDDEF
#
#End Library
| {
"pile_set_name": "Github"
} |
/*
* Copyright (C) 2017 Apple Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
* OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#pragma once
#if USE(PTHREADS) && HAVE(MACHINE_CONTEXT)
#include <signal.h>
#include <tuple>
#include <wtf/Function.h>
#include <wtf/Optional.h>
#include <wtf/PlatformRegisters.h>
namespace WTF {
// Note that SIGUSR1 is used in pthread-based ports (except Darwin) to suspend and resume threads.
enum class Signal {
    // Usr will always chain to any non-default handler installed before us, since there is no way
    // to know if a signal was intended exclusively for us.
Usr,
// These signals will only chain if we don't have a handler that can process them. If there is nothing
// to chain to we restore the default handler and crash.
Ill,
BadAccess, // For posix this is both SIGSEGV and SIGBUS
NumberOfSignals = BadAccess + 2, // BadAccess is really two signals.
Unknown = NumberOfSignals
};
inline std::tuple<int, Optional<int>> toSystemSignal(Signal signal)
{
switch (signal) {
case Signal::BadAccess: return std::make_tuple(SIGSEGV, SIGBUS);
case Signal::Ill: return std::make_tuple(SIGILL, WTF::nullopt);
    case Signal::Usr: return std::make_tuple(SIGUSR2, WTF::nullopt);
default: break;
}
RELEASE_ASSERT_NOT_REACHED();
}
inline Signal fromSystemSignal(int signal)
{
switch (signal) {
case SIGSEGV: return Signal::BadAccess;
case SIGBUS: return Signal::BadAccess;
case SIGILL: return Signal::Ill;
case SIGUSR2: return Signal::Usr;
default: return Signal::Unknown;
}
}
enum class SignalAction {
Handled,
NotHandled,
ForceDefault
};
struct SigInfo {
void* faultingAddress { 0 };
};
using SignalHandler = Function<SignalAction(Signal, SigInfo&, PlatformRegisters&)>;
// Call this method whenever you want to install a signal handler. It's ok to call this function lazily.
// Note: Your signal handler will be called every time the handler for the desired signal is called.
// Thus it is your responsibility to discern if the signal fired was yours.
// This function is currently a one way street i.e. once installed, a signal handler cannot be uninstalled.
WTF_EXPORT_PRIVATE void installSignalHandler(Signal, SignalHandler&&);
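// A minimal usage sketch (illustrative only, not part of this header). The handler claims
// faults on addresses it recognizes and lets everything else chain; isAddressOwnedByUs()
// is a hypothetical helper, not a WTF API.
//
//     WTF::installSignalHandler(WTF::Signal::BadAccess,
//         [] (WTF::Signal, WTF::SigInfo& sigInfo, WTF::PlatformRegisters&) -> WTF::SignalAction {
//             if (isAddressOwnedByUs(sigInfo.faultingAddress))
//                 return WTF::SignalAction::Handled;
//             return WTF::SignalAction::NotHandled;
//         });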
#if HAVE(MACH_EXCEPTIONS)
class Thread;
void registerThreadForMachExceptionHandling(Thread&);
void handleSignalsWithMach();
#endif // HAVE(MACH_EXCEPTIONS)
} // namespace WTF
#if HAVE(MACH_EXCEPTIONS)
using WTF::registerThreadForMachExceptionHandling;
using WTF::handleSignalsWithMach;
#endif // HAVE(MACH_EXCEPTIONS)
using WTF::Signal;
using WTF::SigInfo;
using WTF::toSystemSignal;
using WTF::fromSystemSignal;
using WTF::SignalAction;
using WTF::installSignalHandler;
#endif // USE(PTHREADS) && HAVE(MACHINE_CONTEXT)
| {
"pile_set_name": "Github"
} |
! dudon_mohajira.scl
!
Dudon's Mohajira, neutral diatonic. g^5-g^4=1/2
7
!
153.26216
348.91261
502.17478
697.82522
851.08739
1046.73784
2/1
| {
"pile_set_name": "Github"
} |
---
- provider_name: ScribbleMaps
provider_url: https://scribblemaps.com
endpoints:
- schemes:
- http://www.scribblemaps.com/maps/view/*
- https://www.scribblemaps.com/maps/view/*
- http://scribblemaps.com/maps/view/*
- https://scribblemaps.com/maps/view/*
url: https://scribblemaps.com/api/services/oembed.{format}
example_urls:
- https://scribblemaps.com/api/services/oembed.xml?url=https%3A%2F%2Fscribblemaps.com%2Fmaps%2Fview%2FSaigon%2FJkFLCgwlGt
- https://scribblemaps.com/api/services/oembed.json?url=https%3A%2F%2Fscribblemaps.com%2Fmaps%2Fview%2FSaigon%2FJkFLCgwlGt
discovery: true
... | {
"pile_set_name": "Github"
} |
/*--------------------------------*- C++ -*----------------------------------*\
========= |
\\ / F ield | OpenFOAM: The Open Source CFD Toolbox
\\ / O peration | Website: https://openfoam.org
\\ / A nd | Version: dev
\\/ M anipulation |
\*---------------------------------------------------------------------------*/
FoamFile
{
version 2.0;
format ascii;
class dictionary;
location "system";
object controlDict;
}
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
DebugSwitches
{
primitiveMesh 1;
polyMesh 1;
fvMesh 1;
polyTopoChange 1;
pointMesh 1;
pointConstraints 1;
}
application icoFoam;
startFrom startTime;
startTime 0;
stopAt endTime;
endTime 30;
deltaT 1;
writeControl timeStep;
writeInterval 1;
purgeWrite 0;
writeFormat ascii;
writePrecision 10;
writeCompression off;
timeFormat general;
timePrecision 6;
runTimeModifiable true;
// ************************************************************************* //
| {
"pile_set_name": "Github"
} |
define([
"dojo/_base/lang",
"dojo/_base/fx",
"dojo/dom-style"
], function(lang, fx, domStyle) {
// Constants used to identify which clip edge is being wiped. The values are
// the index of the clip array that is changed during the animation.
var DOWN = 2,
RIGHT = 3,
UP = 0,
LEFT = 1;
function _clipArray(/*int*/type, /*int*/w, /*int*/h, /*number*/x){
// summary:
// Returns an array containing the down, right, up, and
// left clip region based on the type. If "x" is specified,
// then it is applied to the appropriate clipping edge.
var a = [0, w, 0, 0]; // default to the top edge
if(type == RIGHT){
a = [0, w, h, w];
}else if(type == UP){
a = [h, w, h, 0];
}else if(type == LEFT){
a = [0, 0, h, 0];
}
if(x != null){
a[type] = type == DOWN || type == LEFT ? x : (type % 2 ? w : h) - x;
}
return a; /*Array*/
}
function _setClip(/*DomNode*/n, /*int*/type, /*int*/w, /*int*/h, /*number*/x){
// summary:
// Sets the clip region of the node. If a type is passed in then we
// return a rect(), otherwise return "auto".
domStyle.set(n, "clip", type == null ? "auto" : "rect(" + _clipArray(type, w, h, x).join("px,") + "px)");
}
function _wipe(/*int*/type, /*Object*/args){
// summary:
// Handles the preparation of the dom node and creates the Animation object.
var node = args.next.node,
w = args.rotatorBox.w,
h = args.rotatorBox.h;
domStyle.set(node, {
display: "",
zIndex: (domStyle.get(args.current.node, "zIndex") || 1) + 1
});
_setClip(node, type, w, h);
return new fx.Animation(lang.mixin({
node: node,
curve: [0, type % 2 ? w : h],
onAnimate: function(x){
_setClip(node, type, w, h, parseInt(x));
}
}, args));
}
var exports = {
wipeDown: function(/*Object*/args){
// summary:
// Returns a dojo.Animation that wipes in the next rotator pane from the top.
return _wipe(DOWN, args); /*dojo.Animation*/
},
wipeRight: function(/*Object*/args){
// summary:
// Returns a dojo.Animation that wipes in the next rotator pane from the right.
return _wipe(RIGHT, args); /*dojo.Animation*/
},
wipeUp: function(/*Object*/args){
// summary:
// Returns a dojo.Animation that wipes in the next rotator pane from the bottom.
return _wipe(UP, args); /*dojo.Animation*/
},
wipeLeft: function(/*Object*/args){
// summary:
// Returns a dojo.Animation that wipes in the next rotator pane from the left.
return _wipe(LEFT, args); /*dojo.Animation*/
}
};
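	// Illustrative usage sketch (not part of this module). The module id and the pane
	// nodes are assumptions for the example; the args fields mirror what _wipe() reads
	// above, plus any dojo/_base/fx Animation options such as duration.
	//
	//     require(["dojox/widget/rotator/Wipe"], function(wipe){
	//         var anim = wipe.wipeLeft({
	//             current: { node: paneA },       // pane being hidden
	//             next: { node: paneB },          // pane being revealed
	//             rotatorBox: { w: 600, h: 400 }, // rotator dimensions in px
	//             duration: 500
	//         });
	//         anim.play();
	//     });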
// back-compat, remove for 2.0
lang.mixin(lang.getObject("dojox.widget.rotator"), exports);
return exports;
}); | {
"pile_set_name": "Github"
} |
[(Class
{ c_span = [3:1-5:2]; c_annotation = (); c_mode = Mstrict;
c_final = false; c_is_xhp = false; c_has_xhp_keyword = false;
c_kind = Cnormal; c_name = ([3:7-8], "\\B"); c_tparams = [];
c_extends = []; c_uses = []; c_use_as_alias = [];
c_insteadof_alias = []; c_xhp_attr_uses = []; c_xhp_category = None;
c_reqs = []; c_implements = []; c_where_constraints = [];
c_consts = []; c_typeconsts = []; c_vars = [];
c_methods =
[{ m_span = [4:3-41]; m_annotation = (); m_final = false;
m_abstract = false; m_static = false; m_visibility = Public;
m_name = ([4:19-30], "__construct"); m_tparams = [];
m_where_constraints = []; m_variadic = FVnonVariadic;
m_params =
[{ param_annotation = ([4:35-37], num);
param_type_hint = ((num), (Some ([4:31-34], (Hprim Tnum))));
param_is_variadic = false; param_pos = [4:35-37];
param_name = "$x"; param_expr = None; param_callconv = None;
param_user_attributes = []; param_visibility = None }
];
m_cap = ((nothing), None); m_unsafe_cap = ((mixed), None);
m_body = { fb_ast = [([Pos.none], Noop)]; fb_annotation = () };
m_fun_kind = FSync; m_user_attributes = [];
m_ret = ((void), (Some ([4:19-30], (Hprim Tvoid))));
m_external = false; m_doc_comment = None }
];
c_attributes = []; c_xhp_children = []; c_xhp_attrs = [];
c_namespace =
{ Namespace_env.ns_ns_uses = <opaque>; ns_class_uses = <opaque>;
ns_record_def_uses = <opaque>; ns_fun_uses = <opaque>;
ns_const_uses = <opaque>; ns_name = None; ns_auto_ns_map = [];
ns_is_codegen = false; ns_disable_xhp_element_mangling = false };
c_user_attributes = []; c_file_attributes = []; c_enum = None;
c_pu_enums = []; c_doc_comment = None; c_emit_id = None });
(Class
{ c_span = [6:1-10:2]; c_annotation = (); c_mode = Mstrict;
c_final = false; c_is_xhp = false; c_has_xhp_keyword = false;
c_kind = Cnormal; c_name = ([6:7-8], "\\A"); c_tparams = [];
c_extends = [([6:17-18], (Happly (([6:17-18], "\\B"), [])))];
c_uses = []; c_use_as_alias = []; c_insteadof_alias = [];
c_xhp_attr_uses = []; c_xhp_category = None; c_reqs = [];
c_implements = []; c_where_constraints = []; c_consts = [];
c_typeconsts = []; c_vars = [];
c_methods =
[{ m_span = [7:3-9:4]; m_annotation = (); m_final = false;
m_abstract = false; m_static = false; m_visibility = Public;
m_name = ([7:19-30], "__construct"); m_tparams = [];
m_where_constraints = []; m_variadic = FVnonVariadic;
m_params =
[{ param_annotation = ([7:35-37], int);
param_type_hint = ((int), (Some ([7:31-34], (Hprim Tint))));
param_is_variadic = false; param_pos = [7:35-37];
param_name = "$x"; param_expr = None; param_callconv = None;
param_user_attributes = []; param_visibility = None }
];
m_cap = ((nothing), None); m_unsafe_cap = ((mixed), None);
m_body =
{ fb_ast =
[([8:5-29],
(Expr
(([8:5-28], void),
(Call (
(([8:5-24], (function(num $x): void)),
(Class_const ((([8:5-11], B), CIparent),
([8:13-24], "__construct")))),
[], [(([8:25-27], int), (Lvar ([8:25-27], $x)))], None)))))
];
fb_annotation = () };
m_fun_kind = FSync; m_user_attributes = [];
m_ret = ((void), (Some ([7:19-30], (Hprim Tvoid))));
m_external = false; m_doc_comment = None }
];
c_attributes = []; c_xhp_children = []; c_xhp_attrs = [];
c_namespace =
{ Namespace_env.ns_ns_uses = <opaque>; ns_class_uses = <opaque>;
ns_record_def_uses = <opaque>; ns_fun_uses = <opaque>;
ns_const_uses = <opaque>; ns_name = None; ns_auto_ns_map = [];
ns_is_codegen = false; ns_disable_xhp_element_mangling = false };
c_user_attributes = []; c_file_attributes = []; c_enum = None;
c_pu_enums = []; c_doc_comment = None; c_emit_id = None })
]
| {
"pile_set_name": "Github"
} |
/*
* Copyright (c) 2020 Cossack Labs Limited
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.cossacklabs.themis.test;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.Base64;
import com.cossacklabs.themis.InvalidArgumentException;
import com.cossacklabs.themis.NullArgumentException;
import com.cossacklabs.themis.SecureCell;
import com.cossacklabs.themis.SecureCellData;
import com.cossacklabs.themis.SecureCellException;
import com.cossacklabs.themis.SymmetricKey;
import static org.junit.Assert.*;
import org.junit.Ignore;
import org.junit.Test;
public class SecureCellTokenProtectTest {
@Test
public void initWithGenerated() {
SecureCell.TokenProtect cell = SecureCell.TokenProtectWithKey(new SymmetricKey());
assertNotNull(cell);
}
@Test
public void initWithFixed() {
String keyBase64 = "UkVDMgAAAC13PCVZAKOczZXUpvkhsC+xvwWnv3CLmlG0Wzy8ZBMnT+2yx/dg";
byte[] keyBytes = Base64.getDecoder().decode(keyBase64);
SecureCell.TokenProtect cell = SecureCell.TokenProtectWithKey(keyBytes);
assertNotNull(cell);
}
@Test
@SuppressWarnings("ResultOfMethodCallIgnored")
public void initWithEmpty() {
try {
SecureCell.TokenProtectWithKey((SymmetricKey)null);
fail("expected NullArgumentException");
}
catch (NullArgumentException ignored) {}
try {
SecureCell.TokenProtectWithKey((byte[])null);
fail("expected NullArgumentException");
}
catch (NullArgumentException ignored) {}
try {
SecureCell.TokenProtectWithKey(new byte[]{});
fail("expected InvalidArgumentException");
}
catch (InvalidArgumentException ignored) {}
}
@Test
public void roundtrip() throws SecureCellException {
SecureCell.TokenProtect cell = SecureCell.TokenProtectWithKey(new SymmetricKey());
byte[] message = "All your base are belong to us!".getBytes(StandardCharsets.UTF_8);
byte[] context = "For great justice".getBytes(StandardCharsets.UTF_8);
SecureCellData result = cell.encrypt(message, context);
assertNotNull(result);
byte[] encrypted = result.getProtectedData();
byte[] authToken = result.getAdditionalData();
assertNotNull(encrypted);
assertNotNull(authToken);
byte[] decrypted = cell.decrypt(encrypted, authToken, context);
assertNotNull(decrypted);
assertArrayEquals(message, decrypted);
}
@Test
public void dataLengthPreservation() {
SecureCell.TokenProtect cell = SecureCell.TokenProtectWithKey(new SymmetricKey());
byte[] message = "All your base are belong to us!".getBytes(StandardCharsets.UTF_8);
SecureCellData result = cell.encrypt(message);
assertEquals(message.length, result.getProtectedData().length);
assertTrue(result.getAdditionalData().length > 0);
}
@Test
public void contextInclusion() {
SecureCell.TokenProtect cell = SecureCell.TokenProtectWithKey(new SymmetricKey());
byte[] message = "All your base are belong to us!".getBytes(StandardCharsets.UTF_8);
byte[] shortContext = ".".getBytes(StandardCharsets.UTF_8);
byte[] longContext = "You have no chance to survive make your time. Ha ha ha ha ...".getBytes(StandardCharsets.UTF_8);
SecureCellData resultShort = cell.encrypt(message, shortContext);
SecureCellData resultLong = cell.encrypt(message, longContext);
// Context is not (directly) included into encrypted message.
assertEquals(resultShort.getProtectedData().length, resultLong.getProtectedData().length);
assertEquals(resultShort.getAdditionalData().length, resultLong.getAdditionalData().length);
}
@Test
public void withoutContext() throws SecureCellException {
SecureCell.TokenProtect cell = SecureCell.TokenProtectWithKey(new SymmetricKey());
byte[] message = "All your base are belong to us!".getBytes(StandardCharsets.UTF_8);
// Absent, empty, or nil context are all the same.
SecureCellData result1 = cell.encrypt(message);
SecureCellData result2 = cell.encrypt(message, null);
SecureCellData result3 = cell.encrypt(message, new byte[]{});
assertArrayEquals(message, cell.decrypt(result1.getProtectedData(), result1.getAdditionalData()));
assertArrayEquals(message, cell.decrypt(result2.getProtectedData(), result2.getAdditionalData()));
assertArrayEquals(message, cell.decrypt(result3.getProtectedData(), result3.getAdditionalData()));
assertArrayEquals(message, cell.decrypt(result1.getProtectedData(), result1.getAdditionalData(), null));
assertArrayEquals(message, cell.decrypt(result2.getProtectedData(), result2.getAdditionalData(), null));
assertArrayEquals(message, cell.decrypt(result3.getProtectedData(), result3.getAdditionalData(), null));
assertArrayEquals(message, cell.decrypt(result1.getProtectedData(), result1.getAdditionalData(), new byte[]{}));
assertArrayEquals(message, cell.decrypt(result2.getProtectedData(), result2.getAdditionalData(), new byte[]{}));
assertArrayEquals(message, cell.decrypt(result3.getProtectedData(), result3.getAdditionalData(), new byte[]{}));
}
@Test
@SuppressWarnings("ResultOfMethodCallIgnored")
public void contextSignificance() throws SecureCellException {
SecureCell.TokenProtect cell = SecureCell.TokenProtectWithKey(new SymmetricKey());
byte[] message = "All your base are belong to us!".getBytes(StandardCharsets.UTF_8);
byte[] correctContext = "We are CATS".getBytes(StandardCharsets.UTF_8);
byte[] incorrectContext = "Captain !!".getBytes(StandardCharsets.UTF_8);
SecureCellData result = cell.encrypt(message, correctContext);
byte[] encrypted = result.getProtectedData();
byte[] authToken = result.getAdditionalData();
// You cannot use a different context to decrypt data.
try {
cell.decrypt(encrypted, authToken, incorrectContext);
fail("expected SecureCellException");
}
catch (SecureCellException ignored) {}
// Only the original context will work.
byte[] decrypted = cell.decrypt(encrypted, authToken, correctContext);
assertArrayEquals(message, decrypted);
}
@Test
@SuppressWarnings("ResultOfMethodCallIgnored")
public void tokenSignificance() throws SecureCellException {
SecureCell.TokenProtect cell = SecureCell.TokenProtectWithKey(new SymmetricKey());
byte[] message = "All your base are belong to us!".getBytes(StandardCharsets.UTF_8);
SecureCellData result1 = cell.encrypt(message);
byte[] encrypted1 = result1.getProtectedData();
byte[] authToken1 = result1.getAdditionalData();
SecureCellData result2 = cell.encrypt(message);
byte[] encrypted2 = result2.getProtectedData();
byte[] authToken2 = result2.getAdditionalData();
// You cannot use a different token to decrypt data.
try {
cell.decrypt(encrypted1, authToken2);
fail("expected SecureCellException");
}
catch (SecureCellException ignored) {}
try {
cell.decrypt(encrypted2, authToken1);
fail("expected SecureCellException");
}
catch (SecureCellException ignored) {}
// Only the matching token will work.
assertArrayEquals(message, cell.decrypt(encrypted1, authToken1));
assertArrayEquals(message, cell.decrypt(encrypted2, authToken2));
}
@Test
@SuppressWarnings("ResultOfMethodCallIgnored")
public void detectCorruptedData() {
SecureCell.TokenProtect cell = SecureCell.TokenProtectWithKey(new SymmetricKey());
byte[] message = "All your base are belong to us!".getBytes(StandardCharsets.UTF_8);
SecureCellData result = cell.encrypt(message);
byte[] encrypted = result.getProtectedData();
byte[] authToken = result.getAdditionalData();
// Invert every odd byte, this will surely break the message.
byte[] corrupted = Arrays.copyOf(encrypted, encrypted.length);
for (int i = 0; i < corrupted.length; i++) {
if (i % 2 == 1) {
corrupted[i] = (byte)~corrupted[i];
}
}
try {
cell.decrypt(corrupted, authToken);
fail("expected SecureCellException");
}
catch (SecureCellException ignored) {}
}
@Test
@SuppressWarnings("ResultOfMethodCallIgnored")
public void detectTruncatedData() {
SecureCell.TokenProtect cell = SecureCell.TokenProtectWithKey(new SymmetricKey());
byte[] message = "All your base are belong to us!".getBytes(StandardCharsets.UTF_8);
SecureCellData result = cell.encrypt(message);
byte[] encrypted = result.getProtectedData();
byte[] authToken = result.getAdditionalData();
byte[] truncated = Arrays.copyOf(encrypted, encrypted.length - 1);
try {
cell.decrypt(truncated, authToken);
fail("expected SecureCellException");
}
catch (SecureCellException ignored) {}
}
@Test
@SuppressWarnings("ResultOfMethodCallIgnored")
public void detectExtendedData() {
SecureCell.TokenProtect cell = SecureCell.TokenProtectWithKey(new SymmetricKey());
byte[] message = "All your base are belong to us!".getBytes(StandardCharsets.UTF_8);
SecureCellData result = cell.encrypt(message);
byte[] encrypted = result.getProtectedData();
byte[] authToken = result.getAdditionalData();
byte[] extended = Arrays.copyOf(encrypted, encrypted.length + 1);
try {
cell.decrypt(extended, authToken);
fail("expected SecureCellException");
}
catch (SecureCellException ignored) {}
}
@Test
@SuppressWarnings("ResultOfMethodCallIgnored")
public void detectCorruptedToken() {
SecureCell.TokenProtect cell = SecureCell.TokenProtectWithKey(new SymmetricKey());
byte[] message = "All your base are belong to us!".getBytes(StandardCharsets.UTF_8);
SecureCellData result = cell.encrypt(message);
byte[] encrypted = result.getProtectedData();
byte[] authToken = result.getAdditionalData();
// Invert every odd byte, this will surely break the token.
byte[] corruptedToken = Arrays.copyOf(authToken, authToken.length);
for (int i = 0; i < corruptedToken.length; i++) {
if (i % 2 == 1) {
corruptedToken[i] = (byte)~corruptedToken[i];
}
}
try {
cell.decrypt(encrypted, corruptedToken);
fail("expected SecureCellException");
}
catch (SecureCellException ignored) {}
}
@Test
@SuppressWarnings("ResultOfMethodCallIgnored")
public void detectTruncatedToken() {
SecureCell.TokenProtect cell = SecureCell.TokenProtectWithKey(new SymmetricKey());
byte[] message = "All your base are belong to us!".getBytes(StandardCharsets.UTF_8);
SecureCellData result = cell.encrypt(message);
byte[] encrypted = result.getProtectedData();
byte[] authToken = result.getAdditionalData();
byte[] truncatedToken = Arrays.copyOf(authToken, authToken.length - 1);
try {
cell.decrypt(encrypted, truncatedToken);
fail("expected SecureCellException");
}
catch (SecureCellException ignored) {}
}
@Test
public void detectExtendedToken() throws SecureCellException {
SecureCell.TokenProtect cell = SecureCell.TokenProtectWithKey(new SymmetricKey());
byte[] message = "All your base are belong to us!".getBytes(StandardCharsets.UTF_8);
SecureCellData result = cell.encrypt(message);
byte[] encrypted = result.getProtectedData();
byte[] authToken = result.getAdditionalData();
byte[] extendedToken = Arrays.copyOf(authToken, authToken.length + 1);
// Current implementation of Secure Cell allows the token to be overlong.
// Extra data is simply ignored.
byte[] decrypted = cell.decrypt(encrypted, extendedToken);
assertArrayEquals(message, decrypted);
}
@Test
@SuppressWarnings("ResultOfMethodCallIgnored")
public void swapTokenAndData() {
SecureCell.TokenProtect cell = SecureCell.TokenProtectWithKey(new SymmetricKey());
byte[] message = "All your base are belong to us!".getBytes(StandardCharsets.UTF_8);
SecureCellData result = cell.encrypt(message);
byte[] encrypted = result.getProtectedData();
byte[] authToken = result.getAdditionalData();
try {
cell.decrypt(authToken, encrypted);
fail("expected SecureCellException");
}
// Depending on how lucky you are, Themis might or might not detect the error early enough.
// If it does not, it proceeds to allocate some weird buffer which might be too big.
catch (SecureCellException | OutOfMemoryError ignored) {}
}
@Test
@SuppressWarnings("ResultOfMethodCallIgnored")
public void swapContextAndToken() {
SecureCell.TokenProtect cell = SecureCell.TokenProtectWithKey(new SymmetricKey());
byte[] message = "All your base are belong to us!".getBytes(StandardCharsets.UTF_8);
byte[] context = "We are CATS".getBytes(StandardCharsets.UTF_8);
SecureCellData result = cell.encrypt(message, context);
byte[] encrypted = result.getProtectedData();
try {
cell.decrypt(encrypted, context);
fail("expected SecureCellException");
}
catch (SecureCellException ignored) {}
}
@Test
@SuppressWarnings("ResultOfMethodCallIgnored")
public void emptyMessageOrToken() throws SecureCellException {
SecureCell.TokenProtect cell = SecureCell.TokenProtectWithKey(new SymmetricKey());
byte[] message = "All your base are belong to us!".getBytes(StandardCharsets.UTF_8);
try {
cell.encrypt(null);
fail("expected NullArgumentException");
}
catch (NullArgumentException ignored) {}
try {
cell.encrypt(new byte[]{});
fail("expected InvalidArgumentException");
}
catch (InvalidArgumentException ignored) {}
SecureCellData result = cell.encrypt(message);
byte[] encrypted = result.getProtectedData();
byte[] authToken = result.getAdditionalData();
try {
cell.decrypt(encrypted, null);
fail("expected NullArgumentException");
}
catch (NullArgumentException ignored) {}
try {
cell.decrypt(null, authToken);
fail("expected NullArgumentException");
}
catch (NullArgumentException ignored) {}
try {
cell.decrypt(encrypted, new byte[]{});
fail("expected InvalidArgumentException");
}
catch (InvalidArgumentException ignored) {}
try {
cell.decrypt(new byte[]{}, authToken);
fail("expected InvalidArgumentException");
}
catch (InvalidArgumentException ignored) {}
}
@Test
@SuppressWarnings("deprecation")
public void oldAPI() throws SecureCellException {
SymmetricKey key = new SymmetricKey();
SecureCell.TokenProtect newCell = SecureCell.TokenProtectWithKey(key);
SecureCell oldCell = new SecureCell(key.toByteArray(), SecureCell.MODE_TOKEN_PROTECT);
byte[] message = "All your base are belong to us!".getBytes(StandardCharsets.UTF_8);
byte[] context = "We are CATS".getBytes(StandardCharsets.UTF_8);
byte[] encrypted, authToken, decrypted;
SecureCellData result = oldCell.protect(context, message);
assertNotNull(result);
encrypted = result.getProtectedData();
authToken = result.getAdditionalData();
assertNotNull(encrypted);
assertNotNull(authToken);
decrypted = newCell.decrypt(encrypted, authToken, context);
assertArrayEquals(message, decrypted);
result = newCell.encrypt(message, context);
assertNotNull(result);
encrypted = result.getProtectedData();
authToken = result.getAdditionalData();
assertNotNull(encrypted);
assertNotNull(authToken);
decrypted = oldCell.unprotect(context, new SecureCellData(encrypted, authToken));
assertArrayEquals(message, decrypted);
}
@Test
@SuppressWarnings("deprecation")
public void oldAPIWithoutContext() throws SecureCellException {
SymmetricKey key = new SymmetricKey();
SecureCell.TokenProtect newCell = SecureCell.TokenProtectWithKey(key);
SecureCell oldCell = new SecureCell(key.toByteArray(), SecureCell.MODE_TOKEN_PROTECT);
byte[] message = "All your base are belong to us!".getBytes(StandardCharsets.UTF_8);
byte[] encrypted, authToken, decrypted;
SecureCellData result = oldCell.protect((byte[])null, message);
assertNotNull(result);
encrypted = result.getProtectedData();
authToken = result.getAdditionalData();
assertNotNull(encrypted);
assertNotNull(authToken);
decrypted = newCell.decrypt(encrypted, authToken);
assertArrayEquals(message, decrypted);
result = newCell.encrypt(message);
assertNotNull(result);
encrypted = result.getProtectedData();
authToken = result.getAdditionalData();
assertNotNull(encrypted);
assertNotNull(authToken);
decrypted = oldCell.unprotect((byte[])null, new SecureCellData(encrypted, authToken));
assertArrayEquals(message, decrypted);
}
}
| {
"pile_set_name": "Github"
} |
/**
* Copyright (C) 2016 Weibo Inc.
*
* This file is part of Opendcp.
*
* Opendcp is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; version 2 of the License.
*
* Opendcp is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with Opendcp; if not, write to the Free Software
* Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
*/
package models
// Describes a security group.
type GroupIdentifier struct {
// The ID of the security group.
GroupId string `locationName:"groupId" type:"string"`
// The name of the security group.
GroupName string `locationName:"groupName" type:"string"`
}
// Contains the parameters for CreateSecurityGroup.
type CreateSecurityGroupParam struct {
// The name of provider
Provider string `locationName:"provider" type:"string" required:"true"`
// A description for the security group. This is informational only.
//
// Constraints: Up to 255 characters in length
//
// Constraints for Classic: ASCII characters
//
// Constraints for VPC: a-z, A-Z, 0-9, spaces, and ._-:/()#,@[]+=&;{}!$*
Description string `locationName:"GroupDescription" type:"string" required:"true"`
// Checks whether you have the required permissions for the action, without
// actually making the request, and provides an error response. If you have
// the required permissions, the error response is DryRunOperation. Otherwise,
// it is UnauthorizedOperation.
DryRun bool `locationName:"dryRun" type:"boolean"`
// The name of the security group.
//
// Constraints: Up to 255 characters in length
//
// Constraints for Classic: ASCII characters
//
// Constraints for VPC: a-z, A-Z, 0-9, spaces, and ._-:/()#,@[]+=&;{}!$*
GroupName string `type:"string" required:"true"`
// [VPC] The ID of the VPC. Required for EC2-VPC.
VpcId string `type:"string"`
}
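// Illustrative only (not part of this package): a caller might populate the
// parameters like this; the field values are placeholders, not real resources.
//
//	param := CreateSecurityGroupParam{
//		Provider:    "aws",
//		GroupName:   "web-servers",
//		Description: "allow inbound http/https",
//		VpcId:       "vpc-0123456789abcdef0",
//	}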
// Contains the output of CreateSecurityGroups.
type CreateSecurityGroupsResp struct {
// Information about one or more security groups.
SecurityGroups []SecurityGroup `locationName:"securityGroupInfo" locationNameList:"item" type:"list"`
}
// Describes a security group
type SecurityGroup struct {
// A description of the security group.
Description string `locationName:"groupDescription" type:"string"`
// The ID of the security group.
GroupId string `locationName:"groupId" type:"string"`
// The name of the security group.
GroupName string `locationName:"groupName" type:"string"`
// One or more inbound rules associated with the security group.
IpPermissions []IpPermission `locationName:"ipPermissions" locationNameList:"item" type:"list"`
// [EC2-VPC] One or more outbound rules associated with the security group.
IpPermissionsEgress []IpPermission `locationName:"ipPermissionsEgress" locationNameList:"item" type:"list"`
// The AWS account ID of the owner of the security group.
OwnerId string `locationName:"ownerId" type:"string"`
// Any tags assigned to the security group.
Tags []Tag `locationName:"tagSet" locationNameList:"item" type:"list"`
// [EC2-VPC] The ID of the VPC for the security group.
VpcId string `locationName:"vpcId" type:"string"`
}
// Describes a security group and AWS account ID pair.
type UserIdGroupPair struct {
// The ID of the security group.
GroupId string `locationName:"groupId" type:"string"`
// The name of the security group. In a request, use this parameter for a security
// group in EC2-Classic or a default VPC only. For a security group in a nondefault
// VPC, use the security group ID.
GroupName string `locationName:"groupName" type:"string"`
// The status of a VPC peering connection, if applicable.
PeeringStatus string `locationName:"peeringStatus" type:"string"`
// The ID of an AWS account. For a referenced security group in another VPC,
// the account ID of the referenced security group is returned.
//
// [EC2-Classic] Required when adding or removing rules that reference a security
// group in another AWS account.
UserId string `locationName:"userId" type:"string"`
// The ID of the VPC for the referenced security group, if applicable.
VpcId string `locationName:"vpcId" type:"string"`
// The ID of the VPC peering connection, if applicable.
VpcPeeringConnectionId string `locationName:"vpcPeeringConnectionId" type:"string"`
}
// Contains the output of CreateSecurityGroup.
type SecurityGroupResp struct {
// The ID of the security group.
GroupId string `locationName:"groupId" type:"string"`
}
// Contains the parameters for AuthorizeSecurityGroupIngress.
type AuthorizeSecurityGroupIngress struct {
// The name of provider
Provider string `locationName:"provider" type:"string" required:"true"`
// The CIDR IP address range. You can't specify this parameter when specifying
// a source security group.
CidrIp string `type:"string"`
// Checks whether you have the required permissions for the action, without
// actually making the request, and provides an error response. If you have
// the required permissions, the error response is DryRunOperation. Otherwise,
// it is UnauthorizedOperation.
DryRun bool `locationName:"dryRun" type:"boolean"`
// The start of port range for the TCP and UDP protocols, or an ICMP type number.
// For the ICMP type number, use -1 to specify all ICMP types.
FromPort int64 `type:"integer"`
// The ID of the security group. Required for a nondefault VPC.
GroupId string `type:"string"`
// [EC2-Classic, default VPC] The name of the security group.
GroupName string `type:"string"`
// A set of IP permissions. Can be used to specify multiple rules in a single
// command.
IpPermissions []IpPermission `locationNameList:"item" type:"list"`
// The IP protocol name (tcp, udp, icmp) or number (see Protocol Numbers (http://www.iana.org/assignments/protocol-numbers/protocol-numbers.xhtml)).
// (VPC only) Use -1 to specify all.
IpProtocol string `type:"string"`
// [EC2-Classic, default VPC] The name of the source security group. You can't
// specify this parameter in combination with the following parameters: the
// CIDR IP address range, the start of the port range, the IP protocol, and
// the end of the port range. Creates rules that grant full ICMP, UDP, and TCP
// access. To create a rule with a specific IP protocol and port range, use
// a set of IP permissions instead. For EC2-VPC, the source security group must
// be in the same VPC.
SourceSecurityGroupName string `type:"string"`
// [EC2-Classic] The AWS account number for the source security group, if the
// source security group is in a different account. You can't specify this parameter
// in combination with the following parameters: the CIDR IP address range,
// the IP protocol, the start of the port range, and the end of the port range.
// Creates rules that grant full ICMP, UDP, and TCP access. To create a rule
// with a specific IP protocol and port range, use a set of IP permissions instead.
SourceSecurityGroupOwnerId string `type:"string"`
// The end of port range for the TCP and UDP protocols, or an ICMP code number.
// For the ICMP code number, use -1 to specify all ICMP codes for the ICMP type.
ToPort int64 `type:"integer"`
}
// Describes a security group rule.
type IpPermission struct {
// The start of port range for the TCP and UDP protocols, or an ICMP type number.
// A value of -1 indicates all ICMP types.
FromPort int64 `locationName:"fromPort" type:"integer"`
// The IP protocol name (for tcp, udp, and icmp) or number (see Protocol Numbers
// (http://www.iana.org/assignments/protocol-numbers/protocol-numbers.xhtml)).
//
// [EC2-VPC only] When you authorize or revoke security group rules, you can
// use -1 to specify all.
IpProtocol string `locationName:"ipProtocol" type:"string"`
// One or more IP ranges.
IpRanges []IpRange `locationName:"ipRanges" locationNameList:"item" type:"list"`
// (Valid for AuthorizeSecurityGroupEgress, RevokeSecurityGroupEgress and DescribeSecurityGroups
// only) One or more prefix list IDs for an AWS service. In an AuthorizeSecurityGroupEgress
// request, this is the AWS service that you want to access through a VPC endpoint
// from instances associated with the security group.
PrefixListIds []PrefixListId `locationName:"prefixListIds" locationNameList:"item" type:"list"`
// The end of port range for the TCP and UDP protocols, or an ICMP code. A value
// of -1 indicates all ICMP codes for the specified ICMP type.
ToPort *int64 `locationName:"toPort" type:"integer"`
// One or more security group and AWS account ID pairs.
UserIdGroupPairs []UserIdGroupPair `locationName:"groups" locationNameList:"item" type:"list"`
}
| {
"pile_set_name": "Github"
} |
# created by tools/tclZIC.tcl - do not edit
set TZData(:America/Grand_Turk) {
{-9223372036854775808 -17072 0 LMT}
{-2524504528 -18431 0 KMT}
{-1827687169 -18000 0 EST}
{284014800 -18000 0 EST}
{294217200 -14400 1 EDT}
{309938400 -18000 0 EST}
{325666800 -14400 1 EDT}
{341388000 -18000 0 EST}
{357116400 -14400 1 EDT}
{372837600 -18000 0 EST}
{388566000 -14400 1 EDT}
{404892000 -18000 0 EST}
{420015600 -14400 1 EDT}
{436341600 -18000 0 EST}
{452070000 -14400 1 EDT}
{467791200 -18000 0 EST}
{483519600 -14400 1 EDT}
{499240800 -18000 0 EST}
{514969200 -14400 1 EDT}
{530690400 -18000 0 EST}
{544604400 -14400 1 EDT}
{562140000 -18000 0 EST}
{576054000 -14400 1 EDT}
{594194400 -18000 0 EST}
{607503600 -14400 1 EDT}
{625644000 -18000 0 EST}
{638953200 -14400 1 EDT}
{657093600 -18000 0 EST}
{671007600 -14400 1 EDT}
{688543200 -18000 0 EST}
{702457200 -14400 1 EDT}
{719992800 -18000 0 EST}
{733906800 -14400 1 EDT}
{752047200 -18000 0 EST}
{765356400 -14400 1 EDT}
{783496800 -18000 0 EST}
{796806000 -14400 1 EDT}
{814946400 -18000 0 EST}
{828860400 -14400 1 EDT}
{846396000 -18000 0 EST}
{860310000 -14400 1 EDT}
{877845600 -18000 0 EST}
{891759600 -14400 1 EDT}
{909295200 -18000 0 EST}
{923209200 -14400 1 EDT}
{941349600 -18000 0 EST}
{954658800 -14400 1 EDT}
{972799200 -18000 0 EST}
{986108400 -14400 1 EDT}
{1004248800 -18000 0 EST}
{1018162800 -14400 1 EDT}
{1035698400 -18000 0 EST}
{1049612400 -14400 1 EDT}
{1067148000 -18000 0 EST}
{1081062000 -14400 1 EDT}
{1099202400 -18000 0 EST}
{1112511600 -14400 1 EDT}
{1130652000 -18000 0 EST}
{1143961200 -14400 1 EDT}
{1162101600 -18000 0 EST}
{1173596400 -14400 1 EDT}
{1194156000 -18000 0 EST}
{1205046000 -14400 1 EDT}
{1225605600 -18000 0 EST}
{1236495600 -14400 1 EDT}
{1257055200 -18000 0 EST}
{1268550000 -14400 1 EDT}
{1289109600 -18000 0 EST}
{1299999600 -14400 1 EDT}
{1320559200 -18000 0 EST}
{1331449200 -14400 1 EDT}
{1352008800 -18000 0 EST}
{1362898800 -14400 1 EDT}
{1383458400 -18000 0 EST}
{1394348400 -14400 1 EDT}
{1414908000 -18000 0 EST}
{1425798000 -14400 1 EDT}
{1446361200 -14400 0 AST}
}
| {
"pile_set_name": "Github"
} |
# Themes
## Base 16 Tomorrow

## One

## Solarized

| {
"pile_set_name": "Github"
} |
#!/usr/bin/env bash
# Runs construct
##
usage="Usage: construct [input] [output]"
# if less than 2 args specified, show usage
if [ $# -le 1 ]; then
echo $usage
exit 1
fi
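# Example invocation (illustrative; file paths are placeholders):
#   ./construct data/input.txt data/output.succinct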
bin="`dirname "$0"`"
bin="`cd "$bin"; pwd`"
. "$bin/succinct-config.sh"
. "$SUCCINCT_PREFIX/bin/load-succinct-env.sh"
JAR_PATH="$SUCCINCT_PREFIX/target"
JAR_COUNT=0
for f in "${JAR_PATH}"/succinct-core-*-jar-with-dependencies.jar; do
if [[ ! -e "$f" ]]; then
echo "Failed to find Succinct Core assembly in $JAR_PATH" 1>&2
echo "You need to build Succinct-Core before running this program" 1>&2
exit 1
fi
SUCCINCT_CORE_JAR="$f"
JAR_COUNT=$((JAR_COUNT+1))
done
if [ "$JAR_COUNT" -gt "1" ]; then
echo "Found multiple Succinct Core assemblies in $JAR_PATH" 1>&2
ls "${JAR_PATH}"/succinct-core-*-jar-with-dependencies.jar 1>&2
echo "Please remove all but one jar." 1>&2
exit 1
fi
java -cp $SUCCINCT_CORE_JAR edu.berkeley.cs.succinct.examples.Construct $@
| {
"pile_set_name": "Github"
} |