text
stringlengths
2
99.9k
meta
dict
# This contains libraries that a) do not change frequently or b) require c code compilation to work # We install these libraries as a single layer in our base docker image # To decode JWT tokens. # cryptography is actually a transitive dependency, and only needed because PyJWT needs it PyJWT==1.7.1 cryptography==2.9 # Postgres driver psycopg2==2.8.4 # Mysql driver mysqlclient==1.4.6 # MS SQL Server Driver pymssql==2.1.4 # Oracle Driver cx-Oracle==7.3.0 # For running flask in production uwsgi==2.0.18
{ "pile_set_name": "Github" }
{ "cells": [ { "cell_type": "code", "execution_count": 1, "metadata": {}, "outputs": [ { "name": "stderr", "output_type": "stream", "text": [ "Using TensorFlow backend.\n" ] } ], "source": [ "import numpy as np\n", "import json\n", "from keras.models import Model\n", "from keras.layers import Input\n", "from keras.layers.convolutional import Conv2D\n", "from keras import backend as K\n", "from collections import OrderedDict" ] }, { "cell_type": "code", "execution_count": 2, "metadata": { "collapsed": true }, "outputs": [], "source": [ "def format_decimal(arr, places=6):\n", " return [round(x * 10**places) / 10**places for x in arr]" ] }, { "cell_type": "code", "execution_count": 3, "metadata": { "collapsed": true }, "outputs": [], "source": [ "DATA = OrderedDict()" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "### pipeline 1" ] }, { "cell_type": "code", "execution_count": 4, "metadata": { "collapsed": true }, "outputs": [], "source": [ "random_seed = 1000\n", "data_in_shape = (17, 17, 2)\n", "\n", "layers = [\n", " Conv2D(5, (3,3), activation='relu', padding='same', strides=(2, 2), data_format='channels_last', use_bias=True),\n", " Conv2D(4, (3,3), activation='linear', padding='same', strides=(1, 1), data_format='channels_last', use_bias=True),\n", " Conv2D(2, (3,3), activation='relu', padding='valid', strides=(1, 1), data_format='channels_last', use_bias=True),\n", " Conv2D(3, (5,5), activation='relu', padding='valid', strides=(1, 1), data_format='channels_last', use_bias=True),\n", " Conv2D(2, (3,3), activation='linear', padding='valid', strides=(1, 1), data_format='channels_last', use_bias=True)\n", "]\n", "\n", "input_layer = Input(shape=data_in_shape)\n", "x = layers[0](input_layer)\n", "for layer in layers[1:-1]:\n", " x = layer(x)\n", "output_layer = layers[-1](x)\n", "model = Model(inputs=input_layer, outputs=output_layer)\n", "\n", "np.random.seed(random_seed)\n", "data_in = 2 * np.random.random(data_in_shape) - 1\n", "\n", "# set weights to 
random (use seed for reproducibility)\n", "weights = []\n", "for i, w in enumerate(model.get_weights()):\n", " np.random.seed(random_seed + i)\n", " weights.append(2 * np.random.random(w.shape) - 1)\n", "model.set_weights(weights)\n", "\n", "result = model.predict(np.array([data_in]))\n", "data_out_shape = result[0].shape\n", "data_in_formatted = format_decimal(data_in.ravel().tolist())\n", "data_out_formatted = format_decimal(result[0].ravel().tolist())\n", "\n", "DATA['pipeline_01'] = {\n", " 'input': {'data': data_in_formatted, 'shape': data_in_shape},\n", " 'weights': [{'data': format_decimal(w.ravel().tolist()), 'shape': w.shape} for w in weights],\n", " 'expected': {'data': data_out_formatted, 'shape': data_out_shape}\n", "}" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "### export for Keras.js tests" ] }, { "cell_type": "code", "execution_count": 5, "metadata": { "collapsed": true }, "outputs": [], "source": [ "import os\n", "\n", "filename = '../../test/data/pipeline/01.json'\n", "if not os.path.exists(os.path.dirname(filename)):\n", " os.makedirs(os.path.dirname(filename))\n", "with open(filename, 'w') as f:\n", " json.dump(DATA, f)" ] }, { "cell_type": "code", "execution_count": 6, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "{\"pipeline_01\": {\"input\": {\"data\": [0.307179, -0.769986, 0.900566, -0.035617, 0.744949, -0.575335, -0.918581, -0.205611, -0.533736, 0.683481, -0.585835, 0.484939, -0.215692, -0.635487, 0.487079, -0.860836, 0.770674, 0.905289, 0.862287, -0.169138, -0.942037, 0.964055, -0.320725, 0.413374, -0.276246, -0.929788, 0.710117, 0.314507, 0.531366, 0.108174, 0.770186, 0.808395, -0.979157, -0.850887, -0.510742, -0.73339, 0.39585, -0.20359, 0.766244, -0.637985, -0.135002, -0.963714, 0.382876, -0.060619, -0.743556, 0.782674, 0.836407, -0.853758, -0.909104, -0.122854, 0.203442, -0.379546, 0.363816, -0.581974, 0.039209, 0.131978, -0.117665, -0.724888, -0.572914, -0.733256, -0.355407, 
-0.532226, 0.054996, 0.131942, -0.123549, -0.356255, 0.119282, 0.730691, 0.694566, -0.784366, -0.367361, -0.181043, 0.374178, 0.404471, -0.107604, -0.159356, 0.605261, 0.077235, 0.847001, -0.876185, -0.264833, 0.940798, 0.398208, 0.781951, -0.492895, 0.451054, -0.593011, 0.075024, -0.526114, -0.127016, 0.596049, -0.383182, 0.243033, -0.120704, 0.826647, 0.317372, 0.307263, -0.283084, 0.045883, -0.909825, -0.811381, 0.843224, -0.853911, 0.858835, 0.43403, 0.244621, 0.509979, -0.71666, 0.587644, 0.450806, 0.082879, -0.034831, -0.047045, -0.107934, 0.63214, -0.451297, -0.876076, 0.920593, -0.083764, 0.515784, 0.362317, 0.083556, -0.734335, -0.493589, -0.112289, 0.1175, 0.627729, -0.364343, -0.20591, 0.780336, 0.650832, -0.786626, -0.680448, 0.302439, 0.138128, 0.968167, 0.498318, 0.604799, -0.54338, 0.037427, 0.260217, -0.995629, 0.02643, -0.10162, -0.301287, -0.452637, 0.804166, -0.938051, -0.351662, -0.426815, 0.914272, -0.966934, -0.085353, -0.350128, 0.392927, 0.3855, 0.365292, 0.784261, 0.891156, -0.836385, -0.766222, -0.926262, 0.84758, 0.963802, -0.246749, -0.873048, -0.744287, 0.759778, -0.091904, -0.601018, 0.252064, 0.552859, -0.070904, -0.919266, -0.252198, 0.733837, -0.793589, -0.774978, 0.560268, -0.729902, -0.742883, -0.518994, -0.416378, -0.333527, 0.584328, -0.511306, -0.78923, -0.765839, -0.281298, -0.532119, 0.65626, 0.221575, -0.420019, 0.994877, -0.161517, 0.789445, -0.423508, -0.37311, 0.961123, -0.821726, 0.760378, 0.920009, -0.318366, 0.831071, 0.797277, 0.998735, -0.764629, 0.190218, 0.571854, 0.451473, 0.659159, -0.961103, 0.893633, -0.673254, -0.945135, 0.958047, -0.630267, 0.668912, -0.97545, -0.932668, -0.855912, 0.273892, -0.508473, -0.147774, 0.92673, -0.589615, -0.529108, -0.026501, -0.442497, 0.043248, -0.097879, -0.32872, -0.976222, 0.516042, 0.77989, -0.275965, -0.066363, -0.878685, 0.477652, -0.531969, -0.738158, -0.719768, -0.598863, 0.459657, -0.136082, 0.397067, -0.716891, -0.552125, 0.645655, -0.992376, -0.547147, 0.675121, 
0.359423, -0.452114, 0.642834, -0.65893, 0.084555, 0.198223, 0.947512, 0.347592, -0.30919, 0.017157, -0.504988, 0.975185, -0.18213, -0.827767, 0.962281, 0.793245, 0.066392, -0.929715, 0.675438, 0.807816, 0.260682, -0.437998, 0.865408, -0.964172, -0.358725, -0.297628, 0.698402, -0.991327, 0.296135, 0.111031, -0.325199, -0.829535, -0.706708, 0.315692, -0.908343, -0.876177, -0.084321, 0.042117, -0.96889, 0.065587, -0.761545, -0.771358, 0.370507, -0.371447, 0.567487, 0.906827, 0.306348, 0.991507, -0.251738, -0.179506, -0.13481, 0.009328, 0.50426, -0.478935, -0.23036, 0.482967, 0.108361, 0.323875, -0.107073, 0.484604, 0.976787, -0.757528, 0.55991, -0.087214, -0.45433, 0.964335, -0.710762, -0.223378, 0.338984, -0.566817, -0.423957, -0.133355, 0.771202, -0.348399, 0.698459, -0.553718, 0.362846, 0.765188, -0.810349, 0.415447, -0.261726, 0.141378, 0.394877, -0.236814, -0.136671, -0.605446, -0.816587, -0.834155, -0.867417, -0.005713, 0.832653, -0.74534, 0.015888, -0.859369, 0.263748, -0.514297, -0.724415, -0.519365, -0.26064, 0.562583, 0.556172, -0.894421, 0.485605, 0.068895, -0.574381, 0.952707, 0.854282, -0.816716, 0.566863, -0.851922, -0.304581, 0.940295, -0.092955, -0.252095, -0.366209, 0.8118, 0.609144, 0.66955, 0.296141, -0.522011, -0.547367, -0.046487, -0.141129, -0.683622, -0.608246, -0.942115, 0.200807, -0.171167, 0.913158, -0.576902, -0.177556, -0.085063, 0.942418, -0.803642, -0.231319, 0.110499, 0.216448, 0.554095, -0.775795, -0.858061, -0.894796, -0.177721, 0.812784, 0.032743, 0.14351, 0.759765, -0.70414, -0.310653, -0.70489, -0.174463, -0.927175, -0.788743, -0.979671, 0.48012, 0.132285, -0.671046, -0.958749, 0.945391, 0.040751, -0.738711, 0.822168, 0.910797, 0.662187, 0.325641, -0.134855, 0.310373, -0.499573, 0.135194, 0.376075, 0.742203, -0.185185, 0.681693, -0.614504, 0.345224, 0.663712, 0.014513, 0.42408, 0.833134, -0.260953, -0.078377, -0.933143, -0.000877, -0.478206, 0.430175, -0.840631, 0.428118, 0.846154, -0.40912, 0.509388, 0.669512, 0.756252, -0.375561, 
-0.551532, 0.88488, 0.436005, -0.751576, -0.563339, 0.004887, 0.137054, 0.19583, -0.215128, -0.232628, -0.257705, 0.010894, -0.638739, 0.98891, 0.761058, 0.692921, 0.183508, 0.590217, -0.769043, 0.707619, -0.393498, 0.806385, -0.468566, -0.96876, 0.760738, 0.436183, -0.535061, -0.5628, 0.886738, 0.816212, 0.004339, -0.891716, 0.782022, 0.107881, 0.594794, 0.171877, -0.817309, 0.675026, 0.824638, -0.890092, -0.45473, -0.371234, 0.087631, -0.569719, -0.864876, 0.215788, 0.716897, 0.727561, -0.569618, -0.850193, 0.307958, -0.477611, 0.660603, 0.779336, 0.535998, -0.280336, -0.701544, -0.055577, 0.706065, 0.288925, -0.635776, -0.431255, 0.831398, 0.454381, 0.062509, 0.856797, -0.675694, -0.783077, 0.829229, 0.012088, 0.298661, -0.092186, 0.103526, 0.322325, -0.303668, 0.574324, -0.879712, 0.924606, -0.743586, -0.737669, -0.977774, -0.04452, 0.338219, -0.214654, -0.549583, 0.314906, -0.659444, -0.128102, 0.719527, 0.643744, 0.405795, 0.500263, -0.968486, -0.103665, 0.381867, -0.002469, -0.078127, 0.806476, -0.87487, -0.409392, 0.632035, -0.159183, -0.01837, -0.464667, 0.171106, 0.491622, -0.939924, 0.929357, 0.889669, 0.693238, -0.425875, -0.792446, 0.992246, -0.714005, 0.477842, 0.014597, 0.088993, 0.478136, -0.63804, -0.777092, -0.704203, 0.141475, -0.285228, 0.287741, 0.136988, 0.121701, 0.481481, 0.098355, -0.759691, 0.258656, 0.925657, 0.56495, 0.004887, 0.934166], \"shape\": [17, 17, 2]}, \"weights\": [{\"data\": [0.307179, -0.769986, 0.900566, -0.035617, 0.744949, -0.575335, -0.918581, -0.205611, -0.533736, 0.683481, -0.585835, 0.484939, -0.215692, -0.635487, 0.487079, -0.860836, 0.770674, 0.905289, 0.862287, -0.169138, -0.942037, 0.964055, -0.320725, 0.413374, -0.276246, -0.929788, 0.710117, 0.314507, 0.531366, 0.108174, 0.770186, 0.808395, -0.979157, -0.850887, -0.510742, -0.73339, 0.39585, -0.20359, 0.766244, -0.637985, -0.135002, -0.963714, 0.382876, -0.060619, -0.743556, 0.782674, 0.836407, -0.853758, -0.909104, -0.122854, 0.203442, -0.379546, 0.363816, 
-0.581974, 0.039209, 0.131978, -0.117665, -0.724888, -0.572914, -0.733256, -0.355407, -0.532226, 0.054996, 0.131942, -0.123549, -0.356255, 0.119282, 0.730691, 0.694566, -0.784366, -0.367361, -0.181043, 0.374178, 0.404471, -0.107604, -0.159356, 0.605261, 0.077235, 0.847001, -0.876185, -0.264833, 0.940798, 0.398208, 0.781951, -0.492895, 0.451054, -0.593011, 0.075024, -0.526114, -0.127016], \"shape\": [3, 3, 2, 5]}, {\"data\": [-0.387536, -0.469873, -0.60788, -0.138957, -0.953773], \"shape\": [5]}, {\"data\": [-0.742023, -0.077688, -0.167692, 0.205448, -0.633864, -0.164175, -0.731823, 0.313236, 0.613465, -0.723716, -0.299231, 0.229032, 0.102561, 0.384949, -0.90948, -0.294898, -0.916217, -0.699031, -0.323329, -0.673445, 0.521949, -0.306796, -0.476018, -0.628623, 0.808028, -0.585043, -0.307429, -0.234868, -0.897584, 0.741743, 0.320785, 0.709132, -0.978084, 0.601894, -0.228816, -0.069558, -0.522066, -0.399597, -0.916222, 0.161549, -0.211915, 0.823372, -0.6549, -0.30403, 0.677588, -0.431259, 0.219659, -0.091937, -0.101636, -0.595218, -0.815428, 0.502932, 0.775249, 0.624226, 0.622601, -0.091075, 0.763603, 0.472659, 0.621131, -0.504549, -0.270214, 0.492749, 0.643055, -0.290058, -0.752162, 0.758918, 0.011832, -0.183967, 0.768298, 0.764241, 0.906398, 0.872853, -0.292238, 0.16788, -0.447741, 0.679196, 0.566614, 0.867549, -0.011606, -0.252108, 0.165669, -0.509362, 0.620632, -0.32465, -0.071143, -0.823613, 0.331067, -0.016903, -0.76138, -0.491146, 0.106088, -0.641492, 0.234893, 0.658853, -0.475623, 0.269103, 0.935505, -0.577134, 0.985015, -0.405957, -0.325882, 0.849518, -0.589155, 0.378331, -0.753075, 0.711411, 0.04547, 0.398327, -0.665657, 0.531142, -0.410293, -0.526649, 0.860648, 0.32795, -0.197082, -0.095526, -0.391361, 0.785465, -0.267269, -0.020154, -0.95189, -0.580742, 0.788104, -0.092433, 0.320354, 0.070651, 0.045416, 0.99799, 0.583116, -0.708131, -0.104784, -0.838947, -0.598224, 0.209105, 0.824956, 0.10438, 0.692046, -0.091308, 0.884896, 0.730617, 0.244486, -0.415624, 
-0.397714, -0.647236, -0.816162, 0.001325, 0.593873, -0.243723, 0.168275, 0.192345, -0.522916, 0.458154, -0.333828, -0.014549, -0.744552, -0.203297, 0.771256, -0.703928, 0.998892, -0.947633, 0.566086, -0.274437, -0.218108, -0.800599, 0.504541, 0.233776, -0.111802, -0.03089, 0.761595, 0.537963, 0.217941, -0.910822, 0.531235, -0.018533, -0.161811, -0.200401, -0.742618, -0.35126, -0.954474, -0.071092], \"shape\": [3, 3, 5, 4]}, {\"data\": [0.195612, -0.128132, -0.96626, 0.193375], \"shape\": [4]}, {\"data\": [-0.922097, 0.712992, 0.493001, 0.727856, 0.119969, -0.839034, -0.536727, -0.515472, 0.231, 0.214218, -0.791636, -0.148304, 0.309846, 0.742779, -0.123022, 0.427583, -0.882276, 0.818571, 0.043634, 0.454859, -0.007311, -0.744895, -0.368229, 0.324805, -0.388758, -0.556215, -0.542859, 0.685655, 0.350785, -0.312753, 0.591401, 0.95999, 0.136369, -0.58844, -0.506667, -0.208736, 0.548969, 0.653173, 0.128943, 0.180094, -0.16098, 0.208798, 0.666245, 0.347307, -0.384733, -0.88354, -0.328468, -0.515324, 0.479247, -0.360647, 0.09069, -0.221424, 0.091284, 0.202631, 0.208087, 0.582248, -0.164064, -0.925036, -0.678806, -0.212846, 0.960861, 0.536089, -0.038634, -0.473456, -0.409408, 0.620315, -0.873085, -0.695405, -0.024465, 0.762843, -0.928228, 0.557106], \"shape\": [3, 3, 4, 2]}, {\"data\": [0.318429, -0.858397], \"shape\": [2]}, {\"data\": [0.486255, -0.547151, 0.285068, 0.764711, 0.481398, 0.442527, -0.409304, 0.051033, -0.652471, 0.623918, 0.698811, -0.48696, -0.525531, -0.083229, -0.54216, -0.595979, 0.965361, 0.961457, 0.469608, -0.18139, 0.237622, -0.841546, -0.201479, 0.210842, -0.099026, -0.017468, -0.270985, -0.421947, 0.990418, 0.633556, -0.46994, -0.283905, 0.339371, 0.851372, -0.963439, 0.672347, -0.592494, 0.115008, 0.77155, -0.629049, -0.284972, 0.08256, -0.526964, -0.579017, 0.048964, 0.296374, -0.503246, -0.95555, -0.759658, -0.148746, 0.527992, 0.419541, -0.601167, -0.246472, 0.611566, -0.47989, -0.796678, 0.845136, -0.929013, 0.081316, -0.965527, 0.064677, 
0.687209, -0.781686, 0.556524, -0.294628, 0.343468, -0.693394, -0.864068, 0.522942, -0.854592, 0.954066, 0.352462, 0.404271, 0.935993, 0.006064, -0.614327, -0.951249, -0.974544, -0.981322, -0.524456, -0.353175, -0.283883, 0.270072, 0.336334, -0.529043, 0.880513, -0.663035, -0.709319, -0.69236, 0.233949, 0.90419, -0.721928, 0.580281, -0.149192, -0.246252, 0.099723, 0.986128, -0.979644, 0.242715, 0.433547, 0.199869, -0.572331, -0.152181, 0.329916, -0.071652, -0.580827, 0.88984, -0.857622, -0.926973, -0.444937, -0.183938, 0.72548, -0.238406, -0.651195, -0.770945, -0.97797, -0.666038, 0.253825, 0.001102, -0.769608, -0.364219, 0.653122, -0.845224, -0.900383, 0.916435, 0.562575, 0.577639, -0.655935, 0.683806, -0.955929, 0.271965, 0.670582, -0.874893, -0.671992, -0.124948, 0.354001, -0.289044, -0.880824, -0.505697, 0.975131, -0.404046, 0.345771, -0.013626, -0.077943, 0.837888, -0.371654, 0.453362, 0.331138, -0.360725], \"shape\": [5, 5, 2, 3]}, {\"data\": [0.0965, 0.594443, -0.987782], \"shape\": [3]}, {\"data\": [0.228005, 0.859479, -0.49018, 0.232871, -0.303968, -0.799141, 0.621228, 0.850429, 0.029476, -0.583346, 0.889636, -0.128896, 0.067108, -0.1059, -0.788773, -0.559347, 0.674802, 0.513275, -0.95495, -0.230976, -0.430566, 0.607782, -0.292593, -0.362274, -0.825576, 0.904458, 0.531651, 0.139053, -0.797761, 0.905804, -0.875903, 0.04377, -0.704592, 0.203555, -0.083031, 0.321316, 0.334565, 0.965166, 0.31912, 0.987618, 0.11275, 0.755438, 0.133156, -0.271605, -0.739053, 0.930942, 0.723852, -0.399546, 0.365907, 0.680404, 0.302211, 0.481088, -0.254831, -0.719056], \"shape\": [3, 3, 3, 2]}, {\"data\": [0.229761, -0.670851], \"shape\": [2]}], \"expected\": {\"data\": [1.396554, 4.630284], \"shape\": [1, 1, 2]}}}\n" ] } ], "source": [ "print(json.dumps(DATA))" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "collapsed": true }, "outputs": [], "source": [] } ], "metadata": { "anaconda-cloud": {}, "kernelspec": { "display_name": "Python 3", "language": "python", 
"name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.6.3" } }, "nbformat": 4, "nbformat_minor": 2 }
{ "pile_set_name": "Github" }
#include <stdlib.h> #include <unistd.h> #include <string.h> #include <sys/types.h> #include <stdio.h> struct auth { char name[32]; int auth; }; struct auth *auth; char *service; int main(int argc, char **argv) { char line[128]; while(1) { printf("[ auth = %p, service = %p ]\n", auth, service); if(fgets(line, sizeof(line), stdin) == NULL) break; if(strncmp(line, "auth ", 5) == 0) { auth = malloc(sizeof(auth)); memset(auth, 0, sizeof(auth)); if(strlen(line + 5) < 31) { strcpy(auth->name, line + 5); } } if(strncmp(line, "reset", 5) == 0) { free(auth); } if(strncmp(line, "service", 6) == 0) { service = strdup(line + 7); } if(strncmp(line, "login", 5) == 0) { if(auth->auth) { printf("you have logged in already!\n"); } else { printf("please enter your password\n"); } } } }
{ "pile_set_name": "Github" }
<?php namespace phpbu\App\Backup\Cleaner; use phpbu\App\Backup\Cleaner; use phpbu\App\Backup\Collector; use phpbu\App\Result; use phpbu\App\Backup\Target; /** * Simulator interface. * * @package phpbu * @subpackage Backup * @author Sebastian Feldmann <[email protected]> * @copyright Sebastian Feldmann <[email protected]> * @license https://opensource.org/licenses/MIT The MIT License (MIT) * @link http://phpbu.de/ * @since Class available since Release 3.0.0 */ interface Simulator extends Cleaner { /** * Simulate the cleanup execution. * * @param \phpbu\App\Backup\Target $target * @param \phpbu\App\Backup\Collector $collector * @param \phpbu\App\Result $result */ public function simulate(Target $target, Collector $collector, Result $result); }
{ "pile_set_name": "Github" }
framework module Pods_DemoApp { umbrella header "Pods-DemoApp-umbrella.h" export * module * { export * } }
{ "pile_set_name": "Github" }
@file:Suppress("NOTHING_TO_INLINE") package io.ktor.utils.io.bits import io.ktor.utils.io.core.internal.* import java.nio.* @Suppress("ACTUAL_WITHOUT_EXPECT", "EXPERIMENTAL_FEATURE_WARNING") public actual inline class Memory @DangerousInternalIoApi constructor(public val buffer: ByteBuffer) { /** * Size of memory range in bytes. */ public actual inline val size: Long get() = buffer.limit().toLong() /** * Size of memory range in bytes represented as signed 32bit integer * @throws IllegalStateException when size doesn't fit into a signed 32bit integer */ public actual inline val size32: Int get() = buffer.limit() /** * Returns byte at [index] position. */ public actual inline fun loadAt(index: Int): Byte = buffer.get(index) /** * Returns byte at [index] position. */ public actual inline fun loadAt(index: Long): Byte = buffer.get(index.toIntOrFail("index")) /** * Write [value] at the specified [index]. */ public actual inline fun storeAt(index: Int, value: Byte) { buffer.put(index, value) } /** * Write [value] at the specified [index] */ public actual inline fun storeAt(index: Long, value: Byte) { buffer.put(index.toIntOrFail("index"), value) } public actual fun slice(offset: Int, length: Int): Memory = Memory(buffer.sliceSafe(offset, length)) public actual fun slice(offset: Long, length: Long): Memory { return slice(offset.toIntOrFail("offset"), length.toIntOrFail("length")) } /** * Copies bytes from this memory range from the specified [offset] and [length] * to the [destination] at [destinationOffset]. * Copying bytes from a memory to itself is allowed. 
*/ public actual fun copyTo(destination: Memory, offset: Int, length: Int, destinationOffset: Int) { if (buffer.hasArray() && destination.buffer.hasArray() && !buffer.isReadOnly && !destination.buffer.isReadOnly ) { System.arraycopy( buffer.array(), buffer.arrayOffset() + offset, destination.buffer.array(), destination.buffer.arrayOffset() + destinationOffset, length ) return } // NOTE: it is ok here to make copy since it will be escaped by JVM // while temporary moving position/offset makes memory concurrent unsafe that is unacceptable val srcCopy = buffer.duplicate().apply { position(offset) limit(offset + length) } val dstCopy = destination.buffer.duplicate().apply { position(destinationOffset) } dstCopy.put(srcCopy) } /** * Copies bytes from this memory range from the specified [offset] and [length] * to the [destination] at [destinationOffset]. * Copying bytes from a memory to itself is allowed. */ public actual fun copyTo(destination: Memory, offset: Long, length: Long, destinationOffset: Long) { copyTo( destination, offset.toIntOrFail("offset"), length.toIntOrFail("length"), destinationOffset.toIntOrFail("destinationOffset") ) } public actual companion object { public actual val Empty: Memory = Memory(ByteBuffer.allocate(0).order(ByteOrder.BIG_ENDIAN)) } } /** * Copies bytes from this memory range from the specified [offset] and [length] * to the [destination] at [destinationOffset]. */ public actual fun Memory.copyTo( destination: ByteArray, offset: Int, length: Int, destinationOffset: Int ) { if (buffer.hasArray() && !buffer.isReadOnly) { System.arraycopy( buffer.array(), buffer.arrayOffset() + offset, destination, destinationOffset, length ) return } // we need to make a copy to prevent moving position buffer.duplicate().get(destination, destinationOffset, length) } /** * Copies bytes from this memory range from the specified [offset] and [length] * to the [destination] at [destinationOffset]. 
*/ public actual fun Memory.copyTo( destination: ByteArray, offset: Long, length: Int, destinationOffset: Int ) { copyTo(destination, offset.toIntOrFail("offset"), length, destinationOffset) } /** * Copies bytes from this memory range from the specified [offset] * to the [destination] buffer. */ public fun Memory.copyTo( destination: ByteBuffer, offset: Int ) { val size = destination.remaining() if (buffer.hasArray() && !buffer.isReadOnly && destination.hasArray() && !destination.isReadOnly) { val dstPosition = destination.position() System.arraycopy( buffer.array(), buffer.arrayOffset() + offset, destination.array(), destination.arrayOffset() + dstPosition, size ) destination.position(dstPosition + size) return } // we need to make a copy to prevent moving position val source = buffer.duplicate().apply { limit(offset + size) position(offset) } destination.put(source) } /** * Copies bytes from this memory range from the specified [offset] * to the [destination] buffer. */ public fun Memory.copyTo(destination: ByteBuffer, offset: Long) { copyTo(destination, offset.toIntOrFail("offset")) } /** * Copy byte from this buffer moving it's position to the [destination] at [offset]. 
*/ public fun ByteBuffer.copyTo(destination: Memory, offset: Int) { if (hasArray() && !isReadOnly) { destination.storeByteArray(offset, array(), arrayOffset() + position(), remaining()) position(limit()) return } destination.buffer.sliceSafe(offset, remaining()).put(this) } private inline fun ByteBuffer.myDuplicate(): ByteBuffer { duplicate().apply { return suppressNullCheck() } } private inline fun ByteBuffer.mySlice(): ByteBuffer { slice().apply { return suppressNullCheck() } } private inline fun ByteBuffer.suppressNullCheck(): ByteBuffer { return this } internal fun ByteBuffer.sliceSafe(offset: Int, length: Int): ByteBuffer { return myDuplicate().apply { position(offset); limit(offset + length) }.mySlice() } /** * Fill memory range starting at the specified [offset] with [value] repeated [count] times. */ public actual fun Memory.fill(offset: Long, count: Long, value: Byte) { fill(offset.toIntOrFail("offset"), count.toIntOrFail("count"), value) } /** * Fill memory range starting at the specified [offset] with [value] repeated [count] times. */ public actual fun Memory.fill(offset: Int, count: Int, value: Byte) { for (index in offset until offset + count) { buffer.put(index, value) } }
{ "pile_set_name": "Github" }
"use strict"; module.exports = exports = build; exports.usage = 'Attempts to compile the module by dispatching to node-gyp or nw-gyp'; var compile = require('./util/compile.js'); var handle_gyp_opts = require('./util/handle_gyp_opts.js'); var configure = require('./configure.js'); function do_build(gyp,argv,callback) { handle_gyp_opts(gyp,argv,function(err,result) { var final_args = ['build'].concat(result.gyp).concat(result.pre); if (result.unparsed.length > 0) { final_args = final_args. concat(['--']). concat(result.unparsed); } compile.run_gyp(final_args,result.opts,function(err) { return callback(err); }); }); } function build(gyp, argv, callback) { // Form up commands to pass to node-gyp: // We map `node-pre-gyp build` to `node-gyp configure build` so that we do not // trigger a clean and therefore do not pay the penalty of a full recompile if (argv.length && (argv.indexOf('rebuild') > -1)) { // here we map `node-pre-gyp rebuild` to `node-gyp rebuild` which internally means // "clean + configure + build" and triggers a full recompile compile.run_gyp(['clean'],{},function(err) { if (err) return callback(err); configure(gyp,argv,function(err) { if (err) return callback(err); return do_build(gyp,argv,callback); }); }); } else { return do_build(gyp,argv,callback); } }
{ "pile_set_name": "Github" }
# DM Reader <a name="2lzA4"></a> ## 一、插件名称 名称:**dmreader** <a name="jVb3v"></a> ## 二、支持的数据源版本 **DM7、DM8**<br /> <a name="4lw0x"></a> ## 三、参数说明 - **jdbcUrl** - 描述:针对关系型数据库的jdbc连接字符串 <br />jdbcUrl参考文档:[达梦官方文档](http://www.dameng.com/down.aspx?TypeId=12&FId=t14:12:14) - 必选:是 - 默认值:无 - **username** - 描述:数据源的用户名 - 必选:是 - 默认值:无 - **password** - 描述:数据源指定用户名的密码 - 必选:是 - 默认值:无 - **where** - 描述:筛选条件,reader插件根据指定的column、table、where条件拼接SQL,并根据这个SQL进行数据抽取。在实际业务场景中,往往会选择当天的数据进行同步,可以将where条件指定为gmt_create > time。 - 注意:不可以将where条件指定为limit 10,limit不是SQL的合法where子句。 - 必选:否 - 默认值:无 - **splitPk** - 描述:当speed配置中的channel大于1时指定此参数,Reader插件根据并发数和此参数指定的字段拼接sql,使每个并发读取不同的数据,提升读取速率。 - 注意: - 推荐splitPk使用表主键,因为表主键通常情况下比较均匀,因此切分出来的分片也不容易出现数据热点。 - 目前splitPk仅支持整形数据切分,`不支持浮点、字符串、日期等其他类型`。如果用户指定其他非支持类型,FlinkX将报错! - 如果channel大于1但是没有配置此参数,任务将置为失败。 - 必选:否 - 默认值:无 - **queryTimeOut** - 描述:查询超时时间,单位秒。 - 注意:当数据量很大,或者从视图查询,或者自定义sql查询时,可通过此参数指定超时时间。 - 必选:否 - 默认值:3000 - **customSql** - 描述:自定义的查询语句,如果只指定字段不能满足需求时,可通过此参数指定查询的sql,可以是任意复杂的查询语句。 - 注意: - 只能是查询语句,否则会导致任务失败; - 查询语句返回的字段需要和column列表里的字段对应; - 当指定了此参数时,connection里指定的table无效; - 当指定此参数时,column必须指定具体字段信息,不能以*号代替; - 必选:否 - 默认值:无 - **column** - 描述:需要读取的字段。 - 格式:支持3种格式 <br />1.读取全部字段,如果字段数量很多,可以使用下面的写法: ```bash "column":["*"] ``` 2.只指定字段名称: ```json "column":["ID","NAME"] ``` 3.指定具体信息: ```json "column": [{ "name": "COL", "type": "datetime", "format": "yyyy-MM-dd hh:mm:ss", "value": "value" }] ``` - 属性说明: - name:字段名称,注意应该为大写,否则可能会出错 - type:字段类型,可以和数据库里的字段类型不一样,程序会做一次类型转换 - format:如果字段是时间字符串,可以指定时间的格式,将字段类型转为日期格式返回 - value:如果数据库里不存在指定的字段,则会把value的值作为常量列返回,如果指定的字段存在,当指定字段的值为null时,会以此value值作为默认值返回 - 必选:是 - 默认值:无 - **polling** - 描述:是否开启间隔轮询,开启后会根据`pollingInterval`轮询间隔时间周期性的从数据库拉取数据。开启间隔轮询还需配置参数`pollingInterval`,`increColumn`,可以选择配置参数`startLocation`。若不配置参数`startLocation`,任务启动时将会从数据库中查询增量字段最大值作为轮询的开始位置。 - 必选:否 - 默认值:false - **pollingInterval** - 描述:轮询间隔时间,从数据库中拉取数据的间隔时间,默认为5000毫秒。 - 必选:否 - 默认值:5000 - **requestAccumulatorInterval** - 描述:发送查询累加器请求的间隔时间。 - 必选:否 - 默认值:2 <a 
name="1LBc2"></a> ## 四、配置示例 <a name="xhLRp"></a> #### 1、基础配置 ```json { "job": { "content": [ { "reader": { "name": "dmreader", "parameter": { "column": [ { "name": "ID", "type": "int" }, { "name": "AGE", "type": "int" } ], "increColumn": "", "startLocation": "", "username": "SYSDBA", "password": "SYSDBA", "connection": [ { "jdbcUrl": [ "jdbc:dm://localhost:5236" ], "table": [ "PERSON.STUDENT" ] } ], "where": "" } }, "writer": { "name": "streamwriter", "parameter": { "print": true } } } ], "setting": { "speed": { "channel": 1, "bytes": 0 }, "errorLimit": { "record": 100 }, "restore": { "maxRowNumForCheckpoint": 0, "isRestore": false, "restoreColumnName": "", "restoreColumnIndex": 0 } } } } ``` <a name="obMdk"></a> #### 2、多通道 ```json { "job": { "content": [ { "reader": { "name": "dmreader", "parameter": { "column": [ { "name": "ID", "type": "int" }, { "name": "AGE", "type": "int" } ], "splitPk": "ID", "increColumn": "", "startLocation": "", "username": "SYSDBA", "password": "SYSDBA", "connection": [ { "jdbcUrl": [ "jdbc:dm://localhost:5236" ], "table": [ "PERSON.STUDENT" ] } ], "where": "" } }, "writer": { "name": "streamwriter", "parameter": { "print": true } } } ], "setting": { "speed": { "channel": 3, "bytes": 0 }, "errorLimit": { "record": 100 }, "restore": { "maxRowNumForCheckpoint": 0, "isRestore": false, "restoreColumnName": "", "restoreColumnIndex": 0 } } } } ``` <a name="zuPBB"></a> #### 3、指定customSql ```json { "job": { "content": [ { "reader": { "name": "dmreader", "parameter": { "column": [ { "name": "ID", "type": "int" }, { "name": "AGE", "type": "int" } ], "increColumn": "", "startLocation": "", "customSql": "SELECT * FROM PERSON.STUDENT WHERE ID>30", "username": "SYSDBA", "password": "SYSDBA", "connection": [ { "jdbcUrl": [ "jdbc:dm://localhost:5236" ], "table": [ "PERSON.STUDENT" ] } ], "where": "" } }, "writer": { "name": "streamwriter", "parameter": { "print": true } } } ], "setting": { "speed": { "channel": 1, "bytes": 0 }, "errorLimit": { "record": 
100 }, "restore": { "maxRowNumForCheckpoint": 0, "isRestore": false, "restoreColumnName": "", "restoreColumnIndex": 0 } } } } ``` <a name="KyWmu"></a> #### 4、增量同步startLocation ```json { "job": { "content": [ { "reader": { "name": "dmreader", "parameter": { "column": [ { "name": "ID", "type": "int" }, { "name": "AGE", "type": "int" } ], "increColumn": "ID", "startLocation": "20", "username": "SYSDBA", "password": "SYSDBA", "connection": [ { "jdbcUrl": [ "jdbc:dm://localhost:5236" ], "table": [ "PERSON.STUDENT" ] } ], "where": "" } }, "writer": { "name": "streamwriter", "parameter": { "print": true } } } ], "setting": { "speed": { "channel": 1, "bytes": 0 }, "errorLimit": { "record": 100 }, "restore": { "maxRowNumForCheckpoint": 0, "isRestore": false, "restoreColumnName": "", "restoreColumnIndex": 0 } } } } ``` <a name="auGyQ"></a> #### 5、间隔轮询 ```json { "job": { "content": [ { "reader": { "name": "dmreader", "parameter": { "column": [ { "name": "ID", "type": "int" }, { "name": "AGE", "type": "int" } ], "increColumn": "", "startLocation": "", "username": "SYSDBA", "password": "SYSDBA", "polling": true, "pollingInterval": 3000, "connection": [ { "jdbcUrl": [ "jdbc:dm://localhost:5236" ], "table": [ "PERSON.STUDENT" ] } ], "where": "" } }, "writer": { "name": "streamwriter", "parameter": { "print": true } } } ], "setting": { "speed": { "channel": 1, "bytes": 0 }, "errorLimit": { "record": 100 }, "restore": { "maxRowNumForCheckpoint": 0, "isRestore": false, "restoreColumnName": "", "restoreColumnIndex": 0 } } } } ```
{ "pile_set_name": "Github" }
/* ************************************************************************************* * Copyright 2013 Normation SAS ************************************************************************************* * * This file is part of Rudder. * * Rudder is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * In accordance with the terms of section 7 (7. Additional Terms.) of * the GNU General Public License version 3, the copyright holders add * the following Additional permissions: * Notwithstanding to the terms of section 5 (5. Conveying Modified Source * Versions) and 6 (6. Conveying Non-Source Forms.) of the GNU General * Public License version 3, when you create a Related Module, this * Related Module is not considered as a part of the work and may be * distributed under the license agreement of your choice. * A "Related Module" means a set of sources files including their * documentation that, without modification of the Source Code, enables * supplementary functions or services in addition to those offered by * the Software. * * Rudder is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with Rudder. If not, see <http://www.gnu.org/licenses/>. 
* ************************************************************************************* */ package bootstrap.liftweb package checks import java.io.File import com.normation.eventlog.ModificationId import com.normation.rudder.batch.{AsyncDeploymentActor, AutomaticStartDeployment} import com.normation.rudder.domain.eventlog.RudderEventActor import com.normation.utils.StringUuidGenerator /** * Check at webapp startup if a policy update was requested when webapp was stopped * If flag file is present then start a new policy update * This needs to be achieved after all tasks that could modify configuration */ class TriggerPolicyUpdate( asyncGeneration : AsyncDeploymentActor , uuidGen : StringUuidGenerator ) extends BootstrapChecks { override val description = "Trigger policy update if it was requested during shutdown" override def checks() : Unit = { val filePath = asyncGeneration.triggerPolicyUpdateFlagPath // Check if the flag file is present, and start a new policy update if needed val file = new File(filePath) try { if (file.exists) { // File exists, update policies BootstrapLogger.logEffect.info(s"Flag file '${filePath}' found, Start a new policy update now") asyncGeneration ! AutomaticStartDeployment(ModificationId(uuidGen.newUuid), RudderEventActor) } else { BootstrapLogger.logEffect.debug(s"Flag file '${filePath}' does not exist, No need to start a new policy update") } } catch { // Exception while checking the file existence case e : Exception => BootstrapLogger.logEffect.error(s"An error occurred while accessing flag file '${filePath}', cause is: ${e.getMessage}") } } }
{ "pile_set_name": "Github" }
/****************************************************************************** * * Copyright(c) 2009-2012 Realtek Corporation. * * This program is free software; you can redistribute it and/or modify it * under the terms of version 2 of the GNU General Public License as * published by the Free Software Foundation. * * This program is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for * more details. * * The full GNU General Public License is included in this distribution in the * file called LICENSE. * * Contact Information: * wlanfae <[email protected]> * Realtek Corporation, No. 2, Innovation Road II, Hsinchu Science Park, * Hsinchu 300, Taiwan. * * Larry Finger <[email protected]> * *****************************************************************************/ #ifndef __RTL92DE_SW_H__ #define __RTL92DE_SW_H__ extern spinlock_t globalmutex_power; extern spinlock_t globalmutex_for_fwdownload; extern spinlock_t globalmutex_for_power_and_efuse; #endif
{ "pile_set_name": "Github" }
<?xml version="1.0" encoding="utf-8"?><!-- ~ Copyright 2019 Google LLC ~ ~ Licensed under the Apache License, Version 2.0 (the "License"); ~ you may not use this file except in compliance with the License. ~ You may obtain a copy of the License at ~ ~ https://www.apache.org/licenses/LICENSE-2.0 ~ ~ Unless required by applicable law or agreed to in writing, software ~ distributed under the License is distributed on an "AS IS" BASIS, ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. ~ See the License for the specific language governing permissions and ~ limitations under the License. --> <layout xmlns:android="http://schemas.android.com/apk/res/android"> <androidx.recyclerview.widget.RecyclerView android:id="@+id/questions" android:layout_width="match_parent" android:layout_height="match_parent" /> </layout>
{ "pile_set_name": "Github" }
[ { "category": "``apigateway``", "description": "Update apigateway command to latest version", "type": "api-change" }, { "category": "``ssm``", "description": "Update ssm command to latest version", "type": "api-change" }, { "category": "``apigatewayv2``", "description": "Update apigatewayv2 command to latest version", "type": "api-change" }, { "category": "``elbv2``", "description": "Update elbv2 command to latest version", "type": "api-change" }, { "category": "``application-insights``", "description": "Update application-insights command to latest version", "type": "api-change" }, { "category": "``fsx``", "description": "Update fsx command to latest version", "type": "api-change" }, { "category": "``service-quotas``", "description": "Update service-quotas command to latest version", "type": "api-change" }, { "category": "``resourcegroupstaggingapi``", "description": "Update resourcegroupstaggingapi command to latest version", "type": "api-change" }, { "category": "``securityhub``", "description": "Update securityhub command to latest version", "type": "api-change" } ]
{ "pile_set_name": "Github" }
/* GNU Ocrad - Optical Character Recognition program Copyright (C) 2003-2015 Antonio Diaz Diaz. This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 2 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. */ #include <climits> #include <cstdlib> #include <vector> #include "rectangle.h" #include "segment.h" #include "mask.h" int Mask::left( const int row ) const { if( top() <= row && row <= bottom() && data[row-top()].valid() ) return data[row-top()].left; return -1; } int Mask::right( const int row ) const { if( top() <= row && row <= bottom() && data[row-top()].valid() ) return data[row-top()].right; return -1; } void Mask::top( const int t ) { if( t == top() ) return; if( t < top() ) data.insert( data.begin(), top() - t, Csegment() ); else data.erase( data.begin(), data.begin() + ( t - top() ) ); Rectangle::top( t ); } void Mask::bottom( const int b ) { if( b != bottom() ) { Rectangle::bottom( b ); data.resize( height() ); } } void Mask::add_mask( const Mask & m ) { if( m.top() < top() ) top( m.top() ); if( m.bottom() > bottom() ) bottom( m.bottom() ); for( int i = m.top(); i <= m.bottom(); ++i ) { Csegment & seg = data[i-top()]; seg.add_csegment( m.data[i-m.top()] ); if( seg.left < left() ) left( seg.left ); if( seg.right > right() ) right( seg.right ); } } void Mask::add_point( const int row, const int col ) { if( row < top() ) top( row ); else if( row > bottom() ) bottom( row ); data[row-top()].add_point( col ); if( col < left() ) left( col ); else if( col > right() ) 
right( col ); } void Mask::add_rectangle( const Rectangle & re ) { if( re.top() < top() ) top( re.top() ); if( re.bottom() > bottom() ) bottom( re.bottom() ); const Csegment rseg( re.left(), re.right() ); for( int i = re.top(); i <= re.bottom(); ++i ) { Csegment & seg = data[i-top()]; seg.add_csegment( rseg ); if( seg.left < left() ) left( seg.left ); if( seg.right > right() ) right( seg.right ); } } bool Mask::includes( const Rectangle & re ) const { if( re.top() < top() || re.bottom() > bottom() ) return false; const Csegment seg( re.left(), re.right() ); for( int i = re.top(); i <= re.bottom(); ++i ) if( !data[i-top()].includes( seg ) ) return false; return true; } bool Mask::includes( const int row, const int col ) const { return ( row >= top() && row <= bottom() && data[row-top()].includes( col ) ); } int Mask::distance( const Rectangle & re ) const { const Csegment seg( re.left(), re.right() ); int mindist = INT_MAX; for( int i = bottom(); i >= top(); --i ) { const int vd = re.v_distance( i ); if( vd >= mindist ) { if( i < re.top() ) break; else continue; } const int hd = data[i-top()].distance( seg ); if( hd >= mindist ) continue; const int d = Rectangle::hypoti( hd, vd ); if( d < mindist ) mindist = d; } return mindist; } int Mask::distance( const int row, const int col ) const { int mindist = INT_MAX; for( int i = bottom(); i >= top(); --i ) { const int vd = std::abs( i - row ); if( vd >= mindist ) { if( i < row ) break; else continue; } const int hd = data[i-top()].distance( col ); if( hd >= mindist ) continue; const int d = Rectangle::hypoti( hd, vd ); if( d < mindist ) mindist = d; } return mindist; }
{ "pile_set_name": "Github" }
""" Base class with plot generating commands. Does not define any special non-GMT methods (savefig, show, etc). """ import contextlib import numpy as np import pandas as pd from .clib import Session from .exceptions import GMTError, GMTInvalidInput from .helpers import ( build_arg_string, dummy_context, data_kind, fmt_docstring, use_alias, kwargs_to_strings, ) class BasePlotting: """ Base class for Figure and Subplot. Defines the plot generating methods and a hook for subclasses to insert special arguments (the _preprocess method). """ def _preprocess(self, **kwargs): # pylint: disable=no-self-use """ Make any changes to kwargs or required actions before plotting. This method is run before all plotting commands and can be used to insert special arguments into the kwargs or make any actions that are required before ``call_module``. For example, the :class:`pygmt.Figure` needs this to tell the GMT modules to plot to a specific figure. This is a dummy method that does nothing. Returns ------- kwargs : dict The same input kwargs dictionary. Examples -------- >>> base = BasePlotting() >>> base._preprocess(resolution='low') {'resolution': 'low'} """ return kwargs @fmt_docstring @use_alias( R="region", J="projection", A="area_thresh", B="frame", D="resolution", I="rivers", L="map_scale", N="borders", W="shorelines", G="land", S="water", U="timestamp", X="xshift", Y="yshift", p="perspective", t="transparency", ) @kwargs_to_strings(R="sequence", p="sequence") def coast(self, **kwargs): """ Plot continents, shorelines, rivers, and borders on maps Plots grayshaded, colored, or textured land-masses [or water-masses] on maps and [optionally] draws coastlines, rivers, and political boundaries. Alternatively, it can (1) issue clip paths that will contain all land or all water areas, or (2) dump the data to an ASCII table. The data files come in 5 different resolutions: (**f**)ull, (**h**)igh, (**i**)ntermediate, (**l**)ow, and (**c**)rude. 
The full resolution files amount to more than 55 Mb of data and provide great detail; for maps of larger geographical extent it is more economical to use one of the other resolutions. If the user selects to paint the land-areas and does not specify fill of water-areas then the latter will be transparent (i.e., earlier graphics drawn in those areas will not be overwritten). Likewise, if the water-areas are painted and no land fill is set then the land-areas will be transparent. A map projection must be supplied. Full option list at :gmt-docs:`coast.html` {aliases} Parameters ---------- {J} {R} area_thresh : int, float, or str ``'min_area[/min_level/max_level][+ag|i|s|S][+r|l][+ppercent]'`` Features with an area smaller than min_area in km^2 or of hierarchical level that is lower than min_level or higher than max_level will not be plotted. {B} C : str Set the shade, color, or pattern for lakes and river-lakes. resolution : str Selects the resolution of the data set to use ((f)ull, (h)igh, (i)ntermediate, (l)ow, and (c)rude). land : str Select filling or clipping of “dry” areas. rivers : str ``'river[/pen]'`` Draw rivers. Specify the type of rivers and [optionally] append pen attributes. map_scale : str ``'[g|j|J|n|x]refpoint'`` Draws a simple map scale centered on the reference point specified. borders : str ``'border[/pen]'`` Draw political boundaries. Specify the type of boundary and [optionally] append pen attributes water : str Select filling or clipping of “wet” areas. {U} shorelines : str ``'[level/]pen'`` Draw shorelines [Default is no shorelines]. Append pen attributes. 
{XY} {p} {t} """ kwargs = self._preprocess(**kwargs) with Session() as lib: lib.call_module("coast", build_arg_string(kwargs)) @fmt_docstring @use_alias( R="region", J="projection", B="frame", C="cmap", D="position", F="box", G="truncate", W="scale", X="xshift", Y="yshift", p="perspective", t="transparency", ) @kwargs_to_strings(R="sequence", G="sequence", p="sequence") def colorbar(self, **kwargs): """ Plot a gray or color scale-bar on maps. Both horizontal and vertical scales are supported. For CPTs with gradational colors (i.e., the lower and upper boundary of an interval have different colors) we will interpolate to give a continuous scale. Variations in intensity due to shading/illumination may be displayed by setting the option -I. Colors may be spaced according to a linear scale, all be equal size, or by providing a file with individual tile widths. Full option list at :gmt-docs:`colorbar.html` {aliases} Parameters ---------- position : str ``[g|j|J|n|x]refpoint[+wlength[/width]][+e[b|f][length]][+h|v] [+jjustify][+m[a|c|l|u]][+n[txt]][+odx[/dy]]``. Defines the reference point on the map for the color scale using one of four coordinate systems: (1) Use *g* for map (user) coordinates, (2) use *j* or *J* for setting refpoint via a 2-char justification code that refers to the (invisible) map domain rectangle, (3) use *n* for normalized (0-1) coordinates, or (4) use *x* for plot coordinates (inches, cm, etc.). All but *x* requires both *region* and *projection* to be specified. Append +w followed by the length and width of the color bar. If width is not specified then it is set to 4% of the given length. Give a negative length to reverse the scale bar. Append +h to get a horizontal scale [Default is vertical (+v)]. By default, the anchor point on the scale is assumed to be the bottom left corner (BL), but this can be changed by appending +j followed by a 2-char justification code *justify*. 
box : bool or str ``[+cclearances][+gfill][+i[[gap/]pen]][+p[pen]][+r[radius]] [+s[[dx/dy/][shade]]]``. If set to True, draws a rectangular border around the color scale. Alternatively, specify a different pen with +ppen. Add +gfill to fill the scale panel [no fill]. Append +cclearance where clearance is either gap, xgap/ygap, or lgap/rgap/bgap/tgap where these items are uniform, separate in x- and y-direction, or individual side spacings between scale and border. Append +i to draw a secondary, inner border as well. We use a uniform gap between borders of 2p and the MAP_DEFAULTS_PEN unless other values are specified. Append +r to draw rounded rectangular borders instead, with a 6p corner radius. You can override this radius by appending another value. Finally, append +s to draw an offset background shaded region. Here, dx/dy indicates the shift relative to the foreground frame [4p/-4p] and shade sets the fill style to use for shading [gray50]. truncate : list or str ``zlo/zhi`` Truncate the incoming CPT so that the lowest and highest z-levels are to zlo and zhi. If one of these equal NaN then we leave that end of the CPT alone. The truncation takes place before the plotting. scale : float Multiply all z-values in the CPT by the provided scale. By default the CPT is used as is. {XY} {p} {t} """ kwargs = self._preprocess(**kwargs) with Session() as lib: lib.call_module("colorbar", build_arg_string(kwargs)) @fmt_docstring @use_alias( A="annotation", B="frame", C="interval", G="label_placement", J="projection", L="limit", Q="cut", R="region", S="resample", U="timestamp", W="pen", l="label", X="xshift", Y="yshift", p="perspective", t="transparency", ) @kwargs_to_strings(R="sequence", L="sequence", A="sequence_plus", p="sequence") def grdcontour(self, grid, **kwargs): """ Convert grids or images to contours and plot them on maps Takes a grid file name or an xarray.DataArray object as input. 
Full option list at :gmt-docs:`grdcontour.html` {aliases} Parameters ---------- grid : str or xarray.DataArray The file name of the input grid or the grid loaded as a DataArray. interval : str or int Specify the contour lines to generate. - The filename of a `CPT` file where the color boundaries will be used as contour levels. - The filename of a 2 (or 3) column file containing the contour levels (col 1), (C)ontour or (A)nnotate (col 2), and optional angle (col 3) - A fixed contour interval ``cont_int`` or a single contour with ``+[cont_int]`` annotation : str, int, or list Specify or disable annotated contour levels, modifies annotated contours specified in ``-C``. - Specify a fixed annotation interval ``annot_int`` or a single annotation level ``+[annot_int]`` - Disable all annotation with ``'-'`` - Optional label modifiers can be specified as a single string ``'[annot_int]+e'`` or with a list of options ``([annot_int], 'e', 'f10p', 'gred')``. limit : str or list of 2 ints Do no draw contours below `low` or above `high`, specify as string ``'[low]/[high]'`` or list ``[low,high]``. cut : str or int Do not draw contours with less than `cut` number of points. resample : str or int Resample smoothing factor. {J} {R} {B} {G} {U} {W} {XY} label : str Add a legend entry for the contour being plotted. Normally, the annotated contour is selected for the legend. You can select the regular contour instead, or both of them, by considering the label to be of the format [*annotcontlabel*][/*contlabel*]. If either label contains a slash (/) character then use ``|`` as the separator for the two labels instead. 
{p} {t} """ kwargs = self._preprocess(**kwargs) kind = data_kind(grid, None, None) with Session() as lib: if kind == "file": file_context = dummy_context(grid) elif kind == "grid": file_context = lib.virtualfile_from_grid(grid) else: raise GMTInvalidInput("Unrecognized data type: {}".format(type(grid))) with file_context as fname: arg_str = " ".join([fname, build_arg_string(kwargs)]) lib.call_module("grdcontour", arg_str) @fmt_docstring @use_alias( A="img_out", B="frame", C="cmap", D="img_in", E="dpi", G="bit_color", I="shading", J="projection", M="monochrome", N="no_clip", Q="nan_transparent", R="region", U="timestamp", V="verbose", X="xshift", Y="yshift", n="interpolation", p="perspective", t="transparency", x="cores", ) @kwargs_to_strings(R="sequence", p="sequence") def grdimage(self, grid, **kwargs): """ Project and plot grids or images. Reads a 2-D grid file and produces a gray-shaded (or colored) map by building a rectangular image and assigning pixels a gray-shade (or color) based on the z-value and the CPT file. Optionally, illumination may be added by providing a file with intensities in the (-1,+1) range or instructions to derive intensities from the input data grid. Values outside this range will be clipped. Such intensity files can be created from the grid using `grdgradient` and, optionally, modified by `grdmath` or `grdhisteq`. If GMT is built with GDAL support, *grid* can be an image file (geo-referenced or not). In this case the image can optionally be illuminated with the file provided via the *shading* option. Here, if image has no coordinates then those of the intensity file will be used. When using map projections, the grid is first resampled on a new rectangular grid with the same dimensions. Higher resolution images can be obtained by using the *dpi* option. 
To obtain the resampled value (and hence shade or color) of each map pixel, its location is inversely projected back onto the input grid after which a value is interpolated between the surrounding input grid values. By default bi-cubic interpolation is used. Aliasing is avoided by also forward projecting the input grid nodes. If two or more nodes are projected onto the same pixel, their average will dominate in the calculation of the pixel value. Interpolation and aliasing is controlled with the *interpolation* option. The *region* option can be used to select a map region larger or smaller than that implied by the extent of the grid. Full option list at :gmt-docs:`grdimage.html` {aliases} Parameters ---------- grid : str or xarray.DataArray The file name or a DataArray containing the input 2-D gridded data set or image to be plotted (See GRID FILE FORMATS at :gmt-docs:`grdimage.html#grid-file-formats`). img_out : str ``out_img[=driver]``. Save an image in a raster format instead of PostScript. Use extension .ppm for a Portable Pixel Map format which is the only raster format GMT can natively write. For GMT installations configured with GDAL support there are more choices: Append *out_img* to select the image file name and extension. If the extension is one of .bmp, .gif, .jpg, .png, or .tif then no driver information is required. For other output formats you must append the required GDAL driver. The *driver* is the driver code name used by GDAL; see your GDAL installation's documentation for available drivers. Append a **+c**\\ *options* string where options is a list of one or more concatenated number of GDAL **-co** options. For example, to write a GeoPDF with the TerraGo format use ``=PDF+cGEO_ENCODING=OGC_BP``. Notes: (1) If a tiff file (.tif) is selected then we will write a GeoTiff image if the GMT projection syntax translates into a PROJ syntax, otherwise a plain tiff file is produced. (2) Any vector elements will be lost. 
{B} {CPT} img_in : str ``[r]`` GMT will automatically detect standard image files (Geotiff, TIFF, JPG, PNG, GIF, etc.) and will read those via GDAL. For very obscure image formats you may need to explicitly set *img_in*, which specifies that the grid is in fact an image file to be read via GDAL. Append **r** to assign the region specified by *region* to the image. For example, if you have used ``region='d'`` then the image will be assigned a global domain. This mode allows you to project a raw image (an image without referencing coordinates). dpi : int ``[i|dpi]``. Sets the resolution of the projected grid that will be created if a map projection other than Linear or Mercator was selected [100]. By default, the projected grid will be of the same size (rows and columns) as the input file. Specify **i** to use the PostScript image operator to interpolate the image at the device resolution. bit_color : str ``color[+b|f]``. This option only applies when a resulting 1-bit image otherwise would consist of only two colors: black (0) and white (255). If so, this option will instead use the image as a transparent mask and paint the mask with the given color. Append **+b** to paint the background pixels (1) or **+f** for the foreground pixels [Default]. shading : str ``[intensfile|intensity|modifiers]``. Give the name of a grid file with intensities in the (-1,+1) range, or a constant intensity to apply everywhere (affects the ambient light). Alternatively, derive an intensity grid from the input data grid via a call to `grdgradient`; append **+a**\\ *azimuth*, **+n**\\ *args*, and **+m**\\ *ambient* to specify azimuth, intensity, and ambient arguments for that module, or just give **+d** to select the default arguments (``+a-45+nt1+m0``). If you want a more specific intensity scenario then run `grdgradient` separately first. If we should derive intensities from another file than grid, specify the file with suitable modifiers [Default is no illumination]. 
{J} monochrome : bool Force conversion to monochrome image using the (television) YIQ transformation. Cannot be used with *nan_transparent*. no_clip : bool Do not clip the image at the map boundary (only relevant for non-rectangular maps). nan_transparent : bool Make grid nodes with z = NaN transparent, using the color-masking feature in PostScript Level 3 (the PS device must support PS Level 3). {R} {V} {XY} {n} {p} {t} {x} """ kwargs = self._preprocess(**kwargs) kind = data_kind(grid, None, None) with Session() as lib: if kind == "file": file_context = dummy_context(grid) elif kind == "grid": file_context = lib.virtualfile_from_grid(grid) else: raise GMTInvalidInput("Unrecognized data type: {}".format(type(grid))) with file_context as fname: arg_str = " ".join([fname, build_arg_string(kwargs)]) lib.call_module("grdimage", arg_str) @fmt_docstring @use_alias( R="region", J="projection", Jz="zscale", JZ="zsize", B="frame", C="cmap", G="drapegrid", N="plane", Q="surftype", Wc="contourpen", Wm="meshpen", Wf="facadepen", I="shading", X="xshift", Y="yshift", p="perspective", t="transparency", ) @kwargs_to_strings(R="sequence", p="sequence") def grdview(self, grid, **kwargs): """ Create 3-D perspective image or surface mesh from a grid. Reads a 2-D grid file and produces a 3-D perspective plot by drawing a mesh, painting a colored/gray-shaded surface made up of polygons, or by scanline conversion of these polygons to a raster image. Options include draping a data set on top of a surface, plotting of contours on top of the surface, and apply artificial illumination based on intensities provided in a separate grid file. Full option list at :gmt-docs:`grdview.html` {aliases} Parameters ---------- grid : str or xarray.DataArray The file name of the input relief grid or the grid loaded as a DataArray. zscale/zsize : float or str Set z-axis scaling or z-axis size. cmap : str The name of the color palette table to use. 
drapegrid : str or xarray.DataArray The file name or a DataArray of the image grid to be draped on top of the relief provided by grid. [Default determines colors from grid]. Note that -Jz and -N always refers to the grid. The drapegrid only provides the information pertaining to colors, which (if drapegrid is a grid) will be looked-up via the CPT (see -C). plane : float or str ``level[+gfill]``. Draws a plane at this z-level. If the optional color is provided via the +g modifier, and the projection is not oblique, the frontal facade between the plane and the data perimeter is colored. surftype : str Specifies cover type of the grid. Select one of following settings: 1. 'm' for mesh plot [Default]. 2. 'mx' or 'my' for waterfall plots (row or column profiles). 3. 's' for surface plot. 4. 'i' for image plot. 5. 'c'. Same as 'i' but will make nodes with z = NaN transparent. For any of these choices, you may force a monochrome image by appending the modifier +m. contourpen : str Draw contour lines on top of surface or mesh (not image). Append pen attributes used for the contours. meshpen : str Sets the pen attributes used for the mesh. You must also select -Qm or -Qsm for meshlines to be drawn. facadepen :str Sets the pen attributes used for the facade. You must also select -N for the facade outline to be drawn. shading : str Provide the name of a grid file with intensities in the (-1,+1) range, or a constant intensity to apply everywhere (affects the ambient light). Alternatively, derive an intensity grid from the input data grid reliefgrid via a call to ``grdgradient``; append ``+aazimuth``, ``+nargs``, and ``+mambient`` to specify azimuth, intensity, and ambient arguments for that module, or just give ``+d`` to select the default arguments (``+a-45+nt1+m0``). 
{XY} {p} {t} """ kwargs = self._preprocess(**kwargs) kind = data_kind(grid, None, None) with Session() as lib: if kind == "file": file_context = dummy_context(grid) elif kind == "grid": file_context = lib.virtualfile_from_grid(grid) else: raise GMTInvalidInput(f"Unrecognized data type for grid: {type(grid)}") with contextlib.ExitStack() as stack: fname = stack.enter_context(file_context) if "G" in kwargs: drapegrid = kwargs["G"] if data_kind(drapegrid) in ("file", "grid"): if data_kind(drapegrid) == "grid": drape_context = lib.virtualfile_from_grid(drapegrid) drapefile = stack.enter_context(drape_context) kwargs["G"] = drapefile else: raise GMTInvalidInput( f"Unrecognized data type for drapegrid: {type(drapegrid)}" ) arg_str = " ".join([fname, build_arg_string(kwargs)]) lib.call_module("grdview", arg_str) @fmt_docstring @use_alias( R="region", J="projection", B="frame", S="style", G="color", W="pen", i="columns", l="label", C="cmap", U="timestamp", X="xshift", Y="yshift", p="perspective", t="transparency", ) @kwargs_to_strings(R="sequence", i="sequence_comma", p="sequence") def plot(self, x=None, y=None, data=None, sizes=None, direction=None, **kwargs): """ Plot lines, polygons, and symbols on maps. Used to be psxy. Takes a matrix, (x,y) pairs, or a file name as input and plots lines, polygons, or symbols at those locations on a map. Must provide either *data* or *x* and *y*. If providing data through *x* and *y*, *color* (G) can be a 1d array that will be mapped to a colormap. If a symbol is selected and no symbol size given, then psxy will interpret the third column of the input data as symbol size. Symbols whose size is <= 0 are skipped. If no symbols are specified then the symbol code (see *S* below) must be present as last column in the input. If *S* is not used, a line connecting the data points will be drawn instead. To explicitly close polygons, use *L*. Select a fill with *G*. If *G* is set, *W* will control whether the polygon outline is drawn or not. 
If a symbol is selected, *G* and *W* determines the fill and outline/no outline, respectively. Full option list at :gmt-docs:`plot.html` {aliases} Parameters ---------- x/y : float or 1d arrays The x and y coordinates, or arrays of x and y coordinates of the data points data : str or 2d array Either a data file name or a 2d numpy array with the tabular data. Use option *columns* (i) to choose which columns are x, y, color, and size, respectively. sizes : 1d array The sizes of the data points in units specified in *style* (S). Only valid if using *x* and *y*. direction : list of two 1d arrays If plotting vectors (using ``style='V'`` or ``style='v'``), then should be a list of two 1d arrays with the vector directions. These can be angle and length, azimuth and length, or x and y components, depending on the style options chosen. {J} {R} A : bool or str ``'[m|p|x|y]'`` By default, geographic line segments are drawn as great circle arcs. To draw them as straight lines, use *A*. {B} {CPT} D : str ``'dx/dy'``: Offset the plot symbol or line locations by the given amounts dx/dy. E : bool or str ``'[x|y|X|Y][+a][+cl|f][+n][+wcap][+ppen]'``. Draw symmetrical error bars. {G} style : str Plot symbols (including vectors, pie slices, fronts, decorated or quoted lines). {W} {U} {XY} label : str Add a legend entry for the symbol or line being plotted. {p} {t} """ kwargs = self._preprocess(**kwargs) kind = data_kind(data, x, y) extra_arrays = [] if "S" in kwargs and kwargs["S"][0] in "vV" and direction is not None: extra_arrays.extend(direction) if "G" in kwargs and not isinstance(kwargs["G"], str): if kind != "vectors": raise GMTInvalidInput( "Can't use arrays for color if data is matrix or file." ) extra_arrays.append(kwargs["G"]) del kwargs["G"] if sizes is not None: if kind != "vectors": raise GMTInvalidInput( "Can't use arrays for sizes if data is matrix or file." 
) extra_arrays.append(sizes) with Session() as lib: # Choose how data will be passed in to the module if kind == "file": file_context = dummy_context(data) elif kind == "matrix": file_context = lib.virtualfile_from_matrix(data) elif kind == "vectors": file_context = lib.virtualfile_from_vectors( np.atleast_1d(x), np.atleast_1d(y), *extra_arrays ) with file_context as fname: arg_str = " ".join([fname, build_arg_string(kwargs)]) lib.call_module("plot", arg_str) @fmt_docstring @use_alias( R="region", J="projection", B="frame", S="skip", G="label_placement", W="pen", L="triangular_mesh_pen", i="columns", l="label", C="levels", X="xshift", Y="yshift", p="perspective", t="transparency", ) @kwargs_to_strings(R="sequence", i="sequence_comma", p="sequence") def contour(self, x=None, y=None, z=None, data=None, **kwargs): """ Contour table data by direct triangulation. Takes a matrix, (x,y,z) pairs, or a file name as input and plots lines, polygons, or symbols at those locations on a map. Must provide either *data* or *x*, *y*, and *z*. [TODO: Insert more documentation] Full option list at :gmt-docs:`contour.html` {aliases} Parameters ---------- x/y/z : 1d arrays Arrays of x and y coordinates and values z of the data points. data : str or 2d array Either a data file name or a 2d numpy array with the tabular data. {J} {R} A : bool or str ``'[m|p|x|y]'`` By default, geographic line segments are drawn as great circle arcs. To draw them as straight lines, use *A*. {B} levels : str Contour file or level(s) D : str Dump contour coordinates E : str Network information label_placement : str Placement of labels I : bool Color the triangles using CPT triangular_mesh_pen : str Pen to draw the underlying triangulation (default none) N : bool Do not clip contours Q : float or str Do not draw contours with less than cut number of points. ``'[cut[unit]][+z]'`` skip : bool or str Skip input points outside region ``'[p|t]'`` {W} label : str Add a legend entry for the contour being plotted. 
Normally, the annotated contour is selected for the legend. You can select the regular contour instead, or both of them, by considering the label to be of the format [*annotcontlabel*][/*contlabel*]. If either label contains a slash (/) character then use ``|`` as the separator for the two labels instead. {XY} {p} {t} """ kwargs = self._preprocess(**kwargs) kind = data_kind(data, x, y, z) if kind == "vectors" and z is None: raise GMTInvalidInput("Must provided both x, y, and z.") with Session() as lib: # Choose how data will be passed in to the module if kind == "file": file_context = dummy_context(data) elif kind == "matrix": file_context = lib.virtualfile_from_matrix(data) elif kind == "vectors": file_context = lib.virtualfile_from_vectors(x, y, z) with file_context as fname: arg_str = " ".join([fname, build_arg_string(kwargs)]) lib.call_module("contour", arg_str) @fmt_docstring @use_alias( R="region", J="projection", B="frame", L="map_scale", Td="rose", Tm="compass", U="timestamp", X="xshift", Y="yshift", p="perspective", t="transparency", ) @kwargs_to_strings(R="sequence", p="sequence") def basemap(self, **kwargs): """ Produce a basemap for the figure. Several map projections are available, and the user may specify separate tick-mark intervals for boundary annotation, ticking, and [optionally] gridlines. A simple map scale or directional rose may also be plotted. At least one of the options *frame*, *map_scale*, *rose* or *compass* must be specified. Full option list at :gmt-docs:`basemap.html` {aliases} Parameters ---------- {J} {R} {B} map_scale : str ``'[g|j|J|n|x]refpoint'`` Draws a simple map scale centered on the reference point specified. rose : str Draws a map directional rose on the map at the location defined by the reference and anchor points. 
compass : str Draws a map magnetic rose on the map at the location defined by the reference and anchor points {U} {XY} {p} {t} """ kwargs = self._preprocess(**kwargs) if not ("B" in kwargs or "L" in kwargs or "T" in kwargs): raise GMTInvalidInput("At least one of B, L, or T must be specified.") with Session() as lib: lib.call_module("basemap", build_arg_string(kwargs)) @fmt_docstring @use_alias( R="region", J="projection", U="timestamp", D="position", F="box", X="xshift", Y="yshift", p="perspective", t="transparency", ) @kwargs_to_strings(R="sequence", p="sequence") def logo(self, **kwargs): """ Place the GMT graphics logo on a map. By default, the GMT logo is 2 inches wide and 1 inch high and will be positioned relative to the current plot origin. Use various options to change this and to place a transparent or opaque rectangular map panel behind the GMT logo. Full option list at :gmt-docs:`logo.html` {aliases} Parameters ---------- {J} {R} position : str ``'[g|j|J|n|x]refpoint+wwidth[+jjustify][+odx[/dy]]'``. Sets reference point on the map for the image. box : bool or str Without further options, draws a rectangular border around the GMT logo. {U} {XY} {p} {t} """ kwargs = self._preprocess(**kwargs) if "D" not in kwargs: raise GMTInvalidInput("Option D must be specified.") with Session() as lib: lib.call_module("logo", build_arg_string(kwargs)) @fmt_docstring @use_alias( R="region", J="projection", D="position", F="box", M="monochrome", X="xshift", Y="yshift", p="perspective", t="transparency", ) @kwargs_to_strings(R="sequence", p="sequence") def image(self, imagefile, **kwargs): """ Place images or EPS files on maps. Reads an Encapsulated PostScript file or a raster image file and plots it on a map. Full option list at :gmt-docs:`image.html` {aliases} Parameters ---------- imagefile : str This must be an Encapsulated PostScript (EPS) file or a raster image. An EPS file must contain an appropriate BoundingBox. 
A raster file can have a depth of 1, 8, 24, or 32 bits and is read via GDAL. Note: If GDAL was not configured during GMT installation then only EPS files are supported. {J} {R} position : str ``'[g|j|J|n|x]refpoint+rdpi+w[-]width[/height][+jjustify] [+nnx[/ny]][+odx[/dy]]'`` Sets reference point on the map for the image. box : bool or str ``'[+cclearances][+gfill][+i[[gap/]pen]][+p[pen]][+r[radius]] [+s[[dx/dy/][shade]]]'`` Without further options, draws a rectangular border around the image using **MAP_FRAME_PEN**. monochrome : bool Convert color image to monochrome grayshades using the (television) YIQ-transformation. {XY} {p} {t} """ kwargs = self._preprocess(**kwargs) with Session() as lib: arg_str = " ".join([imagefile, build_arg_string(kwargs)]) lib.call_module("image", arg_str) @fmt_docstring @use_alias( R="region", J="projection", D="position", F="box", X="xshift", Y="yshift", p="perspective", t="transparency", ) @kwargs_to_strings(R="sequence", p="sequence") def legend(self, spec=None, position="JTR+jTR+o0.2c", box="+gwhite+p1p", **kwargs): """ Plot legends on maps. Makes legends that can be overlaid on maps. Reads specific legend-related information from an input file, or automatically creates legend entries from plotted symbols that have labels. Unless otherwise noted, annotations will be made using the primary annotation font and size in effect (i.e., FONT_ANNOT_PRIMARY). Full option list at :gmt-docs:`legend.html` {aliases} Parameters ---------- spec : None or str Either None (default) for using the automatically generated legend specification file, or a filename pointing to the legend specification file. {J} {R} position : str ``'[g|j|J|n|x]refpoint+wwidth[/height][+jjustify][+lspacing] [+odx[/dy]]'`` Defines the reference point on the map for the legend. By default, uses 'JTR+jTR+o0.2c' which places the legend at the top-right corner inside the map frame, with a 0.2 cm offset. 
box : bool or str ``'[+cclearances][+gfill][+i[[gap/]pen]][+p[pen]][+r[radius]] [+s[[dx/dy/][shade]]]'`` Without further options, draws a rectangular border around the legend using **MAP_FRAME_PEN**. By default, uses '+gwhite+p1p' which draws a box around the legend using a 1 point black pen and adds a white background. {XY} {p} {t} """ kwargs = self._preprocess(**kwargs) if "D" not in kwargs: kwargs["D"] = position if "F" not in kwargs: kwargs["F"] = box with Session() as lib: if spec is None: specfile = "" elif data_kind(spec) == "file": specfile = spec else: raise GMTInvalidInput("Unrecognized data type: {}".format(type(spec))) arg_str = " ".join([specfile, build_arg_string(kwargs)]) lib.call_module("legend", arg_str) @fmt_docstring @use_alias( R="region", J="projection", B="frame", C="clearance", D="offset", G="fill", W="pen", X="xshift", Y="yshift", p="perspective", t="transparency", ) @kwargs_to_strings( R="sequence", textfiles="sequence_space", angle="sequence_comma", font="sequence_comma", justify="sequence_comma", p="sequence", ) def text( self, textfiles=None, x=None, y=None, position=None, text=None, angle=None, font=None, justify=None, **kwargs, ): """ Plot or typeset text strings of variable size, font type, and orientation. Must provide at least one of the following combinations as input: - *textfiles* - *x*, *y*, and *text* - *position* and *text* Full option list at :gmt-docs:`text.html` {aliases} Parameters ---------- textfiles : str or list A text data file name, or a list of filenames containing 1 or more records with (x, y[, angle, font, justify], text). x/y : float or 1d arrays The x and y coordinates, or an array of x and y coordinates to plot the text position : str Sets reference point on the map for the text by using x,y coordinates extracted from *region* instead of providing them through *x* and *y*. 
Specify with a two letter (order independent) code, chosen from: * Horizontal: L(eft), C(entre), R(ight) * Vertical: T(op), M(iddle), B(ottom) For example, position="TL" plots the text at the Upper Left corner of the map. text : str or 1d array The text string, or an array of strings to plot on the figure angle: int, float, str or bool Set the angle measured in degrees counter-clockwise from horizontal. E.g. 30 sets the text at 30 degrees. If no angle is explicitly given (i.e. angle=True) then the input textfile(s) must have this as a column. font : str or bool Set the font specification with format "size,font,color" where size is text size in points, font is the font to use, and color sets the font color. E.g. "12p,Helvetica-Bold,red" selects a 12p red Helvetica-Bold font. If no font info is explicitly given (i.e. font=True), then the input textfile(s) must have this information in one of its columns. justify : str or bool Set the alignment which refers to the part of the text string that will be mapped onto the (x,y) point. Choose a 2 character combination of L, C, R (for left, center, or right) and T, M, B for top, middle, or bottom. E.g., BL for lower left. If no justification is explicitly given (i.e. justify=True), then the input textfile(s) must have this as a column. {J} {R} clearance : str ``[dx/dy][+to|O|c|C]`` Adjust the clearance between the text and the surrounding box [15%]. Only used if *pen* or *fill* are specified. Append the unit you want ('c' for cm, 'i' for inch, or 'p' for point; if not given we consult 'PROJ_LENGTH_UNIT') or '%' for a percentage of the font size. Optionally, use modifier '+t' to set the shape of the textbox when using *fill* and/or *pen*. Append lower case 'o' to get a straight rectangle [Default]. Append upper case 'O' to get a rounded rectangle. In paragraph mode (*paragraph*) you can also append lower case 'c' to get a concave rectangle or append upper case 'C' to get a convex rectangle. 
fill : str Sets the shade or color used for filling the text box [Default is no fill]. offset : str ``[j|J]dx[/dy][+v[pen]]`` Offsets the text from the projected (x,y) point by dx,dy [0/0]. If dy is not specified then it is set equal to dx. Use offset='j' to offset the text away from the point instead (i.e., the text justification will determine the direction of the shift). Using offset='J' will shorten diagonal offsets at corners by sqrt(2). Optionally, append '+v' which will draw a line from the original point to the shifted point; append a pen to change the attributes for this line. pen : str Sets the pen used to draw a rectangle around the text string (see *clearance*) [Default is width = default, color = black, style = solid]. {XY} {p} {t} """ kwargs = self._preprocess(**kwargs) # Ensure inputs are either textfiles, x/y/text, or position/text if position is None: kind = data_kind(textfiles, x, y, text) else: if x is not None or y is not None: raise GMTInvalidInput( "Provide either position only, or x/y pairs, not both" ) kind = "vectors" if kind == "vectors" and text is None: raise GMTInvalidInput("Must provide text with x/y pairs or position") # Build the `-F` argument in gmt text. 
if "F" not in kwargs.keys() and ( ( position is not None or angle is not None or font is not None or justify is not None ) ): kwargs.update({"F": ""}) if angle is not None and isinstance(angle, (int, float, str)): kwargs["F"] += f"+a{str(angle)}" if font is not None and isinstance(font, str): kwargs["F"] += f"+f{font}" if justify is not None and isinstance(justify, str): kwargs["F"] += f"+j{justify}" if position is not None and isinstance(position, str): kwargs["F"] += f'+c{position}+t"{text}"' with Session() as lib: file_context = dummy_context(textfiles) if kind == "file" else "" if kind == "vectors": if position is not None: file_context = dummy_context("") else: file_context = lib.virtualfile_from_vectors( np.atleast_1d(x), np.atleast_1d(y), np.atleast_1d(text) ) with file_context as fname: arg_str = " ".join([fname, build_arg_string(kwargs)]) lib.call_module("text", arg_str) @fmt_docstring @use_alias( R="region", J="projection", B="frame", C="offset", X="xshift", Y="yshift", p="perspective", t="transparency", ) @kwargs_to_strings(R="sequence", p="sequence") def meca( self, spec, scale, longitude=None, latitude=None, depth=None, convention=None, component="full", plot_longitude=None, plot_latitude=None, **kwargs, ): """ Plot focal mechanisms. Full option list at :gmt-docs:`supplements/seis/meca.html` Note ---- Currently, labeling of beachballs with text strings is only supported via providing a file to `spec` as input. {aliases} Parameters ---------- spec: dict, 1D array, 2D array, pd.DataFrame, or str Either a filename containing focal mechanism parameters as columns, a 1- or 2-D array with the same, or a dictionary. If a filename or array, `convention` is required so we know how to interpret the columns/entries. If a dictionary, the following combinations of keys are supported; these determine the convention. Dictionary may contain values for a single focal mechanism or lists of values for many focal mechanisms. 
A Pandas DataFrame may optionally contain columns latitude, longitude, depth, plot_longitude, and/or plot_latitude instead of passing them to the meca method. - ``"aki"`` — *strike, dip, rake, magnitude* - ``"gcmt"`` — *strike1, dip1, rake1, strike2, dip2, rake2, mantissa, exponent* - ``"mt"`` — *mrr, mtt, mff, mrt, mrf, mtf, exponent* - ``"partial"`` — *strike1, dip1, strike2, fault_type, magnitude* - ``"principal_axis"`` — *t_exponent, t_azimuth, t_plunge, n_exponent, n_azimuth, n_plunge, p_exponent, p_azimuth, p_plunge, exponent* scale: str Adjusts the scaling of the radius of the beachball, which is proportional to the magnitude. Scale defines the size for magnitude = 5 (i.e. scalar seismic moment M0 = 4.0E23 dynes-cm) longitude: int, float, list, or 1d numpy array Longitude(s) of event location. Ignored if `spec` is not a dictionary. List must be the length of the number of events. Ignored if `spec` is a DataFrame and contains a 'longitude' column. latitude: int, float, list, or 1d numpy array Latitude(s) of event location. Ignored if `spec` is not a dictionary. List must be the length of the number of events. Ignored if `spec` is a DataFrame and contains a 'latitude' column. depth: int, float, list, or 1d numpy array Depth(s) of event location in kilometers. Ignored if `spec` is not a dictionary. List must be the length of the number of events. Ignored if `spec` is a DataFrame and contains a 'depth' column. convention: str ``"aki"`` (Aki & Richards), ``"gcmt"`` (global CMT), ``"mt"`` (seismic moment tensor), ``"partial"`` (partial focal mechanism), or ``"principal_axis"`` (principal axis). Ignored if `spec` is a dictionary or dataframe. component: str The component of the seismic moment tensor to plot. 
``"full"`` (the full seismic moment tensor), ``"dc"`` (the closest double couple with zero trace and zero determinant), ``"deviatoric"`` (zero trace) plot_longitude: int, float, list, or 1d numpy array Longitude(s) at which to place beachball, only used if `spec` is a dictionary. List must be the length of the number of events. Ignored if `spec` is a DataFrame and contains a 'plot_longitude' column. plot_latitude: int, float, list, or 1d numpy array Latitude(s) at which to place beachball, only used if `spec` is a dictionary. List must be the length of the number of events. Ignored if `spec` is a DataFrame and contains a 'plot_latitude' column. offset: bool or str Offsets beachballs to the longitude, latitude specified in the last two columns of the input file or array, or by `plot_longitude` and `plot_latitude` if provided. A small circle is plotted at the initial location and a line connects the beachball to the circle. Specify pen and optionally append ``+ssize`` to change the line style and/or size of the circle. 
{J} {R} {B} {XY} {p} {t} """ # pylint warnings that need to be fixed # pylint: disable=too-many-locals # pylint: disable=too-many-nested-blocks # pylint: disable=too-many-branches # pylint: disable=no-self-use # pylint: disable=too-many-statements def set_pointer(data_pointers, spec): """Set optional parameter pointers based on DataFrame or dict, if those parameters are present in the DataFrame or dict.""" for param in list(data_pointers.keys()): if param in spec: # set pointer based on param name data_pointers[param] = spec[param] def update_pointers(data_pointers): """Updates variables based on the location of data, as the following data can be passed as parameters or it can be contained in `spec`.""" # update all pointers longitude = data_pointers["longitude"] latitude = data_pointers["latitude"] depth = data_pointers["depth"] plot_longitude = data_pointers["plot_longitude"] plot_latitude = data_pointers["plot_latitude"] return (longitude, latitude, depth, plot_longitude, plot_latitude) # Check the spec and parse the data according to the specified # convention if isinstance(spec, (dict, pd.DataFrame)): # dicts and DataFrames are handed similarly but not identically if ( longitude is None or latitude is None or depth is None ) and not isinstance(spec, (dict, pd.DataFrame)): raise GMTError("Location not fully specified.") param_conventions = { "AKI": ["strike", "dip", "rake", "magnitude"], "GCMT": ["strike1", "dip1", "dip2", "rake2", "mantissa", "exponent"], "MT": ["mrr", "mtt", "mff", "mrt", "mrf", "mtf", "exponent"], "PARTIAL": ["strike1", "dip1", "strike2", "fault_type", "magnitude"], "PRINCIPAL_AXIS": [ "t_exponent", "t_azimuth", "t_plunge", "n_exponent", "n_azimuth", "n_plunge", "p_exponent", "p_azimuth", "p_plunge", "exponent", ], } # to keep track of where optional parameters exist data_pointers = { "longitude": longitude, "latitude": latitude, "depth": depth, "plot_longitude": plot_longitude, "plot_latitude": plot_latitude, } # make a DataFrame copy to 
check convention if it contains # other parameters if isinstance(spec, (dict, pd.DataFrame)): # check if a copy is necessary copy = False drop_list = [] for pointer in data_pointers: if pointer in spec: copy = True drop_list.append(pointer) if copy: spec_conv = spec.copy() # delete optional parameters from copy for convention check for item in drop_list: del spec_conv[item] else: spec_conv = spec # set convention and focal parameters based on spec convention convention_assigned = False for conv in param_conventions: if set(spec_conv.keys()) == set(param_conventions[conv]): convention = conv.lower() foc_params = param_conventions[conv] convention_assigned = True break if not convention_assigned: raise GMTError( "Parameters in spec dictionary do not match known " "conventions." ) # create a dict type pointer for easier to read code if isinstance(spec, dict): dict_type_pointer = list(spec.values())[0] elif isinstance(spec, pd.DataFrame): # use df.values as pointer for DataFrame behavior dict_type_pointer = spec.values # assemble the 1D array for the case of floats and ints as values if isinstance(dict_type_pointer, (int, float)): # update pointers set_pointer(data_pointers, spec) # look for optional parameters in the right place ( longitude, latitude, depth, plot_longitude, plot_latitude, ) = update_pointers(data_pointers) # Construct the array (order matters) spec = [longitude, latitude, depth] + [spec[key] for key in foc_params] # Add in plotting options, if given, otherwise add 0s for arg in plot_longitude, plot_latitude: if arg is None: spec.append(0) else: if "C" not in kwargs: kwargs["C"] = True spec.append(arg) # or assemble the 2D array for the case of lists as values elif isinstance(dict_type_pointer, list): # update pointers set_pointer(data_pointers, spec) # look for optional parameters in the right place ( longitude, latitude, depth, plot_longitude, plot_latitude, ) = update_pointers(data_pointers) # before constructing the 2D array lets check that each 
key # of the dict has the same quantity of values to avoid bugs list_length = len(list(spec.values())[0]) for value in list(spec.values()): if len(value) != list_length: raise GMTError( "Unequal number of focal mechanism " "parameters supplied in 'spec'." ) # lets also check the inputs for longitude, latitude, # and depth if it is a list or array if ( isinstance(longitude, (list, np.ndarray)) or isinstance(latitude, (list, np.ndarray)) or isinstance(depth, (list, np.ndarray)) ): if (len(longitude) != len(latitude)) or ( len(longitude) != len(depth) ): raise GMTError( "Unequal number of focal mechanism " "locations supplied." ) # values are ok, so build the 2D array spec_array = [] for index in range(list_length): # Construct the array one row at a time (note that order # matters here, hence the list comprehension!) row = [longitude[index], latitude[index], depth[index]] + [ spec[key][index] for key in foc_params ] # Add in plotting options, if given, otherwise add 0s as # required by GMT for arg in plot_longitude, plot_latitude: if arg is None: row.append(0) else: if "C" not in kwargs: kwargs["C"] = True row.append(arg[index]) spec_array.append(row) spec = spec_array # or assemble the array for the case of pd.DataFrames elif isinstance(dict_type_pointer, np.ndarray): # update pointers set_pointer(data_pointers, spec) # look for optional parameters in the right place ( longitude, latitude, depth, plot_longitude, plot_latitude, ) = update_pointers(data_pointers) # lets also check the inputs for longitude, latitude, and depth # just in case the user entered different length lists if ( isinstance(longitude, (list, np.ndarray)) or isinstance(latitude, (list, np.ndarray)) or isinstance(depth, (list, np.ndarray)) ): if (len(longitude) != len(latitude)) or ( len(longitude) != len(depth) ): raise GMTError( "Unequal number of focal mechanism locations supplied." 
) # values are ok, so build the 2D array in the correct order spec_array = [] for index in range(len(spec)): # Construct the array one row at a time (note that order # matters here, hence the list comprehension!) row = [longitude[index], latitude[index], depth[index]] + [ spec[key][index] for key in foc_params ] # Add in plotting options, if given, otherwise add 0s as # required by GMT for arg in plot_longitude, plot_latitude: if arg is None: row.append(0) else: if "C" not in kwargs: kwargs["C"] = True row.append(arg[index]) spec_array.append(row) spec = spec_array else: raise GMTError( "Parameter 'spec' contains values of an unsupported type." ) # Add condition and scale to kwargs if convention == "aki": data_format = "a" elif convention == "gcmt": data_format = "c" elif convention == "mt": # Check which component of mechanism the user wants plotted if component == "deviatoric": data_format = "z" elif component == "dc": data_format = "d" else: # component == 'full' data_format = "m" elif convention == "partial": data_format = "p" elif convention == "principal_axis": # Check which component of mechanism the user wants plotted if component == "deviatoric": data_format = "t" elif component == "dc": data_format = "y" else: # component == 'full' data_format = "x" # Support old-school GMT format options elif convention in ["a", "c", "m", "d", "z", "p", "x", "y", "t"]: data_format = convention else: raise GMTError("Convention not recognized.") # Assemble -S flag kwargs["S"] = data_format + scale kind = data_kind(spec) with Session() as lib: if kind == "matrix": file_context = lib.virtualfile_from_matrix(np.atleast_2d(spec)) elif kind == "file": file_context = dummy_context(spec) else: raise GMTInvalidInput("Unrecognized data type: {}".format(type(spec))) with file_context as fname: arg_str = " ".join([fname, build_arg_string(kwargs)]) lib.call_module("meca", arg_str)
{ "pile_set_name": "Github" }
.. _multigraph: ================================================================= MultiGraph---Undirected graphs with self loops and parallel edges ================================================================= Overview ======== .. currentmodule:: networkx .. autoclass:: MultiGraph Methods ======= Adding and removing nodes and edges ----------------------------------- .. autosummary:: :toctree: generated/ MultiGraph.__init__ MultiGraph.add_node MultiGraph.add_nodes_from MultiGraph.remove_node MultiGraph.remove_nodes_from MultiGraph.add_edge MultiGraph.add_edges_from MultiGraph.add_weighted_edges_from MultiGraph.new_edge_key MultiGraph.remove_edge MultiGraph.remove_edges_from MultiGraph.update MultiGraph.clear MultiGraph.clear_edges Reporting nodes edges and neighbors ----------------------------------- .. autosummary:: :toctree: generated/ MultiGraph.nodes MultiGraph.__iter__ MultiGraph.has_node MultiGraph.__contains__ MultiGraph.edges MultiGraph.has_edge MultiGraph.get_edge_data MultiGraph.neighbors MultiGraph.adj MultiGraph.__getitem__ MultiGraph.adjacency MultiGraph.nbunch_iter Counting nodes edges and neighbors ---------------------------------- .. autosummary:: :toctree: generated/ MultiGraph.order MultiGraph.number_of_nodes MultiGraph.__len__ MultiGraph.degree MultiGraph.size MultiGraph.number_of_edges Making copies and subgraphs --------------------------- .. autosummary:: :toctree: generated/ MultiGraph.copy MultiGraph.to_undirected MultiGraph.to_directed MultiGraph.subgraph MultiGraph.edge_subgraph
{ "pile_set_name": "Github" }
<?php
// Admin dashboard "Latest Orders" widget -- English language strings.
// Keys in $_ are looked up by the dashboard recent-orders extension.

// Heading
$_['heading_title']     = 'Latest Orders';

// Text
$_['text_extension']    = 'Extensions';
$_['text_success']      = 'Success: You have modified dashboard recent orders!';
$_['text_edit']         = 'Edit Dashboard Recent Orders';

// Column
$_['column_order_id']   = 'Order ID';
$_['column_customer']   = 'Customer';
$_['column_status']     = 'Status';
$_['column_total']      = 'Total';
$_['column_date_added'] = 'Date Added';
$_['column_action']     = 'Action';

// Entry
$_['entry_status']      = 'Status';
$_['entry_sort_order']  = 'Sort Order';
$_['entry_width']       = 'Width';

// Error
$_['error_permission']  = 'Warning: You do not have permission to modify dashboard recent orders!';
{ "pile_set_name": "Github" }
// ------------------------------------------------------------------------------ // Copyright (c) Microsoft Corporation. All Rights Reserved. Licensed under the MIT License. See License in the project root for license information. // ------------------------------------------------------------------------------ package com.microsoft.graph.models.generated; /** * The Enum Signin Frequency Type. */ public enum SigninFrequencyType { /** * days */ DAYS, /** * hours */ HOURS, /** * For SigninFrequencyType values that were not expected from the service */ UNEXPECTED_VALUE }
{ "pile_set_name": "Github" }
: ^a ^b ^c ^d ^e ^f ^g ^h ^i ^j ^k ^l ^m ^n ^o ^p ^q ^r ^s ^t ^u ^v ^w ^x ^y ^z ^0 ^1 ^2 ^3 ^4 ^5 ^6 ^7 ^8 ^9 ^A ^B ^C ^D ^E ^F ^G ^H ^I ^J ^K ^L ^M ^N ^O ^P ^Q ^R ^S ^T ^U ^V ^W ^X ^Y ^Z
{ "pile_set_name": "Github" }
/// Generated interface surface for Foundation's locking classes
/// (declarations only -- implementations live in Foundation).

/// Minimal locking contract: acquire with `lock()`, release with `unlock()`.
protocol NSLocking {
  func lock()
  func unlock()
}

/// A basic mutual-exclusion lock (see Foundation's NSLock documentation).
class NSLock : NSObject, NSLocking {
  /// Attempts to acquire the lock without blocking; returns whether it succeeded.
  func tryLock() -> Bool
  /// Attempts to acquire the lock before `limit`; returns whether it succeeded.
  func lock(before limit: NSDate) -> Bool
  /// Optional name, e.g. for debugging output.
  @available(iOS 2.0, *)
  var name: String?
  func lock()
  func unlock()
}

/// A lock whose acquisition can be gated on an integer condition value
/// (see Foundation's NSConditionLock documentation).
class NSConditionLock : NSObject, NSLocking {
  /// Creates the lock with an initial condition value.
  init(condition condition: Int)
  /// The condition most recently associated with the lock.
  var condition: Int { get }
  /// Blocks until the lock can be acquired with `condition` matching.
  func lock(whenCondition condition: Int)
  /// Attempts to acquire the lock without blocking; returns whether it succeeded.
  func tryLock() -> Bool
  /// Non-blocking acquire that also requires the condition to match.
  func tryWhenCondition(_ condition: Int) -> Bool
  /// Releases the lock and sets the condition value.
  func unlock(withCondition condition: Int)
  /// Attempts to acquire the lock before `limit`; returns whether it succeeded.
  func lock(before limit: NSDate) -> Bool
  /// Condition-gated acquire with a deadline; returns whether it succeeded.
  func lock(whenCondition condition: Int, before limit: NSDate) -> Bool
  /// Optional name, e.g. for debugging output.
  @available(iOS 2.0, *)
  var name: String?
  func lock()
  func unlock()
}

/// A lock that the same caller may acquire multiple times without
/// deadlocking (see Foundation's NSRecursiveLock documentation).
class NSRecursiveLock : NSObject, NSLocking {
  /// Attempts to acquire the lock without blocking; returns whether it succeeded.
  func tryLock() -> Bool
  /// Attempts to acquire the lock before `limit`; returns whether it succeeded.
  func lock(before limit: NSDate) -> Bool
  /// Optional name, e.g. for debugging output.
  @available(iOS 2.0, *)
  var name: String?
  func lock()
  func unlock()
}

/// A condition variable paired with a lock, for wait/signal coordination
/// (see Foundation's NSCondition documentation).
@available(iOS 2.0, *)
class NSCondition : NSObject, NSLocking {
  /// Blocks the current thread until signaled.
  func wait()
  /// Blocks until signaled or `limit` passes; returns whether it was signaled.
  func wait(until limit: NSDate) -> Bool
  /// Wakes one waiting thread.
  func signal()
  /// Wakes all waiting threads.
  func broadcast()
  /// Optional name, e.g. for debugging output.
  @available(iOS 2.0, *)
  var name: String?
  @available(iOS 2.0, *)
  func lock()
  @available(iOS 2.0, *)
  func unlock()
}
{ "pile_set_name": "Github" }
<?php

return [

    /*
    |--------------------------------------------------------------------------
    | Application Name
    |--------------------------------------------------------------------------
    |
    | This value is the name of your application. This value is used when the
    | framework needs to place the application's name in a notification or
    | any other location as required by the application or its packages.
    |
    */

    'name' => env('APP_NAME', 'HackIT'),

    // FIX: env() expects the environment variable NAME as its first argument.
    // The original code passed the description text itself as the variable
    // name -- env("Algiers's big student hackathon") -- so this key always
    // resolved to null. Read APP_DESCRIPTION from the environment and fall
    // back to the intended description text.
    'description' => env('APP_DESCRIPTION', "Algiers's big student hackathon"),

    /*
    |--------------------------------------------------------------------------
    | Application Environment
    |--------------------------------------------------------------------------
    |
    | This value determines the "environment" your application is currently
    | running in. This may determine how you prefer to configure various
    | services the application utilizes. Set this in your ".env" file.
    |
    */

    'env' => env('APP_ENV', 'production'),

    /*
    |--------------------------------------------------------------------------
    | Application Debug Mode
    |--------------------------------------------------------------------------
    |
    | When your application is in debug mode, detailed error messages with
    | stack traces will be shown on every error that occurs within your
    | application. If disabled, a simple generic error page is shown.
    |
    */

    'debug' => env('APP_DEBUG', false),

    /*
    |--------------------------------------------------------------------------
    | Application URL
    |--------------------------------------------------------------------------
    |
    | This URL is used by the console to properly generate URLs when using
    | the Artisan command line tool. You should set this to the root of
    | your application so that it is used when running Artisan tasks.
| */ 'url' => env('APP_URL', 'http://localhost'), 'asset_url' => env('ASSET_URL', null), /* |-------------------------------------------------------------------------- | Application Timezone |-------------------------------------------------------------------------- | | Here you may specify the default timezone for your application, which | will be used by the PHP date and date-time functions. We have gone | ahead and set this to a sensible default for you out of the box. | */ 'timezone' => 'UTC', /* |-------------------------------------------------------------------------- | Application Locale Configuration |-------------------------------------------------------------------------- | | The application locale determines the default locale that will be used | by the translation service provider. You are free to set this value | to any of the locales which will be supported by the application. | */ 'locale' => 'en', /* |-------------------------------------------------------------------------- | Application Fallback Locale |-------------------------------------------------------------------------- | | The fallback locale determines the locale to use when the current one | is not available. You may change the value to correspond to any of | the language folders that are provided through your application. | */ 'fallback_locale' => 'en', /* |-------------------------------------------------------------------------- | Faker Locale |-------------------------------------------------------------------------- | | This locale will be used by the Faker PHP library when generating fake | data for your database seeds. For example, this will be used to get | localized telephone numbers, street address information and more. 
| */ 'faker_locale' => 'en_US', /* |-------------------------------------------------------------------------- | Encryption Key |-------------------------------------------------------------------------- | | This key is used by the Illuminate encrypter service and should be set | to a random, 32 character string, otherwise these encrypted strings | will not be safe. Please do this before deploying an application! | */ 'key' => env('APP_KEY'), 'cipher' => 'AES-256-CBC', /* /* |-------------------------------------------------------------------------- | Autoloaded Service Providers |-------------------------------------------------------------------------- | | The service providers listed here will be automatically loaded on the | request to your application. Feel free to add your own services to | this array to grant expanded functionality to your applications. | */ 'providers' => [ /* * Laravel Framework Service Providers... */ Illuminate\Auth\AuthServiceProvider::class, Illuminate\Broadcasting\BroadcastServiceProvider::class, Illuminate\Bus\BusServiceProvider::class, Illuminate\Cache\CacheServiceProvider::class, Illuminate\Foundation\Providers\ConsoleSupportServiceProvider::class, Illuminate\Cookie\CookieServiceProvider::class, Illuminate\Database\DatabaseServiceProvider::class, Illuminate\Encryption\EncryptionServiceProvider::class, Illuminate\Filesystem\FilesystemServiceProvider::class, Illuminate\Foundation\Providers\FoundationServiceProvider::class, Illuminate\Hashing\HashServiceProvider::class, Illuminate\Mail\MailServiceProvider::class, Illuminate\Notifications\NotificationServiceProvider::class, Illuminate\Pagination\PaginationServiceProvider::class, Illuminate\Pipeline\PipelineServiceProvider::class, Illuminate\Queue\QueueServiceProvider::class, Illuminate\Redis\RedisServiceProvider::class, Illuminate\Auth\Passwords\PasswordResetServiceProvider::class, Illuminate\Session\SessionServiceProvider::class, 
Illuminate\Translation\TranslationServiceProvider::class, Illuminate\Validation\ValidationServiceProvider::class, Illuminate\View\ViewServiceProvider::class, Fedeisas\LaravelMailCssInliner\LaravelMailCssInlinerServiceProvider::class, /* * Package Service Providers... */ /* * Application Service Providers... */ App\Providers\AppServiceProvider::class, App\Providers\AuthServiceProvider::class, // App\Providers\BroadcastServiceProvider::class, App\Providers\EventServiceProvider::class, App\Providers\RouteServiceProvider::class, ], /* |-------------------------------------------------------------------------- | Class Aliases |-------------------------------------------------------------------------- | | This array of class aliases will be registered when this application | is started. However, feel free to register as many as you wish as | the aliases are "lazy" loaded so they don't hinder performance. | */ 'aliases' => [ 'App' => Illuminate\Support\Facades\App::class, 'Arr' => Illuminate\Support\Arr::class, 'Artisan' => Illuminate\Support\Facades\Artisan::class, 'Auth' => Illuminate\Support\Facades\Auth::class, 'Blade' => Illuminate\Support\Facades\Blade::class, 'Broadcast' => Illuminate\Support\Facades\Broadcast::class, 'Bus' => Illuminate\Support\Facades\Bus::class, 'Cache' => Illuminate\Support\Facades\Cache::class, 'Config' => Illuminate\Support\Facades\Config::class, 'Cookie' => Illuminate\Support\Facades\Cookie::class, 'Crypt' => Illuminate\Support\Facades\Crypt::class, 'DB' => Illuminate\Support\Facades\DB::class, 'Eloquent' => Illuminate\Database\Eloquent\Model::class, 'Event' => Illuminate\Support\Facades\Event::class, 'File' => Illuminate\Support\Facades\File::class, 'Gate' => Illuminate\Support\Facades\Gate::class, 'Hash' => Illuminate\Support\Facades\Hash::class, 'Lang' => Illuminate\Support\Facades\Lang::class, 'Log' => Illuminate\Support\Facades\Log::class, 'Mail' => Illuminate\Support\Facades\Mail::class, 'Notification' => 
Illuminate\Support\Facades\Notification::class, 'Password' => Illuminate\Support\Facades\Password::class, 'Queue' => Illuminate\Support\Facades\Queue::class, 'Redirect' => Illuminate\Support\Facades\Redirect::class, 'Redis' => Illuminate\Support\Facades\Redis::class, 'Request' => Illuminate\Support\Facades\Request::class, 'Response' => Illuminate\Support\Facades\Response::class, 'Route' => Illuminate\Support\Facades\Route::class, 'Schema' => Illuminate\Support\Facades\Schema::class, 'Session' => Illuminate\Support\Facades\Session::class, 'Storage' => Illuminate\Support\Facades\Storage::class, 'Str' => Illuminate\Support\Str::class, 'URL' => Illuminate\Support\Facades\URL::class, 'Validator' => Illuminate\Support\Facades\Validator::class, 'View' => Illuminate\Support\Facades\View::class, ], ];
{ "pile_set_name": "Github" }
<!doctype html> <html lang="en"> <head> <meta charset="UTF-8"> <title>Example - example-cache-factory</title> <link href="style.css" rel="stylesheet" type="text/css"> <script src="../../../angular.min.js"></script> <script src="script.js"></script> </head> <body ng-app="cacheExampleApp"> <div ng-controller="CacheController"> <input ng-model="newCacheKey" placeholder="Key"> <input ng-model="newCacheValue" placeholder="Value"> <button ng-click="put(newCacheKey, newCacheValue)">Cache</button> <p ng-if="keys.length">Cached Values</p> <div ng-repeat="key in keys"> <span ng-bind="key"></span> <span>: </span> <b ng-bind="cache.get(key)"></b> </div> <p>Cache Info</p> <div ng-repeat="(key, value) in cache.info()"> <span ng-bind="key"></span> <span>: </span> <b ng-bind="value"></b> </div> </div> </body> </html>
{ "pile_set_name": "Github" }
///////////////////////////////////////////////////////////////////////////////
// Name:        src/generic/fontpickerg.cpp
// Purpose:     wxGenericFontButton class implementation
// Author:      Francesco Montorsi
// Modified by:
// Created:     15/04/2006
// Copyright:   (c) Francesco Montorsi
// Licence:     wxWindows licence
///////////////////////////////////////////////////////////////////////////////

// ============================================================================
// declarations
// ============================================================================

// ----------------------------------------------------------------------------
// headers
// ----------------------------------------------------------------------------

// For compilers that support precompilation, includes "wx.h".
#include "wx/wxprec.h"

#ifdef __BORLANDC__
    #pragma hdrstop
#endif

#if wxUSE_FONTPICKERCTRL

#include "wx/fontpicker.h"
#include "wx/fontdlg.h"

// ============================================================================
// implementation
// ============================================================================

wxIMPLEMENT_DYNAMIC_CLASS(wxGenericFontButton, wxButton);

// ----------------------------------------------------------------------------
// wxGenericFontButton
// ----------------------------------------------------------------------------

// Two-step creation: builds the underlying wxButton, wires its click handler,
// initialises the wxFontData used by the picker dialog, and applies the
// initial font (falling back to *wxNORMAL_FONT when 'initial' is invalid).
bool wxGenericFontButton::Create( wxWindow *parent, wxWindowID id,
                        const wxFont &initial, const wxPoint &pos,
                        const wxSize &size, long style,
                        const wxValidator& validator, const wxString &name)
{
    // With wxFNTP_FONTDESC_AS_LABEL the label is derived from the selected
    // font later (see UpdateFont), so start with an empty string.
    wxString label = (style & wxFNTP_FONTDESC_AS_LABEL) ?
                        wxString() : // label will be updated by UpdateFont
                        _("Choose font");

    // create this button
    if (!wxButton::Create( parent, id, label, pos, size, style,
                           validator, name ))
    {
        wxFAIL_MSG( wxT("wxGenericFontButton creation failed") );
        return false;
    }

    // and handle user clicks on it
    Connect(GetId(), wxEVT_BUTTON,
            wxCommandEventHandler(wxGenericFontButton::OnButtonClick),
            NULL, this);

    InitFontData();

    m_selectedFont = initial.IsOk() ? initial : *wxNORMAL_FONT;
    UpdateFont();

    return true;
}

// Default settings for the wxFontData passed to the wxFontDialog:
// symbol fonts allowed, black colour, effects (colour/underline) enabled.
void wxGenericFontButton::InitFontData()
{
    m_data.SetAllowSymbols(true);
    m_data.SetColour(*wxBLACK);
    m_data.EnableEffects(true);
}

// Click handler: shows the font dialog seeded with the current selection;
// on OK stores the chosen data and emits a wxFontPickerEvent to listeners.
void wxGenericFontButton::OnButtonClick(wxCommandEvent& WXUNUSED(ev))
{
    // update the wxFontData to be shown in the dialog
    m_data.SetInitialFont(m_selectedFont);

    // create the font dialog and display it
    wxFontDialog dlg(this, m_data);
    if (dlg.ShowModal() == wxID_OK)
    {
        m_data = dlg.GetFontData();
        SetSelectedFont(m_data.GetChosenFont());

        // fire an event
        wxFontPickerEvent event(this, GetId(), m_selectedFont);
        GetEventHandler()->ProcessEvent(event);
    }
}

// Refreshes the button's appearance from m_selectedFont / m_data: the
// foreground colour always follows the font data; depending on style flags
// the label is drawn in the selected font and/or replaced by its description
// ("facename, pointsize").  No-op while no valid font is selected.
void wxGenericFontButton::UpdateFont()
{
    if ( !m_selectedFont.IsOk() )
        return;

    SetForegroundColour(m_data.GetColour());

    if (HasFlag(wxFNTP_USEFONT_FOR_LABEL))
    {
        // use currently selected font for the label...
        wxButton::SetFont(m_selectedFont);
    }

    if (HasFlag(wxFNTP_FONTDESC_AS_LABEL))
    {
        SetLabel(wxString::Format(wxT("%s, %d"),
                 m_selectedFont.GetFaceName().c_str(),
                 m_selectedFont.GetPointSize()));
    }
}

#endif // wxUSE_FONTPICKERCTRL
{ "pile_set_name": "Github" }
#-- copyright
# OpenProject is an open source project management software.
# Copyright (C) 2012-2020 the OpenProject GmbH
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License version 3.
#
# OpenProject is a fork of ChiliProject, which is a fork of Redmine. The copyright follows:
# Copyright (C) 2006-2017 Jean-Philippe Lang
# Copyright (C) 2010-2013 the ChiliProject Team
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# See docs/COPYRIGHT.rdoc for more details.
#++

require 'spec_helper'

describe Queries::WorkPackages::Filter::WatcherFilter, type: :model do
  let(:user) { FactoryBot.build_stubbed(:user) }

  it_behaves_like 'basic query filter' do
    let(:type) { :list }
    let(:class_key) { :watcher_id }

    # Stubbed loader so no principals are fetched from the database by default.
    let(:principal_loader) do
      loader = double('principal_loader')

      allow(loader)
        .to receive(:user_values)
        .and_return([])

      loader
    end

    before do
      allow(Queries::WorkPackages::Filter::PrincipalLoader)
        .to receive(:new)
        .with(project)
        .and_return(principal_loader)
    end

    describe '#available?' do
      it 'is true if the user is logged in' do
        allow(User)
          .to receive_message_chain(:current, :logged?)
          .and_return true

        expect(instance).to be_available
      end

      it 'is true if the user is allowed to see watchers and if there are users' do
        allow(User)
          .to receive_message_chain(:current, :logged?)
          .and_return false
        allow(User)
          .to receive_message_chain(:current, :allowed_to?)
          .and_return true
        allow(principal_loader)
          .to receive(:user_values)
          .and_return([user])

        expect(instance).to be_available
      end

      it 'is false if the user is allowed to see watchers but there are no users' do
        allow(User)
          .to receive_message_chain(:current, :logged?)
          .and_return false
        allow(User)
          .to receive_message_chain(:current, :allowed_to?)
          .and_return true
        allow(principal_loader)
          .to receive(:user_values)
          .and_return([])

        expect(instance).to_not be_available
      end

      it 'is false if the user is not allowed to see watchers but there are users' do
        allow(User)
          .to receive_message_chain(:current, :logged?)
          .and_return false
        allow(User)
          .to receive_message_chain(:current, :allowed_to?)
          .and_return false
        allow(principal_loader)
          .to receive(:user_values)
          .and_return([user])

        expect(instance).to_not be_available
      end
    end

    describe '#allowed_values' do
      # FIX: these two examples used to be `context` blocks whose expectations
      # sat inside a `before` hook with no `it` example, so RSpec never ran
      # them (hooks only execute for examples).  They are proper examples now.
      it 'contains the me value if the user is logged in' do
        allow(User)
          .to receive_message_chain(:current, :logged?)
          .and_return true

        expect(instance.allowed_values)
          .to match_array [[I18n.t(:label_me), 'me']]
      end

      it 'contains the user values loaded if the user is allowed to see them' do
        allow(User)
          .to receive_message_chain(:current, :logged?)
          .and_return true
        allow(User)
          .to receive_message_chain(:current, :allowed_to?)
          .and_return true
        allow(principal_loader)
          .to receive(:user_values)
          .and_return([user])

        expect(instance.allowed_values)
          .to match_array [[I18n.t(:label_me), 'me'], [user.name, user.id.to_s]]
      end
    end

    describe '#ar_object_filter?' do
      it 'is true' do
        expect(instance)
          .to be_ar_object_filter
      end
    end

    describe '#value_objects' do
      let(:user1) { FactoryBot.build_stubbed(:user) }

      before do
        allow(Principal)
          .to receive(:where)
          .with(id: [user1.id.to_s])
          .and_return([user1])

        instance.values = [user1.id.to_s]
      end

      it 'returns an array of users' do
        expect(instance.value_objects)
          .to match_array([user1])
      end
    end
  end
end
{ "pile_set_name": "Github" }
/* gzlib.c -- zlib functions common to reading and writing gzip files * Copyright (C) 2004-2017 Mark Adler * For conditions of distribution and use, see copyright notice in zlib.h */ #include "gzguts.h" #if defined(_WIN32) && !defined(__BORLANDC__) && !defined(__MINGW32__) # define LSEEK _lseeki64 #else #if defined(_LARGEFILE64_SOURCE) && _LFS64_LARGEFILE-0 # define LSEEK lseek64 #else # define LSEEK lseek #endif #endif /* Local functions */ local void gz_reset OF((gz_statep)); local gzFile gz_open OF((const void *, int, const char *)); #if defined UNDER_CE /* Map the Windows error number in ERROR to a locale-dependent error message string and return a pointer to it. Typically, the values for ERROR come from GetLastError. The string pointed to shall not be modified by the application, but may be overwritten by a subsequent call to gz_strwinerror The gz_strwinerror function does not change the current setting of GetLastError. */ char ZLIB_INTERNAL *gz_strwinerror (error) DWORD error; { static char buf[1024]; wchar_t *msgbuf; DWORD lasterr = GetLastError(); DWORD chars = FormatMessage(FORMAT_MESSAGE_FROM_SYSTEM | FORMAT_MESSAGE_ALLOCATE_BUFFER, NULL, error, 0, /* Default language */ (LPVOID)&msgbuf, 0, NULL); if (chars != 0) { /* If there is an \r\n appended, zap it. */ if (chars >= 2 && msgbuf[chars - 2] == '\r' && msgbuf[chars - 1] == '\n') { chars -= 2; msgbuf[chars] = 0; } if (chars > sizeof (buf) - 1) { chars = sizeof (buf) - 1; msgbuf[chars] = 0; } wcstombs(buf, msgbuf, chars + 1); LocalFree(msgbuf); } else { sprintf(buf, "unknown win32 error (%ld)", error); } SetLastError(lasterr); return buf; } #endif /* UNDER_CE */ /* Reset gzip file state */ local void gz_reset(state) gz_statep state; { state->x.have = 0; /* no output data available */ if (state->mode == GZ_READ) { /* for reading ... 
*/ state->eof = 0; /* not at end of file */ state->past = 0; /* have not read past end yet */ state->how = LOOK; /* look for gzip header */ } state->seek = 0; /* no seek request pending */ gz_error(state, Z_OK, NULL); /* clear error */ state->x.pos = 0; /* no uncompressed data yet */ state->strm.avail_in = 0; /* no input data yet */ } /* Open a gzip file either by name or file descriptor. */ local gzFile gz_open(path, fd, mode) const void *path; int fd; const char *mode; { gz_statep state; z_size_t len; int oflag; #ifdef O_CLOEXEC int cloexec = 0; #endif #ifdef O_EXCL int exclusive = 0; #endif /* check input */ if (path == NULL) return NULL; /* allocate gzFile structure to return */ state = (gz_statep)malloc(sizeof(gz_state)); if (state == NULL) return NULL; state->size = 0; /* no buffers allocated yet */ state->want = GZBUFSIZE; /* requested buffer size */ state->msg = NULL; /* no error message yet */ /* interpret mode */ state->mode = GZ_NONE; state->level = Z_DEFAULT_COMPRESSION; state->strategy = Z_DEFAULT_STRATEGY; state->direct = 0; while (*mode) { if (*mode >= '0' && *mode <= '9') state->level = *mode - '0'; else switch (*mode) { case 'r': state->mode = GZ_READ; break; #ifndef NO_GZCOMPRESS case 'w': state->mode = GZ_WRITE; break; case 'a': state->mode = GZ_APPEND; break; #endif case '+': /* can't read and write at the same time */ free(state); return NULL; case 'b': /* ignore -- will request binary anyway */ break; #ifdef O_CLOEXEC case 'e': cloexec = 1; break; #endif #ifdef O_EXCL case 'x': exclusive = 1; break; #endif case 'f': state->strategy = Z_FILTERED; break; case 'h': state->strategy = Z_HUFFMAN_ONLY; break; case 'R': state->strategy = Z_RLE; break; case 'F': state->strategy = Z_FIXED; break; case 'T': state->direct = 1; break; default: /* could consider as an error, but just ignore */ ; } mode++; } /* must provide an "r", "w", or "a" */ if (state->mode == GZ_NONE) { free(state); return NULL; } /* can't force transparent read */ if (state->mode == 
GZ_READ) { if (state->direct) { free(state); return NULL; } state->direct = 1; /* for empty file */ } /* save the path name for error messages */ #ifdef WIDECHAR if (fd == -2) { len = wcstombs(NULL, path, 0); if (len == (z_size_t)-1) len = 0; } else #endif len = strlen((const char *)path); state->path = (char *)malloc(len + 1); if (state->path == NULL) { free(state); return NULL; } #ifdef WIDECHAR if (fd == -2) if (len) wcstombs(state->path, path, len + 1); else *(state->path) = 0; else #endif #if !defined(NO_snprintf) && !defined(NO_vsnprintf) (void)snprintf(state->path, len + 1, "%s", (const char *)path); #else strcpy(state->path, path); #endif /* compute the flags for open() */ oflag = #ifdef O_LARGEFILE O_LARGEFILE | #endif #ifdef O_BINARY O_BINARY | #endif #ifdef O_CLOEXEC (cloexec ? O_CLOEXEC : 0) | #endif (state->mode == GZ_READ ? O_RDONLY : (O_WRONLY | O_CREAT | #ifdef O_EXCL (exclusive ? O_EXCL : 0) | #endif (state->mode == GZ_WRITE ? O_TRUNC : O_APPEND))); /* open the file with the appropriate flags (or just use fd) */ state->fd = fd > -1 ? fd : ( #ifdef WIDECHAR fd == -2 ? 
_wopen(path, oflag, 0666) : #endif open((const char *)path, oflag, 0666)); if (state->fd == -1) { free(state->path); free(state); return NULL; } if (state->mode == GZ_APPEND) { LSEEK(state->fd, 0, SEEK_END); /* so gzoffset() is correct */ state->mode = GZ_WRITE; /* simplify later checks */ } /* save the current position for rewinding (only if reading) */ if (state->mode == GZ_READ) { state->start = LSEEK(state->fd, 0, SEEK_CUR); if (state->start == -1) state->start = 0; } /* initialize stream */ gz_reset(state); /* return stream */ return (gzFile)state; } /* -- see zlib.h -- */ gzFile ZEXPORT gzopen(path, mode) const char *path; const char *mode; { return gz_open(path, -1, mode); } /* -- see zlib.h -- */ gzFile ZEXPORT gzopen64(path, mode) const char *path; const char *mode; { return gz_open(path, -1, mode); } /* -- see zlib.h -- */ gzFile ZEXPORT gzdopen(fd, mode) int fd; const char *mode; { char *path; /* identifier for error messages */ gzFile gz; if (fd == -1 || (path = (char *)malloc(7 + 3 * sizeof(int))) == NULL) return NULL; #if !defined(NO_snprintf) && !defined(NO_vsnprintf) (void)snprintf(path, 7 + 3 * sizeof(int), "<fd:%d>", fd); #else sprintf(path, "<fd:%d>", fd); /* for debugging */ #endif gz = gz_open(path, fd, mode); free(path); return gz; } /* -- see zlib.h -- */ #ifdef WIDECHAR gzFile ZEXPORT gzopen_w(path, mode) const wchar_t *path; const char *mode; { return gz_open(path, -2, mode); } #endif /* -- see zlib.h -- */ int ZEXPORT gzbuffer(file, size) gzFile file; unsigned size; { gz_statep state; /* get internal structure and check integrity */ if (file == NULL) return -1; state = (gz_statep)file; if (state->mode != GZ_READ && state->mode != GZ_WRITE) return -1; /* make sure we haven't already allocated memory */ if (state->size != 0) return -1; /* check and set requested size */ if ((size << 1) < size) return -1; /* need to be able to double it */ if (size < 2) size = 2; /* need two bytes to check magic header */ state->want = size; return 0; } /* -- 
see zlib.h -- */ int ZEXPORT gzrewind(file) gzFile file; { gz_statep state; /* get internal structure */ if (file == NULL) return -1; state = (gz_statep)file; /* check that we're reading and that there's no error */ if (state->mode != GZ_READ || (state->err != Z_OK && state->err != Z_BUF_ERROR)) return -1; /* back up and start over */ if (LSEEK(state->fd, state->start, SEEK_SET) == -1) return -1; gz_reset(state); return 0; } /* -- see zlib.h -- */ z_off64_t ZEXPORT gzseek64(file, offset, whence) gzFile file; z_off64_t offset; int whence; { unsigned n; z_off64_t ret; gz_statep state; /* get internal structure and check integrity */ if (file == NULL) return -1; state = (gz_statep)file; if (state->mode != GZ_READ && state->mode != GZ_WRITE) return -1; /* check that there's no error */ if (state->err != Z_OK && state->err != Z_BUF_ERROR) return -1; /* can only seek from start or relative to current position */ if (whence != SEEK_SET && whence != SEEK_CUR) return -1; /* normalize offset to a SEEK_CUR specification */ if (whence == SEEK_SET) offset -= state->x.pos; else if (state->seek) offset += state->skip; state->seek = 0; /* if within raw area while reading, just go there */ if (state->mode == GZ_READ && state->how == COPY && state->x.pos + offset >= 0) { ret = LSEEK(state->fd, offset - state->x.have, SEEK_CUR); if (ret == -1) return -1; state->x.have = 0; state->eof = 0; state->past = 0; state->seek = 0; gz_error(state, Z_OK, NULL); state->strm.avail_in = 0; state->x.pos += offset; return state->x.pos; } /* calculate skip amount, rewinding if needed for back seek when reading */ if (offset < 0) { if (state->mode != GZ_READ) /* writing -- can't go backwards */ return -1; offset += state->x.pos; if (offset < 0) /* before start of file! 
*/ return -1; if (gzrewind(file) == -1) /* rewind, then skip to offset */ return -1; } /* if reading, skip what's in output buffer (one less gzgetc() check) */ if (state->mode == GZ_READ) { n = GT_OFF(state->x.have) || (z_off64_t)state->x.have > offset ? (unsigned)offset : state->x.have; state->x.have -= n; state->x.next += n; state->x.pos += n; offset -= n; } /* request skip (if not zero) */ if (offset) { state->seek = 1; state->skip = offset; } return state->x.pos + offset; } /* -- see zlib.h -- */ z_off_t ZEXPORT gzseek(file, offset, whence) gzFile file; z_off_t offset; int whence; { z_off64_t ret; ret = gzseek64(file, (z_off64_t)offset, whence); return ret == (z_off_t)ret ? (z_off_t)ret : -1; } /* -- see zlib.h -- */ z_off64_t ZEXPORT gztell64(file) gzFile file; { gz_statep state; /* get internal structure and check integrity */ if (file == NULL) return -1; state = (gz_statep)file; if (state->mode != GZ_READ && state->mode != GZ_WRITE) return -1; /* return position */ return state->x.pos + (state->seek ? state->skip : 0); } /* -- see zlib.h -- */ z_off_t ZEXPORT gztell(file) gzFile file; { z_off64_t ret; ret = gztell64(file); return ret == (z_off_t)ret ? (z_off_t)ret : -1; } /* -- see zlib.h -- */ z_off64_t ZEXPORT gzoffset64(file) gzFile file; { z_off64_t offset; gz_statep state; /* get internal structure and check integrity */ if (file == NULL) return -1; state = (gz_statep)file; if (state->mode != GZ_READ && state->mode != GZ_WRITE) return -1; /* compute and return effective offset in file */ offset = LSEEK(state->fd, 0, SEEK_CUR); if (offset == -1) return -1; if (state->mode == GZ_READ) /* reading */ offset -= state->strm.avail_in; /* don't count buffered input */ return offset; } /* -- see zlib.h -- */ z_off_t ZEXPORT gzoffset(file) gzFile file; { z_off64_t ret; ret = gzoffset64(file); return ret == (z_off_t)ret ? 
(z_off_t)ret : -1; } /* -- see zlib.h -- */ int ZEXPORT gzeof(file) gzFile file; { gz_statep state; /* get internal structure and check integrity */ if (file == NULL) return 0; state = (gz_statep)file; if (state->mode != GZ_READ && state->mode != GZ_WRITE) return 0; /* return end-of-file state */ return state->mode == GZ_READ ? state->past : 0; } /* -- see zlib.h -- */ const char * ZEXPORT gzerror(file, errnum) gzFile file; int *errnum; { gz_statep state; /* get internal structure and check integrity */ if (file == NULL) return NULL; state = (gz_statep)file; if (state->mode != GZ_READ && state->mode != GZ_WRITE) return NULL; /* return error information */ if (errnum != NULL) *errnum = state->err; return state->err == Z_MEM_ERROR ? "out of memory" : (state->msg == NULL ? "" : state->msg); } /* -- see zlib.h -- */ void ZEXPORT gzclearerr(file) gzFile file; { gz_statep state; /* get internal structure and check integrity */ if (file == NULL) return; state = (gz_statep)file; if (state->mode != GZ_READ && state->mode != GZ_WRITE) return; /* clear error and end-of-file */ if (state->mode == GZ_READ) { state->eof = 0; state->past = 0; } gz_error(state, Z_OK, NULL); } /* Create an error message in allocated memory and set state->err and state->msg accordingly. Free any previous error message already there. Do not try to free or allocate space if the error is Z_MEM_ERROR (out of memory). Simply save the error message as a static string. If there is an allocation failure constructing the error message, then convert the error to out of memory. 
*/ void ZLIB_INTERNAL gz_error(state, err, msg) gz_statep state; int err; const char *msg; { /* free previously allocated message and clear */ if (state->msg != NULL) { if (state->err != Z_MEM_ERROR) free(state->msg); state->msg = NULL; } /* if fatal, set state->x.have to 0 so that the gzgetc() macro fails */ if (err != Z_OK && err != Z_BUF_ERROR) state->x.have = 0; /* set error code, and if no message, then done */ state->err = err; if (msg == NULL) return; /* for an out of memory error, return literal string when requested */ if (err == Z_MEM_ERROR) return; /* construct error message with path */ if ((state->msg = (char *)malloc(strlen(state->path) + strlen(msg) + 3)) == NULL) { state->err = Z_MEM_ERROR; return; } #if !defined(NO_snprintf) && !defined(NO_vsnprintf) (void)snprintf(state->msg, strlen(state->path) + strlen(msg) + 3, "%s%s%s", state->path, ": ", msg); #else strcpy(state->msg, state->path); strcat(state->msg, ": "); strcat(state->msg, msg); #endif } #ifndef INT_MAX /* portably return maximum value for an int (when limits.h presumed not available) -- we need to do this to cover cases where 2's complement not used, since C standard permits 1's complement and sign-bit representations, otherwise we could just use ((unsigned)-1) >> 1 */ unsigned ZLIB_INTERNAL gz_intmax() { unsigned p, q; p = 1; do { q = p; p <<= 1; p++; } while (p > q); return q >> 1; } #endif
{ "pile_set_name": "Github" }
############################################################ # <bsn.cl fy=2013 v=none> # # Copyright 2013, 2014 BigSwitch Networks, Inc. # # # # </bsn.cl> ############################################################ ARCH := amd64 PACKAGE_NAMES=onlp-x86-64-accton-as7716-32x-r0 include ../../../../../make/debuild.mk
{ "pile_set_name": "Github" }
/*global doSweep */ $(document).ready(function() { "use strict"; var arr = [20, 30, 44, 54, 55, 11, 78, 14, 13, 79, 12, 98]; doSweep("shellsortCON2", arr, 8); });
{ "pile_set_name": "Github" }
module network { export class PlayCardsResolver extends IResolver{ public constructor() { super(); } /** * 发送消息封包 */ protected Package(data:any):any{ let event = { action:131, data:data }; event = data; return event; } /** * 接收消息解包 */ protected Parse(data:any){ return data; } } }
{ "pile_set_name": "Github" }
#coding=utf-8
'''
Created on 2014-12-16

@author: Devuser
'''
import os

from gatesidelib.filehelper import FileHelper
from gatesidelib.common.simplelogger import SimpleLogger


class GitHelper(object):
    '''
    git command helper

    Builds git command lines from the templates below, runs them with their
    output appended (>>) to a temp log file, and parses that log.
    '''

    # Command templates; placeholders are substituted by get_gitcommand and
    # the temp log path is appended after the trailing ">> ".
    git_clonecommand="git clone -b {BRANCHNAME} {REPERTORY} {PROJECTPATH} >> "
    git_pullcommand="git --git-dir={PROJECTPATH} pull {REPERTORY} >> "
    git_logcommand="git --git-dir={PROJECTPATH} log {REVERSIONRANGE} >> "
    git_diffcommand="git --git-dir={PROJECTPATH} diff {STARTVERSION} {ENDVERSION} --stat >> "

    def __init__(self, giturl, projectpath, logfilepath):
        '''
        giturl: git repository address
        projectpath: local working copy / git-dir path
        logfilepath: temp file that command output is redirected into
        '''
        self.project = projectpath
        self.url = giturl
        self.templog = logfilepath

    def get_changecode_lines(self, startversion, endversion):
        '''Return [added, deleted] line counts between two revisions.'''
        gitcommandtext = self.get_gitcommand(GitHelper.git_diffcommand, startversion, endversion, "", "")
        # FIX: the command appends (>>) to the log, so a stale log from a
        # previous call corrupted the result; delete it first like the other
        # log-reading methods do.  close() also waits for the child process
        # to finish before the log is parsed (bare os.popen() raced with it).
        FileHelper.delete_file(self.templog)
        os.popen(gitcommandtext).close()
        linecounts = self.get_linecounts(self.templog)
        return linecounts

    def clone_project(self, branchname):
        '''Clone the configured repository (branchname) into self.project.'''
        if os.path.exists(self.project):
            FileHelper.delete_dir_all(self.project)
        gitcommandtext = self.get_gitcommand(GitHelper.git_clonecommand, "", "", "", branchname)
        SimpleLogger.info(gitcommandtext)
        # close() blocks until the clone completes instead of fire-and-forget.
        os.popen(gitcommandtext).close()

    def pull_project(self):
        '''Pull the configured repository into the local working copy.'''
        gitcommandtext = self.get_gitcommand(GitHelper.git_pullcommand, "", "", "", "")
        SimpleLogger.info(gitcommandtext)
        os.popen(gitcommandtext).close()

    def get_commitlog(self, reversionNumber):
        '''Run `git log` for the given revision range and return its lines.'''
        gitcommandtext = self.get_gitcommand(GitHelper.git_logcommand, "", "", reversionNumber, "")
        FileHelper.delete_file(self.templog)
        # Wait for completion so the log is fully written before reading.
        os.popen(gitcommandtext).close()
        return FileHelper.read_lines(self.templog)

    def save_commitlog(self, reversionNumber):
        '''Run `git log` for the given revision range, leaving output in templog.'''
        gitcommandtext = self.get_gitcommand(GitHelper.git_logcommand, "", "", reversionNumber, "")
        FileHelper.delete_file(self.templog)
        os.popen(gitcommandtext).close()

    def get_allcodelines(self, startversion, endversion):
        '''Return [added, deleted] line counts from `git diff --stat`.'''
        gitcommandtext = self.get_gitcommand(GitHelper.git_diffcommand, startversion, endversion, "", "")
        FileHelper.delete_file(self.templog)
        os.popen(gitcommandtext).close()
        # FIX: this called FileHelper.get_linecounts, but the diff --stat
        # parser is defined on this class (see get_changecode_lines).
        linecounts = self.get_linecounts(self.templog)
        return linecounts

    def get_gitcommand(self, command, startversion, endversion, versionNumber, branchname):
        '''Fill a command template's placeholders and append the log path.'''
        commandtext = command.replace("{STARTVERSION}", startversion)
        commandtext = commandtext.replace("{ENDVERSION}", endversion)
        commandtext = commandtext.replace("{REPERTORY}", self.url)
        commandtext = commandtext.replace("{PROJECTPATH}", self.project)
        commandtext = commandtext.replace("{REVERSIONRANGE}", versionNumber)
        commandtext = commandtext.replace("{BRANCHNAME}", branchname)
        return commandtext + self.templog

    def get_linecounts(self, filename):
        '''
        Parse the last line of a `git diff --stat` log (the summary line,
        e.g. "3 files changed, 10 insertions(+), 2 deletions(-)") and return
        [new_codeline_counts, deleted_codeline_counts].
        '''
        # FIX: use a context manager so the file handle is closed (it leaked
        # before), and initialise both counters up front so an empty log or a
        # summary line with an unexpected number of fields no longer raises
        # IndexError / NameError.
        with open(filename, 'r') as filehandler:
            linelist = [line for line in filehandler]
        linelist.reverse()
        new_codeline_counts = 0
        deleted_codeline_counts = 0
        if linelist and linelist[0]:
            if "," in linelist[0]:
                changeinfos = linelist[0].split(',')
                if len(changeinfos) == 2:
                    # only insertions reported
                    new_codeline_counts = self.get_number_from_string(changeinfos[1])
                if len(changeinfos) == 3:
                    new_codeline_counts = self.get_number_from_string(changeinfos[1])
                    deleted_codeline_counts = self.get_number_from_string(changeinfos[2])
        return [new_codeline_counts, deleted_codeline_counts]

    def get_number_from_string(self, str_contains_num):
        '''Extract all digit characters from a string; returns them as a
        string (callers historically rely on this, so the type is kept).'''
        tempstr = str_contains_num.strip()
        number = ""
        for char in tempstr:
            if char.isdigit():
                number = number + char
        return number
{ "pile_set_name": "Github" }
<html> <head> <title>CSS background/foreground images</title> <style type="text/css"> body { font-family:Verdana; font-size:10pt; width:100%%; height:100%%; background-color: window; padding:10px; margin:0; } header { font-size:150%; } #c1 { background-color: threedface; } img { margin: 6px; } img:hover { foreground-image-transformation: contrast-brightness-gamma(0.5,0.5, 1.3); } img:active { foreground-image-transformation: contrast-brightness-gamma(0.25,0.95, 1.0); } </style> </head> <body> <header>Image color transformation. Move mouse over the block below. Image will change gamma. On press image will change contrast and brightness.</header> <p id="c1"><img src="images/flowers.jpg" /> <img src="images/icon.png" /></p> <pre> :hover -> foreground-image-transformation: contrast-brightness-gamma(0.5,0.5, 1.3); :active -> foreground-image-transformation: contrast-brightness-gamma(0.25,0.95, 1.0); </pre> </body> </html>
{ "pile_set_name": "Github" }
# # Copyright (c) 2008-2020 the Urho3D project. # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. 
# # Find DirectFB development library # # DIRECTFB_FOUND # DIRECTFB_INCLUDE_DIRS # DIRECTFB_LIBRARIES # DIRECTFB_VERSION # find_path (DIRECTFB_INCLUDE_DIRS NAMES directfb.h PATH_SUFFIXES directfb DOC "DirectFB include directory") find_library (DIRECTFB_LIBRARIES NAMES directfb DOC "DirectFB library") if (NOT DIRECTFB_VERSION AND DIRECTFB_INCLUDE_DIRS AND EXISTS ${DIRECTFB_INCLUDE_DIRS}/directfb_version.h) # Only do this once file (STRINGS ${DIRECTFB_INCLUDE_DIRS}/directfb_version.h DIRECTFB_VERSION REGEX "^.*DIRECTFB_(MAJOR|MINOR|MACRO)_VERSION.+\([^\)]*\).*$") string (REGEX REPLACE "^.*DIRECTFB_MAJOR_VERSION.+\(([^\)]*)\).*$" \\1 DIRECTFB_MAJOR_VERSION "${DIRECTFB_VERSION}") # Stringify to guard against empty variable string (REGEX REPLACE "^.*DIRECTFB_MINOR_VERSION.+\(([^\)]*)\).*$" \\1 DIRECTFB_MINOR_VERSION "${DIRECTFB_VERSION}") string (REGEX REPLACE "^.*DIRECTFB_MICRO_VERSION.+\(([^\)]*)\).*$" \\1 DIRECTFB_MICRO_VERSION "${DIRECTFB_VERSION}") set (DIRECTFB_VERSION "${DIRECTFB_MAJOR_VERSION}.${DIRECTFB_MINOR_VERSION}.${DIRECTFB_MICRO_VERSION}" CACHE INTERNAL "DirectFB version") endif () include (FindPackageHandleStandardArgs) find_package_handle_standard_args (DirectFB REQUIRED_VARS DIRECTFB_LIBRARIES DIRECTFB_INCLUDE_DIRS VERSION_VAR DIRECTFB_VERSION FAIL_MESSAGE "Could NOT find DirectFB development library") mark_as_advanced (DIRECTFB_INCLUDE_DIRS DIRECTFB_LIBRARIES)
{ "pile_set_name": "Github" }
/*
 * Copyright (C) 2016 Cavium, Inc.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of version 2 of the GNU General Public License
 * as published by the Free Software Foundation.
 */

#ifndef __REQUEST_MANAGER_H
#define __REQUEST_MANAGER_H

#include "cpt_common.h"

#define TIME_IN_RESET_COUNT  5   /* polls allowed while a VF is in reset */
#define COMPLETION_CODE_SIZE 8   /* bytes reserved for the completion code */
#define COMPLETION_CODE_INIT 0   /* initial value before hardware writes it */
#define PENDING_THOLD  100       /* pending-queue threshold */
#define MAX_SG_IN_CNT  12        /* max gather (input) scatter-list entries */
#define MAX_SG_OUT_CNT 13        /* max scatter (output) scatter-list entries */
#define SG_LIST_HDR_SIZE 8       /* bytes of header preceding the SG list */
#define MAX_BUF_CNT    16        /* max input/output buffers per request */

/* Per-request control word; bit layout mirrors the hardware field order. */
union ctrl_info {
	u32 flags;
	struct {
#if defined(__BIG_ENDIAN_BITFIELD)
		u32 reserved0:26;
		u32 grp:3;	/* Group bits */
		u32 dma_mode:2;	/* DMA mode */
		u32 se_req:1;	/* To SE core */
#else
		u32 se_req:1;	/* To SE core */
		u32 dma_mode:2;	/* DMA mode */
		u32 grp:3;	/* Group bits */
		u32 reserved0:26;
#endif
	} s;
};

/* Opcode as a single 16-bit word or as major/minor halves. */
union opcode_info {
	u16 flags;
	struct {
		u8 major;
		u8 minor;
	} s;
};

/* Core-facing request header: opcode, two parameters and data length. */
struct cptvf_request {
	union opcode_info opcode;
	u16 param1;
	u16 param2;
	u16 dlen;
};

/* One data buffer: kernel virtual address, its DMA mapping and size. */
struct buf_ptr {
	u8 *vptr;
	dma_addr_t dma_addr;
	u16 size;
};

/* Caller-supplied description of a crypto request. */
struct cpt_request_info {
	u8 incnt;		/* Number of input buffers */
	u8 outcnt;		/* Number of output buffers */
	u16 rlen;		/* Output length */
	union ctrl_info ctrl;	/* User control information */
	struct cptvf_request req; /* Request Information (Core specific) */
	struct buf_ptr in[MAX_BUF_CNT];
	struct buf_ptr out[MAX_BUF_CNT];
	void (*callback)(int, void *); /* Kernel ASYNC request callback */
	void *callback_arg;	/* Kernel ASYNC request callback arg */
};

/* One 4-entry component of a hardware scatter/gather list. */
struct sglist_component {
	union {
		u64 len;
		struct {
			u16 len0;
			u16 len1;
			u16 len2;
			u16 len3;
		} s;
	} u;
	u64 ptr0;
	u64 ptr1;
	u64 ptr2;
	u64 ptr3;
};

/* Driver-side bookkeeping for an in-flight request. */
struct cpt_info_buffer {
	struct cpt_vf *cptvf;		/* owning virtual function */
	unsigned long time_in;		/* jiffies when the request was queued */
	u8 extra_time;			/* extra polling grace already granted */

	struct cpt_request_info *req;	/* back-pointer to the user request */
	dma_addr_t dptr_baddr;		/* DMA address of the input (gather) list */
	u32 dlen;			/* input data length */
	dma_addr_t rptr_baddr;		/* DMA address of the output (scatter) list */
	dma_addr_t comp_baddr;		/* DMA address of the completion word */
	u8 *in_buffer;			/* CPU address of the gather list buffer */
	u8 *out_buffer;			/* CPU address of the scatter list buffer */
	u8 *gather_components;
	u8 *scatter_components;

	struct pending_entry *pentry;	/* slot in the pending queue */
	/* volatile: written asynchronously by the CPT hardware */
	volatile u64 *completion_addr;
	volatile u64 *alternate_caddr;
};

/*
 * CPT_INST_S software command definitions
 * Words EI (0-3)
 */
union vq_cmd_word0 {
	u64 u64;
	struct {
		u16 opcode;
		u16 param1;
		u16 param2;
		u16 dlen;
	} s;
};

union vq_cmd_word3 {
	u64 u64;
	struct {
#if defined(__BIG_ENDIAN_BITFIELD)
		u64 grp:3;
		u64 cptr:61;
#else
		u64 cptr:61;
		u64 grp:3;
#endif
	} s;
};

/* Fully-assembled virtual-queue command as written to the instruction queue. */
struct cpt_vq_command {
	union vq_cmd_word0 cmd;
	u64 dptr;
	u64 rptr;
	union vq_cmd_word3 cptr;
};

/* Drain completed commands from queue qno of the given VF. */
void vq_post_process(struct cpt_vf *cptvf, u32 qno);
/* Submit a request to the VF; returns 0 on success or a negative errno. */
int process_request(struct cpt_vf *cptvf, struct cpt_request_info *req);
#endif /* __REQUEST_MANAGER_H */
{ "pile_set_name": "Github" }
<?php
namespace GuzzleHttp\Exception;

use GuzzleHttp\Promise\PromiseInterface;
use Psr\Http\Message\RequestInterface;
use Psr\Http\Message\ResponseInterface;
use Psr\Http\Message\UriInterface;

/**
 * HTTP Request exception
 */
class RequestException extends TransferException
{
    /** @var RequestInterface The request that triggered the exception. */
    private $request;

    /** @var ResponseInterface|null The response, when one was received. */
    private $response;

    /** @var array Handler-specific debug context. */
    private $handlerContext;

    public function __construct(
        $message,
        RequestInterface $request,
        ResponseInterface $response = null,
        \Exception $previous = null,
        array $handlerContext = []
    ) {
        // Adopt the HTTP status code as the exception code, but only when a
        // concrete (non-promise) response is actually available.
        $code = 0;
        if ($response && !($response instanceof PromiseInterface)) {
            $code = $response->getStatusCode();
        }
        parent::__construct($message, $code, $previous);

        $this->request = $request;
        $this->response = $response;
        $this->handlerContext = $handlerContext;
    }

    /**
     * Wrap non-RequestExceptions with a RequestException
     *
     * @param RequestInterface $request
     * @param \Exception       $e
     *
     * @return RequestException
     */
    public static function wrapException(RequestInterface $request, \Exception $e)
    {
        if ($e instanceof RequestException) {
            return $e;
        }

        return new RequestException($e->getMessage(), $request, null, $e);
    }

    /**
     * Factory method to create a new exception with a normalized error message
     *
     * @param RequestInterface  $request  Request
     * @param ResponseInterface $response Response received
     * @param \Exception        $previous Previous exception
     * @param array             $ctx      Optional handler context
     *
     * @return self
     */
    public static function create(
        RequestInterface $request,
        ResponseInterface $response = null,
        \Exception $previous = null,
        array $ctx = []
    ) {
        // Without a response there is nothing to classify: report a generic failure.
        if (!$response) {
            return new self('Error completing request', $request, null, $previous, $ctx);
        }

        $statusCode = $response->getStatusCode();
        // Truncating integer division by 100 yields the status class (4xx, 5xx, ...).
        $level = (int) ($statusCode / 100);
        if ($level === 4) {
            $label = 'Client error';
            $className = ClientException::class;
        } elseif ($level === 5) {
            $label = 'Server error';
            $className = ServerException::class;
        } else {
            $label = 'Unsuccessful request';
            $className = __CLASS__;
        }

        // Mask any password embedded in the request URI before it lands in a message.
        $uri = static::obfuscateUri($request->getUri());

        // Client Error: `GET /` resulted in a `404 Not Found` response:
        // <html> ... (truncated)
        $method = $request->getMethod();
        $reason = $response->getReasonPhrase();
        $message = "{$label}: `{$method} {$uri}` resulted in a `{$statusCode} {$reason}` response";

        $summary = static::getResponseBodySummary($response);
        if ($summary !== null) {
            $message .= ":\n{$summary}\n";
        }

        return new $className($message, $request, $response, $previous, $ctx);
    }

    /**
     * Get a short summary of the response
     *
     * Will return `null` if the response is not printable.
     *
     * @param ResponseInterface $response
     *
     * @return string|null
     */
    public static function getResponseBodySummary(ResponseInterface $response)
    {
        return \GuzzleHttp\Psr7\get_message_body_summary($response);
    }

    /**
     * Obfuscates URI if there is a username and a password present
     *
     * @param UriInterface $uri
     *
     * @return UriInterface
     */
    private static function obfuscateUri(UriInterface $uri)
    {
        $userInfo = $uri->getUserInfo();
        $colonAt = strpos($userInfo, ':');

        if ($colonAt === false) {
            // No password component present: nothing to mask.
            return $uri;
        }

        return $uri->withUserInfo(substr($userInfo, 0, $colonAt), '***');
    }

    /**
     * Get the request that caused the exception
     *
     * @return RequestInterface
     */
    public function getRequest()
    {
        return $this->request;
    }

    /**
     * Get the associated response
     *
     * @return ResponseInterface|null
     */
    public function getResponse()
    {
        return $this->response;
    }

    /**
     * Check if a response was received
     *
     * @return bool
     */
    public function hasResponse()
    {
        return $this->response !== null;
    }

    /**
     * Get contextual information about the error from the underlying handler.
     *
     * The contents of this array will vary depending on which handler you are
     * using. It may also be just an empty array. Relying on this data will
     * couple you to a specific handler, but can give more debug information
     * when needed.
     *
     * @return array
     */
    public function getHandlerContext()
    {
        return $this->handlerContext;
    }
}
{ "pile_set_name": "Github" }
# SPDX-License-Identifier: GPL-2.0+
#
# Makefile for the HISILICON network device drivers.
#

# Pick up the shared hns3 framework headers.
ccflags-y := -I $(srctree)/drivers/net/ethernet/hisilicon/hns3

obj-$(CONFIG_HNS3_HCLGE) += hclge.o
hclge-objs = hclge_main.o hclge_cmd.o hclge_mdio.o hclge_tm.o hclge_mbx.o

# DCB support is linked into hclge.o only when CONFIG_HNS3_DCB is enabled.
hclge-$(CONFIG_HNS3_DCB) += hclge_dcb.o
{ "pile_set_name": "Github" }
using System;
using System.Collections.Generic;
using System.ComponentModel;
using AudioBand.AudioSource;

namespace AudioBand.TextFormatting
{
    /// <summary>
    /// A placeholder text that has values based on the current song.
    /// </summary>
    public abstract class TextPlaceholder
    {
        // Property names registered via AddSessionPropertyFilter. When the set
        // is empty, every audio-session property change is forwarded (see
        // AudioSessionOnPropertyChanged below).
        private readonly HashSet<string> _propertyFilter = new HashSet<string>();

        /// <summary>
        /// Initializes a new instance of the <see cref="TextPlaceholder"/> class.
        /// </summary>
        /// <param name="parameters">The parameters passed to the text format.</param>
        /// <param name="audioSession">The audio session to use for the placeholder value.</param>
        protected TextPlaceholder(IEnumerable<TextPlaceholderParameter> parameters, IAudioSession audioSession)
        {
            Session = audioSession;
            // NOTE(review): the handler is never unsubscribed; presumably the
            // placeholder lives as long as the session -- confirm ownership.
            Session.PropertyChanged += AudioSessionOnPropertyChanged;
            // TODO parameters -- the parameters argument is currently ignored,
            // so GetParameter below always misses until this is implemented.
        }

        /// <summary>
        /// Occurs when the placeholders text has changed.
        /// </summary>
        public event EventHandler TextChanged;

        /// <summary>
        /// Gets the audio session.
        /// </summary>
        protected IAudioSession Session { get; private set; }

        /// <summary>
        /// Gets the current text value for the placeholder.
        /// </summary>
        /// <returns>The value.</returns>
        public abstract string GetText();

        /// <summary>
        /// Raises the <see cref="TextChanged"/> event.
        /// </summary>
        protected void RaiseTextChanged()
        {
            TextChanged?.Invoke(this, EventArgs.Empty);
        }

        /// <summary>
        /// Gets the parameter from the name.
        /// </summary>
        /// <param name="parameterName">The parameter name.</param>
        /// <returns>The value of the parameter or null if not passed in.</returns>
        protected string GetParameter(string parameterName)
        {
            // Placeholder implementation: parameters are not stored yet (see the
            // TODO in the constructor), so every lookup returns null.
            return null;
        }

        /// <summary>
        /// Adds a filter for <see cref="OnAudioSessionPropertyChanged"/>.
        /// </summary>
        /// <param name="audioSessionPropertyName">The property name to filter.</param>
        protected void AddSessionPropertyFilter(string audioSessionPropertyName)
        {
            _propertyFilter.Add(audioSessionPropertyName);
        }

        /// <summary>
        /// Called when the audio session property value changes.
        /// </summary>
        /// <param name="propertyName">The name of the property that changed.</param>
        protected virtual void OnAudioSessionPropertyChanged(string propertyName)
        {
        }

        // Bridges the session's PropertyChanged event into the virtual hook,
        // honoring the filter: an empty filter means "interested in everything".
        private void AudioSessionOnPropertyChanged(object sender, PropertyChangedEventArgs e)
        {
            if (_propertyFilter.Contains(e.PropertyName) || _propertyFilter.Count == 0)
            {
                OnAudioSessionPropertyChanged(e.PropertyName);
            }
        }
    }
}
{ "pile_set_name": "Github" }
import decimalAdjust from "discourse/lib/decimal-adjust";

/**
 * Round `value` at the given base-10 exponent by delegating to the shared
 * `decimalAdjust` helper in "round" mode.
 *
 * @param {number} value - The number to round.
 * @param {number} exp - The exponent to round at (presumably MDN-style
 *   decimal adjustment, e.g. -2 rounds to two decimal places -- see
 *   discourse/lib/decimal-adjust for the exact semantics).
 * @returns {number} The rounded value.
 */
export default function round(value, exp) {
  return decimalAdjust("round", value, exp);
}
{ "pile_set_name": "Github" }
/*
 * Copyright (C) 2016 Red Hat, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.syndesis.server.api.generator;

import io.syndesis.common.model.api.APISummary;
import io.syndesis.common.model.connection.ConfigurationProperty;
import io.syndesis.common.model.connection.Connector;
import io.syndesis.common.model.connection.ConnectorGroup;
import io.syndesis.common.model.connection.ConnectorSettings;
import io.syndesis.common.model.connection.ConnectorTemplate;

import org.junit.Test;

import static org.assertj.core.api.Assertions.assertThat;

/**
 * Unit tests for {@code ConnectorGenerator.baseConnectorFrom}: verifies that
 * the base connector is assembled from the template, the settings and the
 * name/description hooks.
 */
public class ConnectorGeneratorTest {

    // Minimal anonymous subclass under test: generate()/info() are not
    // exercised by these tests and simply return null; the name/description
    // hooks return fixed values that the assertions below rely on.
    private final ConnectorGenerator generator = new ConnectorGenerator(new Connector.Builder()
        .addTags("from-connector")
        .build()) {
        @Override
        public Connector generate(final ConnectorTemplate connectorTemplate, final ConnectorSettings connectorSettings) {
            return null;
        }

        @Override
        public APISummary info(final ConnectorTemplate connectorTemplate, final ConnectorSettings connectorSettings) {
            return null;
        }

        @Override
        protected String determineConnectorDescription(final ConnectorTemplate connectorTemplate, final ConnectorSettings connectorSettings) {
            return "test-description";
        }

        @Override
        protected String determineConnectorName(final ConnectorTemplate connectorTemplate, final ConnectorSettings connectorSettings) {
            return "test-name";
        }
    };

    // Shared fixture: a template with a connector group and two (empty)
    // connector properties.
    private final ConnectorTemplate template = new ConnectorTemplate.Builder()
        .id("template-id")
        .connectorGroup(new ConnectorGroup.Builder().id("template-group").build())
        .putProperty("property1", new ConfigurationProperty.Builder().build())
        .putProperty("property2", new ConfigurationProperty.Builder().build())
        .build();

    @Test
    public void shouldCreateBaseConnectors() {
        final ConnectorSettings settings = new ConnectorSettings.Builder().putConfiguredProperty("property2", "value2").build();

        final Connector connector = generator.baseConnectorFrom(template, settings);

        // "id" and "icon" are generated, so they are excluded from the
        // structural comparison and the icon is checked separately below.
        assertThat(connector).isEqualToIgnoringGivenFields(
            new Connector.Builder()
                .name("test-name")
                .description("test-description")
                .addTags("from-connector")
                .connectorGroup(template.getConnectorGroup())
                .connectorGroupId("template-group")
                .properties(template.getConnectorProperties())
                .putConfiguredProperty("property2", "value2")
                .build(),
            "id", "icon");
        // NOTE(review): the "dummy" SVG data URL presumably comes from
        // ConnectorGenerator's default icon handling (not visible in this file).
        assertThat(connector.getIcon()).isEqualTo("data:image/svg+xml,dummy");
    }

    @Test
    public void shouldCreateBaseConnectorsWithGivenNameAndDescription() {
        // An explicit name/description in the settings must win over the
        // determineConnectorName/-Description hooks.
        final ConnectorSettings settings = new ConnectorSettings.Builder().name("given-name").description("given-description")
            .putConfiguredProperty("property2", "value2").build();

        final Connector connector = generator.baseConnectorFrom(template, settings);

        assertThat(connector).isEqualToIgnoringGivenFields(
            new Connector.Builder()
                .name("given-name")
                .description("given-description")
                .addTags("from-connector")
                .connectorGroup(template.getConnectorGroup())
                .connectorGroupId("template-group")
                .properties(template.getConnectorProperties())
                .putConfiguredProperty("property2", "value2").build(),
            "id", "icon");
        assertThat(connector.getIcon()).isEqualTo("data:image/svg+xml,dummy");
    }
}
{ "pile_set_name": "Github" }
--- alias: iquaecuj6b description: Upgrade to 1.7 --- # Upgrade to 1.7 ## Overview The [1.7 release](https://github.com/graphcool/prisma/releases/tag/1.7.0) of Prisma introduces a few major changes for the deployment process of a Prisma API. These changes mainly concern the service configuration within [`prisma.yml`](!alias-ufeshusai8) and a few commands of the Prisma CLI. All changes are **backwards-compatible**, meaning there is no necessity to incorporate the changes right away. In general, the CLI will help you perform the required changes automatically where possible. There are two main cases for how the CLI is helping you with the migration: - **Your API is deployed to a [Prisma Cloud](https://www.prisma.io/cloud) server**: The CLI _automatically_ adjusts `prisma.yml` and writes the new `endpoint` property into it (while removing `service`, `stage` and `cluster`). - **Your API is _NOT_ deployed to a [Prisma Cloud](https://www.prisma.io/cloud) server**: The CLI prints a warning and provides hints how to perform the updates (see below for more info). ## Terminology - **Prisma Clusters** are renamed to **Prisma Servers** - **Development Clusters** are renamed to **Prisma Sandbox** ## Service configuration in `prisma.yml` ### New YAML structure The service configuration inside `prisma.yml` is based on a new YAML structure (find the updated docs [here](https://www.prisma.io/docs/reference/service-configuration/prisma.yml/yaml-structure-ufeshusai8)): - The `service`, `stage` and `cluster` properties have been removed. - A new property called `endpoint` has been added. The new `endpoint` effectively encodes the information of the three removed properties. - A new property called `post-deploy` has been added (see [Post deployment hooks](#post-deployment-hooks) for more info). - The `disableAuth` property has been removed. If you don't want your Prisma API to require authentication, simply omit the `secret` property. - The `schema` property has been removed. 
Note that the Prisma CLI will no longer download the GraphQL schema (commonly called `prisma.graphql`) for your Prisma API by default! If you want access to the GraphQL schema of your Prisma API, you need to configure a [post deployment hook](#post-deployment-hooks) accordingly.

#### Example: Local deployment

Consider this **outdated** version of `prisma.yml`:

```yml
service: myservice
stage: dev
cluster: local
datamodel: datamodel.graphql
```

After migrating to **Prisma 1.7**, the file will have the following structure:

```yml
endpoint: http://localhost:4466/myservice/dev
datamodel: datamodel.graphql
```

#### Example: Deploying to a Prisma Sandbox in the Cloud

Consider this **outdated** version of `prisma.yml`:

```yml
service: myservice
stage: dev
cluster: public-crocusraccoon-3/prisma-eu1
datamodel: datamodel.graphql
```

After migrating to **Prisma 1.7**, the file will have the following structure:

```yml
endpoint: https://eu1.prisma.sh/public-crocusraccoon-3/myservice/dev
datamodel: datamodel.graphql
```

### Introducing `default` service name and `default` stage

For convenience, two special values for the _service name_ and _stage_ parts of the Prisma `endpoint` have been introduced. Both values are called `default`. If not explicitly provided, the CLI will automatically infer them.

Concretely, this means that whenever the _service name_ and _stage_ are called `default`, you can omit them in the `endpoint` property of `prisma.yml`. For example:

- `http://localhost:4466/default/default` can be written as `http://localhost:4466/`
- `https://eu1.prisma.sh/public-helixgoose-752/default/default` can be written as `https://eu1.prisma.sh/public-helixgoose-752/`

This is also relevant for the `/import` and `/export` endpoints of your API.
For example:

- `http://localhost:4466/default/default/import` can be written as `http://localhost:4466/import`
- `https://eu1.prisma.sh/public-helixgoose-752/default/default/export` can be written as `https://eu1.prisma.sh/public-helixgoose-752/export`

### Post deployment hooks

In Prisma 1.7, you can specify arbitrary terminal commands to be executed by the Prisma CLI after a deployment (i.e. after `prisma deploy` has terminated).

Here is an example that performs three tasks after a deployment:

1. Print "Deployment finished"
2. Download the GraphQL schema for the `db` project specified in `.graphqlconfig.yml`
3. Invoke code generation as specified in `.graphqlconfig.yml`

```yml
# in database/prisma.yml
hooks:
  post-deploy:
    - echo "Deployment finished"
    - graphql get-schema --project db
    - graphql prepare
```

## Prisma CLI

### Deprecating `local` commands

The `prisma local` commands are being deprecated in favor of using Docker commands directly. `prisma local` provided a convenient abstraction for certain Docker workflows. In 1.7, everything related to these Docker workflows can be done manually using the [Docker CLI](https://docs.docker.com/engine/reference/commandline/cli/).

When running `prisma init` in Prisma 1.7, the CLI generates a `docker-compose.yml` file that specifies the images for two Docker containers:

- `prisma`: This is the image for the Prisma API that turns your database into a GraphQL API.
- `db`: This is the image for the connected database, e.g. `mysql`.
Here is the raw version of this generated `docker-compose.yml` file:

```yml
version: '3'
services:
  prisma:
    image: prismagraphql/prisma:1.7
    restart: always
    ports:
      - "4466:4466"
    environment:
      PRISMA_CONFIG: |
        managementApiSecret: my-server-secret-123
        port: 4466
        databases:
          default:
            connector: mysql # or `postgres`
            active: true
            host: db
            port: 3306 # or `5432` for `postgres`
            user: root
            password: prisma
  db:
    container_name: prisma-db
    image: mysql:5.7
    restart: always
    environment:
      MYSQL_USER: root
      MYSQL_ROOT_PASSWORD: prisma
```

> **Note**: You can learn more about the different properties of the `docker-compose.yml` file in the [reference](!alias-aira9zama5) or directly in the [Docker documentation](https://docs.docker.com/compose/compose-file/).

### Authenticating against Prisma servers running on Docker

When using the Prisma CLI to deploy and manage your Prisma APIs against a Docker-based [Prisma server](!alias-eu2ood0she), the CLI needs to authenticate its interactions (otherwise anyone with access to the endpoint of the server would be able to arbitrarily modify your Prisma APIs).

In previous Prisma versions, the CLI used an _asymmetric_ authentication approach based on a public/private keypair. The public key was deployed along with the Prisma cluster and the private key was stored in the _cluster registry_ as the `clusterSecret`. This `clusterSecret` was used by the CLI to authenticate its requests.

With Prisma 1.7, a _symmetric_ authentication approach is introduced. This means the key stored on the deployed Prisma server is identical to the key used by the CLI.

#### Providing the key to the Prisma server

Prisma servers running on Docker receive their keys via the `managementApiSecret` key in `docker-compose.yml`. When deploying the Prisma server using `docker-compose up -d`, the key will be stored on the server. Every request made by the CLI (e.g. `prisma deploy`) now needs to be authenticated with that key.
Here is an example where the `managementApiSecret` key is set to `my-server-secret-123`: ```yml version: '3' services: prisma: image: prismagraphql/prisma:1.7 restart: always ports: - "4466:4466" environment: PRISMA_CONFIG: | managementApiSecret: my-server-secret-123 port: 4466 databases: default: connector: mysql # or `postgres` active: true host: db port: 3306 # or `5432` for `postgres` user: root password: prisma db: container_name: prisma-db image: mysql:5.7 restart: always environment: MYSQL_USER: root MYSQL_ROOT_PASSWORD: prisma ``` #### Authenticating requests made by the Prisma CLI Whenever the CLI makes requests against the server (e.g. `prisma deploy`), it needs to authenticate using the same key that was stored on the Prisma server. But where does it get the key from? You need to explicitly set the key using the `PRISMA_MANAGEMENT_API_SECRET` environment variable. The easiest way to do so is by using a [`.env`](https://www.npmjs.com/package/dotenv)-file which is automatically "understood" by the Prisma CLI. Here is an example for a `.env`-file which defines the `my-server-secret-123` key as the `PRISMA_MANAGEMENT_API_SECRET` environment variable. This will allow the Prisma CLI to authenticate against the Prisma server it is talking to: ``` PRISMA_MANAGEMENT_API_SECRET="my-server-secret-123" ``` ### Download the Prisma GraphQL schema and invoking codegen In Prisma 1.7, the Prisma CLI is not automatically taking care of downloading the GraphQL schema (commonly called `prisma.graphql`) for the deployed Prisma API after a deployment any more. If you need the file in your project, you need to download it via a [post deployment hook](#post-deployment-hook). Similarly, if you're using code generation that is configured via [GraphQL Config](https://github.com/graphcool/graphql-config), you also need to explicitly invoke `graphql prepare` for the code being generated. 
Here is the example of a `.graphqlconfig.yml` file that specifies that: - the Prisma GraphQL schema should be stored in a file called `generated/prisma.graphql` - the corresponding TypeScript type definitions should be written to a file called `generated/prisma.ts` ```yml projects: prisma: schemaPath: generated/prisma.graphql extensions: prisma: prisma.yml prepare-binding: output: generated/prisma.ts generator: prisma-ts ``` The corresponding post deployment hook in `prisma.yml` for downloading the schema and invoking the code generation needs to be configured as follows: ```yml hooks: post-deploy: - graphql get-schema --project prisma - graphql prepare ``` ### Bootstrap a GraphQL server Note that in Prisma 1.7, the `--boilerplate` option has been removed from the `prisma init` command. This means you can not bootstrap an entire GraphQL server based on a [GraphQL boilerplate](https://github.com/graphql-boilerplates) project any more. To bootstrap a GraphQL server based on a GraphQL boilerplate project, use the `graphql create` command from the [GraphQL CLI](https://github.com/graphql-cli/graphql-cli): ```bash # Install the GraphQL CLI npm install -g graphql-cli # Choose a boilerplate from the interactive prompt ... graphql create myapp # ...or directly select a boilerplate project via the `--boilerplate` option (e.g. `typescript-advanced`) graphql create myapp --boilerplate typescript-advanced ``` ## Common Errors ### Invalid prisma.yml file prisma.yml should NOT have additional properties. You can read more about this error and how to fix it [in the Forum](https://www.graph.cool/forum/t/how-to-fix-invalid-prisma-yml-file-prisma-yml-should-not-have-additional-properties-additionalproperty-endpoint-errors/3303).
{ "pile_set_name": "Github" }
// Copyright 2012 The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. // +build windows package main import ( "fmt" "time" "golang.org/x/sys/windows/svc" "golang.org/x/sys/windows/svc/mgr" ) func startService(name string) error { m, err := mgr.Connect() if err != nil { return err } defer m.Disconnect() s, err := m.OpenService(name) if err != nil { return fmt.Errorf("could not access service: %v", err) } defer s.Close() err = s.Start("is", "manual-started") if err != nil { return fmt.Errorf("could not start service: %v", err) } return nil } func controlService(name string, c svc.Cmd, to svc.State) error { m, err := mgr.Connect() if err != nil { return err } defer m.Disconnect() s, err := m.OpenService(name) if err != nil { return fmt.Errorf("could not access service: %v", err) } defer s.Close() status, err := s.Control(c) if err != nil { return fmt.Errorf("could not send control=%d: %v", c, err) } timeout := time.Now().Add(10 * time.Second) for status.State != to { if timeout.Before(time.Now()) { return fmt.Errorf("timeout waiting for service to go to state=%d", to) } time.Sleep(300 * time.Millisecond) status, err = s.Query() if err != nil { return fmt.Errorf("could not retrieve service status: %v", err) } } return nil }
{ "pile_set_name": "Github" }
<Ticket_ProcessEDocReply xmlns="http://xml.amadeus.com/TATRES_15_2_1A"> <msgActionDetails> <messageFunctionDetails> <messageFunction>131</messageFunction> </messageFunctionDetails> <responseType>7</responseType> </msgActionDetails> <error> <errorDetails> <errorCode>118</errorCode> </errorDetails> </error> <textInfo> <freeTextQualification> <textSubjectQualifier>4</textSubjectQualifier> <informationType>23</informationType> </freeTextQualification> <freeText>SYSTEM UNABLE TO PROCESS</freeText> </textInfo> </Ticket_ProcessEDocReply>
{ "pile_set_name": "Github" }
# List everything that was unpacked so failures are easy to diagnose in the log.
# FIX: quote "$SRC_DIR" so the script survives build prefixes containing
# spaces (the unquoted expansion would be word-split by the shell).
find "$SRC_DIR" -type f

# The package header must have been extracted to its expected location.
if [[ ! -f "$SRC_DIR/mypkg/awesomeheader.h" ]]; then
  exit 1
fi

# Regression check: a file whose name shadows its parent directory name
# ("mypkg/mypkg") must still be extracted correctly.
if [[ ! -f "$SRC_DIR/mypkg/mypkg" ]]; then
  exit 1
fi

echo "found source files OK"
exit 0
{ "pile_set_name": "Github" }

0:01 A key design feature for working with classes and object-oriented programming  0:04 is modeling and layers, going from the most general to the most specific.  0:09 So, we started with a creature class,  0:12 and a creature class has a name and a level and it's just a generic creature, 0:16 it could be anything, so it could be a squirrel as we saw,  0:20 it could be a dragon, it could be a toad.  0:23 Any kind of creature we can think of, we could model with the original creature class,  0:26 and that's great because it's very applicable but there are differences  0:30 between a dragon and a toad, for example,  0:33 maybe the dragon breathes fire, not too many toads breed fire,  0:36 and so we can use inheritance to add additional specializations to our more specific types, 0:43 so we can have a specific dragon class, which can stand in for a creature,  0:47 it is a creature but it also has more behaviors and more variables.  0:51 Here we have our initializer, the __init__  0:54 and you see we take the required parameters  0:57 and data to pass along to the creature class,  1:00 in order to create a creature, in order for the dragon to be a creature,  1:03 it has to supply a name and a level,  1:05 so we can get to the creature's initializer saying super().__init__  1:09 and pass name and level and that allows the creature to do  1:12 whatever sort of setup it does when it gets created,  1:14 but we also want to have a scale thickness for our dragon,  1:17 so we create another field specific only to dragons,  1:20 and we say self.scale_thickness = whatever they passed in.  1:23 So in addition to having name and level we get from Creature,  1:26 we also have a scale thickness,  1:28 so that adds more data we can also add additional behaviors,  1:30 here we have added a breed_fire method.  
1:33 So the way we create a derived type in Python,  1:36 is we just say class, because it is a class, the name of the class, Dragon,  1:40 and in parenthesis the name of the base type.  1:44 And then, other than that, and using "super",  1:46 this is basically the same as creating any other class.
{ "pile_set_name": "Github" }
/******************************************************************************* * Copyright 2012-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. * Licensed under the Apache License, Version 2.0 (the "License"). You may not use * this file except in compliance with the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. * This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. * ***************************************************************************** * * AWS Tools for Windows (TM) PowerShell (TM) * */ using System; using System.Collections.Generic; using System.Linq; using System.Management.Automation; using System.Text; using Amazon.PowerShell.Common; using Amazon.Runtime; using Amazon.GroundStation; using Amazon.GroundStation.Model; namespace Amazon.PowerShell.Cmdlets.GS { /// <summary> /// Returns a mission profile. /// </summary> [Cmdlet("Get", "GSMissionProfile")] [OutputType("Amazon.GroundStation.Model.GetMissionProfileResponse")] [AWSCmdlet("Calls the AWS Ground Station GetMissionProfile API operation.", Operation = new[] {"GetMissionProfile"}, SelectReturnType = typeof(Amazon.GroundStation.Model.GetMissionProfileResponse))] [AWSCmdletOutput("Amazon.GroundStation.Model.GetMissionProfileResponse", "This cmdlet returns an Amazon.GroundStation.Model.GetMissionProfileResponse object containing multiple properties. The object can also be referenced from properties attached to the cmdlet entry in the $AWSHistory stack." 
)] public partial class GetGSMissionProfileCmdlet : AmazonGroundStationClientCmdlet, IExecutor { #region Parameter MissionProfileId /// <summary> /// <para> /// <para>UUID of a mission profile.</para> /// </para> /// </summary> #if !MODULAR [System.Management.Automation.Parameter(Position = 0, ValueFromPipelineByPropertyName = true, ValueFromPipeline = true)] #else [System.Management.Automation.Parameter(Position = 0, ValueFromPipelineByPropertyName = true, ValueFromPipeline = true, Mandatory = true)] [System.Management.Automation.AllowEmptyString] [System.Management.Automation.AllowNull] #endif [Amazon.PowerShell.Common.AWSRequiredParameter] public System.String MissionProfileId { get; set; } #endregion #region Parameter Select /// <summary> /// Use the -Select parameter to control the cmdlet output. The default value is '*'. /// Specifying -Select '*' will result in the cmdlet returning the whole service response (Amazon.GroundStation.Model.GetMissionProfileResponse). /// Specifying the name of a property of type Amazon.GroundStation.Model.GetMissionProfileResponse will result in that property being returned. /// Specifying -Select '^ParameterName' will result in the cmdlet returning the selected cmdlet parameter value. /// </summary> [System.Management.Automation.Parameter(ValueFromPipelineByPropertyName = true)] public string Select { get; set; } = "*"; #endregion #region Parameter PassThru /// <summary> /// Changes the cmdlet behavior to return the value passed to the MissionProfileId parameter. /// The -PassThru parameter is deprecated, use -Select '^MissionProfileId' instead. This parameter will be removed in a future version. /// </summary> [System.Obsolete("The -PassThru parameter is deprecated, use -Select '^MissionProfileId' instead. 
This parameter will be removed in a future version.")] [System.Management.Automation.Parameter(ValueFromPipelineByPropertyName = true)] public SwitchParameter PassThru { get; set; } #endregion protected override void ProcessRecord() { base.ProcessRecord(); var context = new CmdletContext(); // allow for manipulation of parameters prior to loading into context PreExecutionContextLoad(context); #pragma warning disable CS0618, CS0612 //A class member was marked with the Obsolete attribute if (ParameterWasBound(nameof(this.Select))) { context.Select = CreateSelectDelegate<Amazon.GroundStation.Model.GetMissionProfileResponse, GetGSMissionProfileCmdlet>(Select) ?? throw new System.ArgumentException("Invalid value for -Select parameter.", nameof(this.Select)); if (this.PassThru.IsPresent) { throw new System.ArgumentException("-PassThru cannot be used when -Select is specified.", nameof(this.Select)); } } else if (this.PassThru.IsPresent) { context.Select = (response, cmdlet) => this.MissionProfileId; } #pragma warning restore CS0618, CS0612 //A class member was marked with the Obsolete attribute context.MissionProfileId = this.MissionProfileId; #if MODULAR if (this.MissionProfileId == null && ParameterWasBound(nameof(this.MissionProfileId))) { WriteWarning("You are passing $null as a value for parameter MissionProfileId which is marked as required. 
In case you believe this parameter was incorrectly marked as required, report this by opening an issue at https://github.com/aws/aws-tools-for-powershell/issues."); } #endif // allow further manipulation of loaded context prior to processing PostExecutionContextLoad(context); var output = Execute(context) as CmdletOutput; ProcessOutput(output); } #region IExecutor Members public object Execute(ExecutorContext context) { var cmdletContext = context as CmdletContext; // create request var request = new Amazon.GroundStation.Model.GetMissionProfileRequest(); if (cmdletContext.MissionProfileId != null) { request.MissionProfileId = cmdletContext.MissionProfileId; } CmdletOutput output; // issue call var client = Client ?? CreateClient(_CurrentCredentials, _RegionEndpoint); try { var response = CallAWSServiceOperation(client, request); object pipelineOutput = null; pipelineOutput = cmdletContext.Select(response, this); output = new CmdletOutput { PipelineOutput = pipelineOutput, ServiceResponse = response }; } catch (Exception e) { output = new CmdletOutput { ErrorResponse = e }; } return output; } public ExecutorContext CreateContext() { return new CmdletContext(); } #endregion #region AWS Service Operation Call private Amazon.GroundStation.Model.GetMissionProfileResponse CallAWSServiceOperation(IAmazonGroundStation client, Amazon.GroundStation.Model.GetMissionProfileRequest request) { Utils.Common.WriteVerboseEndpointMessage(this, client.Config, "AWS Ground Station", "GetMissionProfile"); try { #if DESKTOP return client.GetMissionProfile(request); #elif CORECLR return client.GetMissionProfileAsync(request).GetAwaiter().GetResult(); #else #error "Unknown build edition" #endif } catch (AmazonServiceException exc) { var webException = exc.InnerException as System.Net.WebException; if (webException != null) { throw new Exception(Utils.Common.FormatNameResolutionFailureMessage(client.Config, webException.Message), webException); } throw; } } #endregion internal partial class 
CmdletContext : ExecutorContext { public System.String MissionProfileId { get; set; } public System.Func<Amazon.GroundStation.Model.GetMissionProfileResponse, GetGSMissionProfileCmdlet, object> Select { get; set; } = (response, cmdlet) => response; } } }
{ "pile_set_name": "Github" }
---
-api-id: E:Windows.UI.Xaml.Controls.SwipeItem.Invoked
-api-type: winrt event
---

<!-- Event syntax.
public event TypedEventHandler<SwipeItem, SwipeItemInvokedEventArgs> Invoked
-->

# Windows.UI.Xaml.Controls.SwipeItem.Invoked

## -description

Occurs when user interaction indicates that the command represented by this item should execute.

## -xaml-syntax

```xaml
<SwipeItem Invoked="eventhandler"/>
```

## -remarks

## -see-also

## -examples
{ "pile_set_name": "Github" }
<?xml version="1.0"?> <entity_profile> <profile id="Logistician"> <name>Logistician</name> </profile> <profile id="Translator"> <name>Translator</name> </profile> <profile id="Salesman"> <name>Salesman</name> </profile> </entity_profile>
{ "pile_set_name": "Github" }
/// Copyright (c) 2012 Ecma International. All rights reserved.
/// Ecma International makes this code available under the terms and conditions set
/// forth on http://hg.ecmascript.org/tests/test262/raw-file/tip/LICENSE (the
/// "Use Terms"). Any redistribution of this code must retain the above
/// copyright and this notice and otherwise comply with the Use Terms.

/**
 * @path ch15/15.2/15.2.3/15.2.3.6/15.2.3.6-4-267.js
 * @description Object.defineProperty - 'O' is an Array, 'name' is an array index named property, name is accessor property and 'desc' is accessor descriptor, test updating the [[Get]] attribute value of 'name' from undefined to function object (15.4.5.1 step 4.c)
 */

function testcase() {
    var arr = [];

    // The getter we expect index "0" to end up with.
    function accessor() {
        return 12;
    }

    // Define index "0" as a configurable accessor property whose [[Get]]
    // is explicitly undefined.
    Object.defineProperty(arr, "0", { get: undefined, configurable: true });

    // Redefine the same property, updating only [[Get]] from undefined to
    // a function object; all other attributes must be preserved.
    Object.defineProperty(arr, "0", { get: accessor });

    // Expected: get === accessor, set/value undefined, writable-equivalent
    // false, enumerable false, configurable true (per the harness signature).
    return accessorPropertyAttributesAreCorrect(arr, "0", accessor, undefined, undefined, false, true);
}
runTestCase(testcase);
{ "pile_set_name": "Github" }
xof 0303txt 0032 Material Material_1 { 1.000000;1.000000;1.000000;1.000000;; 3.200000; 0.000000;0.000000;0.000000;; 0.000000;0.000000;0.000000;; TextureFilename { "C:\WORK\FPS Creator\MAPS2\texturebank\ww2\walls\concrete\W_b_ALL_01_D2.tga"; } } Frame wall_ALL_a_E { FrameTransformMatrix { 1.000000,0.000000,0.000000,0.000000,0.000000,1.000000,0.000000,0.000000,0.000000,0.000000,1.000000,0.000000,0.000000,0.000000,0.000000,1.000000;; } Mesh { 216; -50.000004;50.000004;0.000000;, 50.000004;50.000004;-6.600000;, -50.000004;50.000004;-6.600000;, -50.000004;50.000004;0.000000;, 50.000004;50.000004;0.000000;, 50.000004;50.000004;-6.600000;, -50.000004;50.000004;0.000000;, -50.000004;50.000004;-6.600000;, -50.000004;48.500004;-8.000001;, -50.000004;50.000004;-6.600000;, 50.000004;48.500004;-8.000001;, -50.000004;48.500004;-8.000001;, -50.000004;50.000004;-6.600000;, 50.000004;50.000004;-6.600000;, 50.000004;48.500004;-8.000001;, 50.000004;50.000004;-6.600000;, 50.000004;50.000004;0.000000;, 50.000004;48.500004;-8.000001;, -50.000004;50.000004;0.000000;, -50.000004;48.500004;-8.000001;, -50.000004;36.900002;-8.000001;, -50.000004;48.500004;-8.000001;, 50.000004;36.900002;-8.000001;, -50.000004;36.900002;-8.000001;, -50.000004;48.500004;-8.000001;, 50.000004;48.500004;-8.000001;, 50.000004;36.900002;-8.000001;, 50.000004;48.500004;-8.000001;, 50.000004;50.000004;0.000000;, 50.000004;36.900002;-8.000001;, -50.000004;50.000004;0.000000;, -50.000004;-38.500004;-5.000000;, -50.000004;-50.000004;0.000000;, -50.000004;50.000004;0.000000;, -50.000004;32.100002;-5.000000;, -50.000004;-38.500004;-5.000000;, 50.000004;32.100002;-5.000000;, 50.000004;-50.000004;0.000000;, 50.000004;-38.500004;-5.000000;, 50.000004;32.100002;-5.000000;, 50.000004;50.000004;0.000000;, 50.000004;-50.000004;0.000000;, -50.000004;-50.000004;0.000000;, -50.000004;-38.500004;-5.000000;, -50.000004;-42.300003;-8.000001;, 50.000004;-38.500004;-5.000000;, 50.000004;-50.000004;0.000000;, 
50.000004;-42.300003;-8.000001;, -50.000004;-50.000004;0.000000;, -50.000004;-42.300003;-8.000001;, -50.000004;-50.000004;-8.000001;, 50.000004;-42.300003;-8.000001;, 50.000004;-50.000004;0.000000;, 50.000004;-50.000004;-8.000001;, -50.000004;-50.000004;0.000000;, -50.000004;-50.000004;-8.000001;, -37.700001;-50.000004;-8.000001;, -37.700001;-42.300003;-8.000001;, -50.000004;-42.300003;-8.000001;, -50.000004;-38.500004;-5.000000;, -37.700001;-50.000004;-8.000001;, -50.000004;-42.300003;-8.000001;, -37.700001;-42.300003;-8.000001;, -37.700001;-50.000004;-8.000001;, -50.000004;-50.000004;-8.000001;, -50.000004;-42.300003;-8.000001;, -50.000004;-50.000004;0.000000;, -12.800001;-50.000004;-8.000001;, 0.000000;-50.000004;0.000000;, -50.000004;-50.000004;0.000000;, -37.700001;-50.000004;-8.000001;, -12.800001;-50.000004;-8.000001;, -37.700001;-42.300003;-8.000001;, 0.000000;-38.500004;-5.000000;, -12.800001;-42.300003;-8.000001;, -37.700001;-42.300003;-8.000001;, -50.000004;-38.500004;-5.000000;, 0.000000;-38.500004;-5.000000;, -37.700001;-47.200001;-12.036000;, -12.800001;-42.300003;-8.000001;, -12.800001;-47.200001;-12.036000;, -37.700001;-47.200001;-12.036000;, -37.700001;-42.300003;-8.000001;, -12.800001;-42.300003;-8.000001;, -12.800001;-50.000004;-12.036000;, -37.700001;-47.200001;-12.036000;, -12.800001;-47.200001;-12.036000;, -12.800001;-50.000004;-12.036000;, -37.700001;-50.000004;-12.036000;, -37.700001;-47.200001;-12.036000;, 0.000000;-50.000004;0.000000;, -12.800001;-50.000004;-8.000001;, 12.800001;-50.000004;-8.000001;, -12.800001;-42.300003;-8.000001;, 0.000000;-38.500004;-5.000000;, 12.800001;-42.300003;-8.000001;, -12.800001;-50.000004;-8.000001;, 12.800001;-42.300003;-8.000001;, 12.800001;-50.000004;-8.000001;, -12.800001;-50.000004;-8.000001;, -12.800001;-42.300003;-8.000001;, 12.800001;-42.300003;-8.000001;, 50.000004;-50.000004;0.000000;, 37.700001;-50.000004;-8.000001;, 50.000004;-50.000004;-8.000001;, 0.000000;-50.000004;0.000000;, 
37.700001;-50.000004;-8.000001;, 50.000004;-50.000004;0.000000;, 0.000000;-50.000004;0.000000;, 12.800001;-50.000004;-8.000001;, 37.700001;-50.000004;-8.000001;, 12.800001;-42.300003;-8.000001;, 50.000004;-38.500004;-5.000000;, 37.700001;-42.300003;-8.000001;, 12.800001;-42.300003;-8.000001;, 0.000000;-38.500004;-5.000000;, 50.000004;-38.500004;-5.000000;, 50.000004;-38.500004;-5.000000;, 50.000004;-42.300003;-8.000001;, 37.700001;-42.300003;-8.000001;, 12.800001;-47.200001;-12.036000;, 37.700001;-42.300003;-8.000001;, 37.700001;-47.200001;-12.036000;, 12.800001;-47.200001;-12.036000;, 12.800001;-42.300003;-8.000001;, 37.700001;-42.300003;-8.000001;, 50.000004;-42.300003;-8.000001;, 37.700001;-50.000004;-8.000001;, 37.700001;-42.300003;-8.000001;, 50.000004;-42.300003;-8.000001;, 50.000004;-50.000004;-8.000001;, 37.700001;-50.000004;-8.000001;, 12.800001;-50.000004;-12.036000;, 37.700001;-47.200001;-12.036000;, 37.700001;-50.000004;-12.036000;, 12.800001;-50.000004;-12.036000;, 12.800001;-47.200001;-12.036000;, 37.700001;-47.200001;-12.036000;, 0.000000;-38.500004;-5.000000;, -50.000004;-38.500004;-5.000000;, -50.000004;32.100002;-5.000000;, 50.000004;-38.500004;-5.000000;, 0.000000;-38.500004;-5.000000;, 50.000004;32.100002;-5.000000;, 50.000004;32.100002;-5.000000;, 0.000000;-38.500004;-5.000000;, -50.000004;32.100002;-5.000000;, -37.700001;-50.000004;-12.036000;, -12.800001;-50.000004;-8.000001;, -37.700001;-50.000004;-8.000001;, -37.700001;-50.000004;-12.036000;, -12.800001;-50.000004;-12.036000;, -12.800001;-50.000004;-8.000001;, 12.800001;-50.000004;-8.000001;, 37.700001;-50.000004;-12.036000;, 37.700001;-50.000004;-8.000001;, 12.800001;-50.000004;-8.000001;, 12.800001;-50.000004;-12.036000;, 37.700001;-50.000004;-12.036000;, -50.000004;33.973701;-5.621050;, -50.000004;32.100002;-5.000000;, -50.000004;50.000004;0.000000;, -50.000004;50.000004;0.000000;, -50.000004;36.900002;-8.000001;, -50.000004;33.973701;-5.621050;, 50.000004;32.100002;-5.000000;, 
-50.000004;33.973701;-5.621050;, 50.000004;33.973701;-5.621050;, 50.000004;32.100002;-5.000000;, -50.000004;32.100002;-5.000000;, -50.000004;33.973701;-5.621050;, -50.000004;36.900002;-8.000001;, 50.000004;33.973701;-5.621050;, -50.000004;33.973701;-5.621050;, -50.000004;36.900002;-8.000001;, 50.000004;36.900002;-8.000001;, 50.000004;33.973701;-5.621050;, 50.000004;50.000004;0.000000;, 50.000004;32.100002;-5.000000;, 50.000004;33.973701;-5.621050;, 50.000004;33.973701;-5.621050;, 50.000004;36.900002;-8.000001;, 50.000004;50.000004;0.000000;, 0.000000;-50.000004;0.000000;, 50.000004;-50.000004;0.000000;, 50.000004;50.000004;0.000000;, -50.000004;50.000004;0.000000;, -50.000004;-50.000004;0.000000;, 0.000000;-50.000004;0.000000;, -50.000004;50.000004;0.000000;, 0.000000;-50.000004;0.000000;, 50.000004;50.000004;0.000000;, -12.800001;-50.000004;-8.000001;, -12.800001;-47.200001;-12.036000;, -12.800001;-42.300003;-8.000001;, -37.700001;-42.300003;-8.000001;, -37.700001;-47.200001;-12.036000;, -37.700001;-50.000004;-8.000001;, -37.700001;-47.200001;-12.036000;, -37.700001;-50.000004;-12.036000;, -37.700001;-50.000004;-8.000001;, -12.800001;-50.000004;-8.000001;, -12.800001;-50.000004;-12.036000;, -12.800001;-47.200001;-12.036000;, 37.700001;-50.000004;-8.000001;, 37.700001;-47.200001;-12.036000;, 37.700001;-42.300003;-8.000001;, 12.800001;-42.300003;-8.000001;, 12.800001;-47.200001;-12.036000;, 12.800001;-50.000004;-8.000001;, 37.700001;-50.000004;-8.000001;, 37.700001;-50.000004;-12.036000;, 37.700001;-47.200001;-12.036000;, 12.800001;-47.200001;-12.036000;, 12.800001;-50.000004;-12.036000;, 12.800001;-50.000004;-8.000001;; 72; 3;0,1,2;, 3;3,4,5;, 3;6,7,8;, 3;9,10,11;, 3;12,13,14;, 3;15,16,17;, 3;18,19,20;, 3;21,22,23;, 3;24,25,26;, 3;27,28,29;, 3;30,31,32;, 3;33,34,35;, 3;36,37,38;, 3;39,40,41;, 3;42,43,44;, 3;45,46,47;, 3;48,49,50;, 3;51,52,53;, 3;54,55,56;, 3;57,58,59;, 3;60,61,62;, 3;63,64,65;, 3;66,67,68;, 3;69,70,71;, 3;72,73,74;, 3;75,76,77;, 3;78,79,80;, 
3;81,82,83;, 3;84,85,86;, 3;87,88,89;, 3;90,91,92;, 3;93,94,95;, 3;96,97,98;, 3;99,100,101;, 3;102,103,104;, 3;105,106,107;, 3;108,109,110;, 3;111,112,113;, 3;114,115,116;, 3;117,118,119;, 3;120,121,122;, 3;123,124,125;, 3;126,127,128;, 3;129,130,131;, 3;132,133,134;, 3;135,136,137;, 3;138,139,140;, 3;141,142,143;, 3;144,145,146;, 3;147,148,149;, 3;150,151,152;, 3;153,154,155;, 3;156,157,158;, 3;159,160,161;, 3;162,163,164;, 3;165,166,167;, 3;168,169,170;, 3;171,172,173;, 3;174,175,176;, 3;177,178,179;, 3;180,181,182;, 3;183,184,185;, 3;186,187,188;, 3;189,190,191;, 3;192,193,194;, 3;195,196,197;, 3;198,199,200;, 3;201,202,203;, 3;204,205,206;, 3;207,208,209;, 3;210,211,212;, 3;213,214,215;; MeshNormals { 216; 0.000000;1.000000;0.000000;, 0.000000;1.000000;0.000000;, 0.000000;1.000000;0.000000;, 0.000000;1.000000;0.000000;, 0.000000;1.000000;0.000000;, 0.000000;1.000000;0.000000;, -1.000000;0.000000;0.000000;, -1.000000;0.000000;0.000000;, -1.000000;0.000000;0.000000;, 0.000000;0.682319;-0.731055;, 0.000000;0.682319;-0.731055;, 0.000000;0.682319;-0.731055;, 0.000000;0.682319;-0.731055;, 0.000000;0.682319;-0.731055;, 0.000000;0.682319;-0.731055;, 1.000000;0.000000;0.000000;, 1.000000;0.000000;0.000000;, 1.000000;0.000000;0.000000;, -1.000000;0.000000;0.000000;, -1.000000;0.000000;0.000000;, -1.000000;0.000000;0.000000;, 0.000000;0.000000;-1.000000;, 0.000000;0.000000;-1.000000;, 0.000000;0.000000;-1.000000;, 0.000000;0.000000;-1.000000;, 0.000000;0.000000;-1.000000;, 0.000000;0.000000;-1.000000;, 1.000000;0.000000;0.000000;, 1.000000;0.000000;0.000000;, 1.000000;0.000000;0.000000;, -1.000000;0.000000;0.000000;, -1.000000;0.000000;0.000000;, -1.000000;0.000000;0.000000;, -1.000000;0.000000;0.000000;, -1.000000;0.000000;0.000000;, -1.000000;0.000000;0.000000;, 1.000000;0.000000;0.000000;, 1.000000;0.000000;0.000000;, 1.000000;0.000000;0.000000;, 1.000000;0.000000;0.000000;, 1.000000;0.000000;0.000000;, 1.000000;0.000000;0.000000;, -1.000000;0.000000;0.000000;, 
-1.000000;0.000000;0.000000;, -1.000000;0.000000;0.000000;, 1.000000;0.000000;0.000000;, 1.000000;0.000000;0.000000;, 1.000000;0.000000;0.000000;, -1.000000;0.000000;0.000000;, -1.000000;0.000000;0.000000;, -1.000000;0.000000;0.000000;, 1.000000;0.000000;0.000000;, 1.000000;0.000000;0.000000;, 1.000000;0.000000;0.000000;, 0.000000;-1.000000;0.000000;, 0.000000;-1.000000;0.000000;, 0.000000;-1.000000;0.000000;, 0.000000;0.625051;-0.780584;, 0.000000;0.619644;-0.784883;, 0.000000;0.619644;-0.784883;, 0.000000;0.000000;-1.000000;, 0.000000;0.000000;-1.000000;, 0.000000;0.000000;-1.000000;, 0.000000;0.000000;-1.000000;, 0.000000;0.000000;-1.000000;, 0.000000;0.000000;-1.000000;, 0.000000;-1.000000;0.000000;, 0.000000;-1.000000;0.000000;, 0.000000;-1.000000;0.000000;, 0.000000;-1.000000;0.000000;, 0.000000;-1.000000;0.000000;, 0.000000;-1.000000;0.000000;, 0.000000;0.625051;-0.780584;, 0.000000;0.619644;-0.784883;, 0.000000;0.625051;-0.780584;, 0.000000;0.625051;-0.780584;, 0.000000;0.619644;-0.784883;, 0.000000;0.619644;-0.784883;, 0.000000;0.635774;-0.771876;, 0.000000;0.625051;-0.780584;, 0.000000;0.635774;-0.771876;, 0.000000;0.635774;-0.771876;, 0.000000;0.625051;-0.780584;, 0.000000;0.625051;-0.780584;, 0.000000;0.000000;-1.000000;, 0.000000;0.000000;-1.000000;, 0.000000;0.000000;-1.000000;, 0.000000;0.000000;-1.000000;, 0.000000;0.000000;-1.000000;, 0.000000;0.000000;-1.000000;, 0.000000;-1.000000;0.000000;, 0.000000;-1.000000;0.000000;, 0.000000;-1.000000;0.000000;, 0.000000;0.625051;-0.780584;, 0.000000;0.619644;-0.784883;, 0.000000;0.625051;-0.780584;, 0.000000;0.000000;-1.000000;, 0.000000;0.000000;-1.000000;, 0.000000;0.000000;-1.000000;, 0.000000;0.000000;-1.000000;, 0.000000;0.000000;-1.000000;, 0.000000;0.000000;-1.000000;, 0.000000;-1.000000;0.000000;, 0.000000;-1.000000;0.000000;, 0.000000;-1.000000;0.000000;, 0.000000;-1.000000;0.000000;, 0.000000;-1.000000;0.000000;, 0.000000;-1.000000;0.000000;, 0.000000;-1.000000;0.000000;, 
0.000000;-1.000000;0.000000;, 0.000000;-1.000000;0.000000;, 0.000000;0.625051;-0.780584;, 0.000000;0.619644;-0.784883;, 0.000000;0.625051;-0.780584;, 0.000000;0.625051;-0.780584;, 0.000000;0.619644;-0.784883;, 0.000000;0.619644;-0.784883;, 0.000000;0.619644;-0.784883;, 0.000000;0.619644;-0.784883;, 0.000000;0.625051;-0.780584;, 0.000000;0.635774;-0.771876;, 0.000000;0.625051;-0.780584;, 0.000000;0.635774;-0.771876;, 0.000000;0.635774;-0.771876;, 0.000000;0.625051;-0.780584;, 0.000000;0.625051;-0.780584;, 0.000000;0.000000;-1.000000;, 0.000000;0.000000;-1.000000;, 0.000000;0.000000;-1.000000;, 0.000000;0.000000;-1.000000;, 0.000000;0.000000;-1.000000;, 0.000000;0.000000;-1.000000;, 0.000000;0.000000;-1.000000;, 0.000000;0.000000;-1.000000;, 0.000000;0.000000;-1.000000;, 0.000000;0.000000;-1.000000;, 0.000000;0.000000;-1.000000;, 0.000000;0.000000;-1.000000;, 0.000000;0.000000;-1.000000;, 0.000000;0.000000;-1.000000;, 0.000000;-0.159348;-0.987222;, 0.000000;0.000000;-1.000000;, 0.000000;0.000000;-1.000000;, 0.000000;-0.159348;-0.987222;, 0.000000;-0.159348;-0.987222;, 0.000000;0.000000;-1.000000;, 0.000000;-0.159348;-0.987222;, 0.000000;-1.000000;0.000000;, 0.000000;-1.000000;0.000000;, 0.000000;-1.000000;0.000000;, 0.000000;-1.000000;0.000000;, 0.000000;-1.000000;0.000000;, 0.000000;-1.000000;0.000000;, 0.000000;-1.000000;0.000000;, 0.000000;-1.000000;0.000000;, 0.000000;-1.000000;0.000000;, 0.000000;-1.000000;0.000000;, 0.000000;-1.000000;0.000000;, 0.000000;-1.000000;0.000000;, -1.000000;0.000000;0.000000;, -1.000000;0.000000;0.000000;, -1.000000;0.000000;0.000000;, -1.000000;0.000000;0.000000;, -1.000000;0.000000;0.000000;, -1.000000;0.000000;0.000000;, 0.000000;-0.159348;-0.987222;, 0.000000;-0.314624;-0.949216;, 0.000000;-0.314624;-0.949216;, 0.000000;-0.159348;-0.987222;, 0.000000;-0.159348;-0.987222;, 0.000000;-0.314624;-0.949216;, 0.000000;-0.630805;-0.775941;, 0.000000;-0.630805;-0.775941;, 0.000000;-0.630805;-0.775941;, 0.000000;-0.630805;-0.775941;, 
0.000000;-0.630805;-0.775941;, 0.000000;-0.630805;-0.775941;, 1.000000;0.000000;0.000000;, 1.000000;0.000000;0.000000;, 1.000000;0.000000;0.000000;, 1.000000;0.000000;0.000000;, 1.000000;0.000000;0.000000;, 1.000000;0.000000;0.000000;, -0.000000;0.000000;1.000000;, 0.000000;0.000000;1.000000;, 0.000000;0.000000;1.000000;, -0.000000;0.000000;1.000000;, -0.000000;0.000000;1.000000;, -0.000000;0.000000;1.000000;, -0.000000;0.000000;1.000000;, -0.000000;0.000000;1.000000;, 0.000000;0.000000;1.000000;, 1.000000;0.000000;0.000000;, 1.000000;0.000000;0.000000;, 1.000000;0.000000;0.000000;, -1.000000;0.000000;0.000000;, -1.000000;0.000000;0.000000;, -1.000000;0.000000;0.000000;, -1.000000;0.000000;0.000000;, -1.000000;0.000000;0.000000;, -1.000000;0.000000;0.000000;, 1.000000;0.000000;0.000000;, 1.000000;0.000000;0.000000;, 1.000000;0.000000;0.000000;, 1.000000;0.000000;0.000000;, 1.000000;0.000000;0.000000;, 1.000000;0.000000;0.000000;, -1.000000;0.000000;0.000000;, -1.000000;0.000000;0.000000;, -1.000000;0.000000;0.000000;, 1.000000;0.000000;0.000000;, 1.000000;0.000000;0.000000;, 1.000000;0.000000;0.000000;, -1.000000;0.000000;0.000000;, -1.000000;0.000000;0.000000;, -1.000000;0.000000;0.000000;; 72; 3;0,1,2;, 3;3,4,5;, 3;6,7,8;, 3;9,10,11;, 3;12,13,14;, 3;15,16,17;, 3;18,19,20;, 3;21,22,23;, 3;24,25,26;, 3;27,28,29;, 3;30,31,32;, 3;33,34,35;, 3;36,37,38;, 3;39,40,41;, 3;42,43,44;, 3;45,46,47;, 3;48,49,50;, 3;51,52,53;, 3;54,55,56;, 3;57,58,59;, 3;60,61,62;, 3;63,64,65;, 3;66,67,68;, 3;69,70,71;, 3;72,73,74;, 3;75,76,77;, 3;78,79,80;, 3;81,82,83;, 3;84,85,86;, 3;87,88,89;, 3;90,91,92;, 3;93,94,95;, 3;96,97,98;, 3;99,100,101;, 3;102,103,104;, 3;105,106,107;, 3;108,109,110;, 3;111,112,113;, 3;114,115,116;, 3;117,118,119;, 3;120,121,122;, 3;123,124,125;, 3;126,127,128;, 3;129,130,131;, 3;132,133,134;, 3;135,136,137;, 3;138,139,140;, 3;141,142,143;, 3;144,145,146;, 3;147,148,149;, 3;150,151,152;, 3;153,154,155;, 3;156,157,158;, 3;159,160,161;, 3;162,163,164;, 
3;165,166,167;, 3;168,169,170;, 3;171,172,173;, 3;174,175,176;, 3;177,178,179;, 3;180,181,182;, 3;183,184,185;, 3;186,187,188;, 3;189,190,191;, 3;192,193,194;, 3;195,196,197;, 3;198,199,200;, 3;201,202,203;, 3;204,205,206;, 3;207,208,209;, 3;210,211,212;, 3;213,214,215;; } MeshMaterialList { 1; 72; 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0; { Material_1 } } MeshTextureCoords { 216; 0.000000;0.000000;, 1.000000;0.060319;, 0.000000;0.060319;, 0.000000;0.000000;, 1.000000;0.000000;, 1.000000;0.060319;, 1.000000;0.000000;, 0.942250;0.000000;, 0.930000;0.015000;, 0.000000;0.000000;, 1.000000;0.015000;, 0.000000;0.015000;, 0.000000;0.000000;, 1.000000;0.000000;, 1.000000;0.015000;, 0.942250;0.000000;, 1.000000;0.000000;, 0.930000;0.015000;, 1.000000;0.000000;, 0.930000;0.015000;, 0.930000;0.131000;, 0.000000;0.015000;, 1.000000;0.131000;, 0.000000;0.131000;, 0.000000;0.015000;, 1.000000;0.015000;, 1.000000;0.131000;, 0.930000;0.015000;, 1.000000;0.000000;, 0.930000;0.131000;, 1.000000;0.000000;, 0.956250;0.885000;, 1.000000;1.000000;, 1.000000;0.000000;, 0.956250;0.179000;, 0.956250;0.885000;, 0.956250;0.179000;, 1.000000;1.000000;, 0.956250;0.885000;, 0.956250;0.179000;, 1.000000;0.000000;, 1.000000;1.000000;, 1.000000;1.000000;, 0.956250;0.885000;, 0.930000;0.923000;, 0.956250;0.885000;, 1.000000;1.000000;, 0.930000;0.923000;, 1.000000;1.000000;, 0.930000;0.923000;, 0.930000;1.000000;, 0.930000;0.923000;, 1.000000;1.000000;, 0.930000;1.000000;, 0.000000;0.000000;, 0.000000;0.073114;, 0.123000;0.073114;, 0.123000;0.923000;, 0.000000;0.923000;, 0.000000;0.885000;, 0.123000;1.000000;, 0.000000;0.923000;, 0.123000;0.923000;, 0.123000;1.000000;, 0.000000;1.000000;, 0.000000;0.923000;, 0.000000;0.000000;, 0.372000;0.073114;, 0.500000;0.000000;, 0.000000;0.000000;, 0.123000;0.073114;, 0.372000;0.073114;, 
0.123000;0.923000;, 0.500000;0.885000;, 0.372000;0.923000;, 0.123000;0.923000;, 0.000000;0.885000;, 0.500000;0.885000;, 0.123000;0.972000;, 0.372000;0.923000;, 0.372000;0.972000;, 0.123000;0.972000;, 0.123000;0.923000;, 0.372000;0.923000;, 0.372000;1.000000;, 0.123000;0.972000;, 0.372000;0.972000;, 0.372000;1.000000;, 0.123000;1.000000;, 0.123000;0.972000;, 0.500000;0.000000;, 0.372000;0.073114;, 0.628000;0.073114;, 0.372000;0.923000;, 0.500000;0.885000;, 0.628000;0.923000;, 0.372000;1.000000;, 0.628000;0.923000;, 0.628000;1.000000;, 0.372000;1.000000;, 0.372000;0.923000;, 0.628000;0.923000;, 1.000000;0.000000;, 0.877000;0.073114;, 1.000000;0.073114;, 0.500000;0.000000;, 0.877000;0.073114;, 1.000000;0.000000;, 0.500000;0.000000;, 0.628000;0.073114;, 0.877000;0.073114;, 0.628000;0.923000;, 1.000000;0.885000;, 0.877000;0.923000;, 0.628000;0.923000;, 0.500000;0.885000;, 1.000000;0.885000;, 1.000000;0.885000;, 1.000000;0.923000;, 0.877000;0.923000;, 0.628000;0.972000;, 0.877000;0.923000;, 0.877000;0.972000;, 0.628000;0.972000;, 0.628000;0.923000;, 0.877000;0.923000;, 1.000000;0.923000;, 0.877000;1.000000;, 0.877000;0.923000;, 1.000000;0.923000;, 1.000000;1.000000;, 0.877000;1.000000;, 0.628000;1.000000;, 0.877000;0.972000;, 0.877000;1.000000;, 0.628000;1.000000;, 0.628000;0.972000;, 0.877000;0.972000;, 0.500000;0.885000;, 0.000000;0.885000;, 0.000000;0.179000;, 1.000000;0.885000;, 0.500000;0.885000;, 1.000000;0.179000;, 1.000000;0.179000;, 0.500000;0.885000;, 0.000000;0.179000;, 0.123000;0.110000;, 0.372000;0.073114;, 0.123000;0.073114;, 0.123000;0.110000;, 0.372000;0.110000;, 0.372000;0.073114;, 0.628000;0.073114;, 0.877000;0.110000;, 0.877000;0.073114;, 0.628000;0.073114;, 0.628000;0.110000;, 0.877000;0.110000;, 0.950816;0.160263;, 0.956250;0.179000;, 1.000000;0.000000;, 1.000000;0.000000;, 0.930000;0.131000;, 0.950816;0.160263;, 1.000000;0.179000;, 0.000000;0.160263;, 1.000000;0.160263;, 1.000000;0.179000;, 0.000000;0.179000;, 0.000000;0.160263;, 0.000000;0.131000;, 
1.000000;0.160263;, 0.000000;0.160263;, 0.000000;0.131000;, 1.000000;0.131000;, 1.000000;0.160263;, 1.000000;0.000000;, 0.956250;0.179000;, 0.950816;0.160263;, 0.950816;0.160263;, 0.930000;0.131000;, 1.000000;0.000000;, 0.500000;1.000000;, 1.000000;1.000000;, 1.000000;0.000000;, 0.000000;0.000000;, 0.000000;1.000000;, 0.500000;1.000000;, 0.000000;0.000000;, 0.500000;1.000000;, 1.000000;0.000000;, 0.372000;1.000000;, 0.414000;0.972000;, 0.372000;0.923000;, 0.123000;0.923000;, 0.079000;0.972000;, 0.123000;1.000000;, 0.079000;0.972000;, 0.079000;1.000000;, 0.123000;1.000000;, 0.372000;1.000000;, 0.414000;1.000000;, 0.414000;0.972000;, 0.877000;1.000000;, 0.919000;0.972000;, 0.877000;0.923000;, 0.628000;0.923000;, 0.584000;0.972000;, 0.628000;1.000000;, 0.877000;1.000000;, 0.919000;1.000000;, 0.919000;0.972000;, 0.584000;0.972000;, 0.584000;1.000000;, 0.628000;1.000000;; } } }
{ "pile_set_name": "Github" }
---
id: design-principles
title: Design Principles
---

:::caution

This section is a work in progress.

:::

- **Little to learn** - Docusaurus should be easy to learn and use as the API is quite small. Most things will still be achievable by users, even if it takes them more code and more time to write. Not having abstractions is better than having the wrong abstractions, and we don't want users to have to hack around the wrong abstractions. Mandatory talk - [Minimal API Surface Area](https://www.youtube.com/watch?v=4anAwXYqLG8).
- **Intuitive** - Users will not feel overwhelmed when looking at the project directory of a Docusaurus project or adding new features. It should look intuitive and easy to build on top of, using approaches they are familiar with.
- **Layered architecture** - The separation of concerns between each layer of our stack (content/theming/styling) should be clear - well-abstracted and modular.
- **Sensible defaults** - Common and popular performance optimizations and configurations will be done for users but they are given the option to override them.
- **No vendor lock-in** - Users are not required to use the default plugins or CSS, although they are highly encouraged to. Certain core lower-level infra pieces like React Loadable and React Router cannot be swapped because we do default performance optimization on them. But higher-level ones can: the choice of Markdown engine, CSS framework, or CSS methodology is entirely up to users.

## How Docusaurus works

<!-- moved in from how Docusaurus works @yangshun -->

We believe that as developers, knowing how a library works is helpful in allowing us to become better at using it. Hence we're dedicating effort to explaining the architecture and various components of Docusaurus with the hope that users reading it will gain a deeper understanding of the tool and be even more proficient in using it.

<!--

Explain the principles that guide the development of Docusaurus.
References --- - https://www.gatsbyjs.org/docs/behind-the-scenes/ - https://reactjs.org/docs/design-principles.html - https://v1.vuepress.vuejs.org/miscellaneous/design-concepts.html -->
{ "pile_set_name": "Github" }
// Karma/webpack test entry point.

// Configure the enzyme adapter for React 16 before any spec runs.
const enzyme = require('enzyme');
const Adapter = require('enzyme-adapter-react-16');
enzyme.configure({ adapter: new Adapter() });

// Require every module matched by a webpack require.context, recursively.
function loadAll(context) {
    context.keys().forEach(context);
}

// All modules ending in ".spec" from ./src and its subdirectories (the tests).
loadAll(require.context('./src', true, /.spec$/));

// All ".ts" modules from ./src, so every source file is loaded
// (presumably so coverage reports include untested files — verify).
loadAll(require.context('./src', true, /.ts$/));
{ "pile_set_name": "Github" }
// Barrel module: re-exports the public names of the sibling utility modules
// so consumers can import everything from this directory's index.
// Note: with `export *`, a name exported by more than one module would be
// excluded per the ES module rules, so ordering here is not significant.
export * from './styles'
export * from './accessibility'
export * from './common'
export * from './strings'
export * from './window'
export * from './isFixed'
{ "pile_set_name": "Github" }
<?php
// +----------------------------------------------------------------------
// | ThinkPHP [ WE CAN DO IT JUST THINK IT ]
// +----------------------------------------------------------------------
// | Copyright (c) 2006-2014 http://thinkphp.cn All rights reserved.
// +----------------------------------------------------------------------
// | Licensed ( http://www.apache.org/licenses/LICENSE-2.0 )
// +----------------------------------------------------------------------
// | Author: luofei614<weibo.com/luofei614>
// +----------------------------------------------------------------------
namespace Think\Upload\Driver;

/**
 * Upload driver that stores files in SAE (Sina App Engine) Storage
 * via the \SaeStorage API.
 */
class Sae{
    /**
     * Domain of the SAE Storage bucket.
     * @var string
     */
    private $domain = '';

    // Root path (inside the Storage domain) under which files are saved.
    // Derived from the configured root path in checkRootPath().
    private $rootPath = '';
    /**
     * Last upload error message.
     * @var string
     */
    private $error = '';
    /**
     * Constructor: set the Storage domain.
     * If a config array with a non-empty 'domain' entry is passed, that value
     * (lower-cased) is used; otherwise the domain is taken later from the
     * first segment of the root path (see checkRootPath()).
     * @param mixed $config upload configuration
     */
    public function __construct($config = null){
        if(is_array($config) && !empty($config['domain'])){
            $this->domain = strtolower($config['domain']);
        }
    }

    /**
     * Check the upload root path.
     * When no domain was configured, the first path segment becomes the
     * Storage domain and the remainder becomes the root path. Verifies the
     * domain exists by querying its capacity.
     * @param string $rootpath root path
     * @return boolean true - check passed, false - check failed
     */
    public function checkRootPath($rootpath){
        $rootpath   =   trim($rootpath,'./');
        if(!$this->domain){
            $rootpath = explode('/', $rootpath);
            $this->domain = strtolower(array_shift($rootpath));
            $rootpath = implode('/', $rootpath);
        }
        $this->rootPath = $rootpath;
        $st = new \SaeStorage();
        // getDomainCapacity() returning false means the domain does not exist.
        if(false===$st->getDomainCapacity($this->domain)){
            $this->error = '您好像没有建立Storage的domain['.$this->domain.']';
            return false;
        }
        return true;
    }

    /**
     * Check the upload save path.
     * SAE Storage needs no directory preparation, so this always succeeds.
     * @param string $savepath upload directory
     * @return boolean check result: true - passed, false - failed
     */
    public function checkSavePath($savepath){
        return true;
    }

    /**
     * Save the given file to SAE Storage.
     * On success, $file['url'] is set to the file's public Storage URL.
     * @param array   $file    file info to save (savepath, savename, tmp_name)
     * @param boolean $replace whether to overwrite a file with the same name
     * @return boolean save status: true - success, false - failure
     */
    public function save(&$file, $replace=true) {
        $filename = ltrim($this->rootPath .'/'. $file['savepath'] . $file['savename'],'/');

        $st = new \SaeStorage();

        /* Do not overwrite an existing file with the same name */
        if (!$replace && $st->fileExists($this->domain,$filename)) {
            $this->error = '存在同名文件' . $file['savename'];
            return false;
        }

        /* Upload the temporary file into Storage */
        if (!$st->upload($this->domain,$filename,$file['tmp_name'])) {
            $this->error = '文件上传保存错误!['.$st->errno().']:'.$st->errmsg();
            return false;
        }else{
            $file['url'] = $st->getUrl($this->domain, $filename);
        }

        return true;
    }

    /**
     * Create a directory.
     * Storage is a flat object store, so this is a no-op that always succeeds.
     */
    public function mkdir(){
        return true;
    }

    /**
     * Get the last upload error message.
     * @return string error message
     */
    public function getError(){
        return $this->error;
    }
}
{ "pile_set_name": "Github" }
# Makefile for the cpufreq-bench benchmark tool.

# Build output directory: defaults to the current directory, can be
# overridden from the command line with O=<dir>.
OUTPUT := ./
ifeq ("$(origin O)", "command line")
ifneq ($(O),)
OUTPUT := $(O)/
endif
endif

# STATIC=true compiles the cpupower library objects into the binary
# directly; otherwise link dynamically against -lcpupower.
ifeq ($(strip $(STATIC)),true)
LIBS = -L../ -L$(OUTPUT) -lm
OBJS = $(OUTPUT)main.o $(OUTPUT)parse.o $(OUTPUT)system.o $(OUTPUT)benchmark.o \
	$(OUTPUT)../lib/cpufreq.o $(OUTPUT)../lib/sysfs.o
else
LIBS = -L../ -L$(OUTPUT) -lm -lcpupower
OBJS = $(OUTPUT)main.o $(OUTPUT)parse.o $(OUTPUT)system.o $(OUTPUT)benchmark.o
endif

# The default configuration file location is compiled into the binary.
CFLAGS += -D_GNU_SOURCE -I../lib -DDEFAULT_CONFIG_FILE=\"$(confdir)/cpufreq-bench.conf\"

# Compile each .c source file into the output directory.
$(OUTPUT)%.o : %.c
	$(ECHO) " CC " $@
	$(QUIET) $(CC) -c $(CFLAGS) $< -o $@

# Link the benchmark binary.
$(OUTPUT)cpufreq-bench: $(OBJS)
	$(ECHO) " CC " $@
	$(QUIET) $(CC) -o $@ $(CFLAGS) $(LDFLAGS) $(OBJS) $(LIBS)

all: $(OUTPUT)cpufreq-bench

# Install the binary, helper scripts, documentation and example config.
install:
	mkdir -p $(DESTDIR)/$(sbindir)
	mkdir -p $(DESTDIR)/$(bindir)
	mkdir -p $(DESTDIR)/$(docdir)
	mkdir -p $(DESTDIR)/$(confdir)
	install -m 755 $(OUTPUT)cpufreq-bench $(DESTDIR)/$(sbindir)/cpufreq-bench
	install -m 755 cpufreq-bench_plot.sh $(DESTDIR)/$(bindir)/cpufreq-bench_plot.sh
	install -m 644 README-BENCH $(DESTDIR)/$(docdir)/README-BENCH
	install -m 755 cpufreq-bench_script.sh $(DESTDIR)/$(docdir)/cpufreq-bench_script.sh
	install -m 644 example.cfg $(DESTDIR)/$(confdir)/cpufreq-bench.conf

# Remove build artifacts.
clean:
	rm -f $(OUTPUT)*.o
	rm -f $(OUTPUT)cpufreq-bench
{ "pile_set_name": "Github" }
<?xml version="1.0" encoding="UTF-8"?> <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd"> <plist version="1.0"> <dict> <key>CFBundleName</key> <string>EtoApp.1</string> <key>CFBundleIdentifier</key> <string>com.example.EtoApp.1</string> <key>CFBundleShortVersionString</key> <string>1.0</string> <key>LSMinimumSystemVersion</key> <string>10.12</string> <key>CFBundleDevelopmentRegion</key> <string>en</string> <key>NSHumanReadableCopyright</key> <string></string> <key>CFBundleIconFile</key> <string>Icon.icns</string> </dict> </plist>
{ "pile_set_name": "Github" }
// Copyright (c) 2011 The LevelDB Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. See the AUTHORS file for names of contributors. #include "leveldb/options.h" #include "leveldb/comparator.h" #include "leveldb/env.h" namespace leveldb { Options::Options() : comparator(BytewiseComparator()), create_if_missing(false), error_if_exists(false), paranoid_checks(false), env(Env::Default()), info_log(NULL), write_buffer_size(4<<20), max_open_files(1000), block_cache(NULL), block_size(4096), block_restart_interval(16), max_file_size(2<<20), compression(kSnappyCompression), reuse_logs(false), filter_policy(NULL) { } } // namespace leveldb
{ "pile_set_name": "Github" }
# KDU ## Kernel Driver Utility #### System Requirements + x64 Windows 7/8/8.1/10; + Administrative privilege is required. # Purpose and Features The purpose of this tool is to give a simple way to explore Windows kernel/components without doing a lot of additional work or setting up local debugger. It features: + Protected Processes Hijacking via Process object modification; + Driver Signature Enforcement Overrider (similar to DSEFIx); + Driver loader for bypassing Driver Signature Enforcement (similar to TDL/Stryker); + Support of various vulnerable drivers use as functionality "providers". #### Usage ###### KDU -ps ProcessID ###### KDU -map filename ###### KDU -dse value ###### KDU -prv ProviderID ###### KDU -list * -prv - optional, select vulnerability driver provider; * -ps - modify process object of given ProcessID; * -map - load input file as code buffer to kernel mode and run it; * -dse - write user defined value to the system DSE state flags; * -list - list currently available providers. 
Example: + kdu -ps 1234 + kdu -map c:\driverless\mysuperhack.sys + kdu -prv 1 -ps 1234 + kdu -prv 1 -map c:\driverless\mysuperhack.sys + kdu -dse 0 + kdu -dse 6 Run on Windows 10 20H2 (precomplied version) <img src="https://raw.githubusercontent.com/hfiref0x/kdu/master/Help/kdu1.png" width="600" /> Compiled and run on Windows 8.1 <img src="https://raw.githubusercontent.com/hfiref0x/kdu/master/Help/kdu2.png" width="600" /> Run on Windows 7 SP1 fully patched (precomplied version) <img src="https://raw.githubusercontent.com/hfiref0x/kdu/master/Help/kdu3.png" width="600" /> Run on Windows 10 19H2 (precompiled version, SecureBoot enabled) <img src="https://raw.githubusercontent.com/hfiref0x/kdu/master/Help/kdu4.png" width="600" /> #### Limitations of -map command Due to unusual way of loading that is not involving standard kernel loader, but uses overwriting already loaded modules with shellcode, there are some limitations: + Loaded drivers MUST BE specially designed to run as "driverless"; That mean you cannot use parameters specified at your DriverEntry as they won't be valid. That also mean you can not load *any* drivers but only specially designed or you need to alter shellcode responsible for driver mapping. + No SEH support for target drivers; There is no SEH code in x64. Instead of this you have table of try/except/finally regions which must be in the executable image described by pointer in PE header. If there is an exception occured system handler will first look in which module that happened. Mapped drivers are not inside Windows controlled list of drivers (PsLoadedModulesList - PatchGuard protected), so nothing will be found and system will simple crash. + No driver unloading; Mapped code can't unload itself, however you still can release all resources allocated by your mapped code. DRIVER_OBJECT->DriverUnload should be set to NULL. 
+ Only ntoskrnl import resolved, everything else is up to you; If your project need another module dependency then you have to rewrite this loader part. + Several Windows primitives are banned by PatchGuard from usage from the dynamic code. Because of unsual way of loading mapped driver won't be inside PsLoadedModulesList. That mean any callback registered by such code will have handler located in memory outside this list. PatchGuard has ability to check whatever the registered callbacks point to valid loaded modules or not and BSOD with "Kernel notification callout modification" if such dynamic code detected. In general if you want to know what you *should not do* in kernel look at https://github.com/hfiref0x/KDU/tree/master/Source/Examples/BadRkDemo which contain a few examples of forbidden things. #### Kernel traces note This tool does not change (and this won't change in future) internal Windows structures of MmUnloadedDrivers and/or PiDDBCacheTable. That's because: + KDU is not designed to circumvent third-party security software or various dubious crapware (e.g. anti-cheats); + These data can be a target for PatchGuard protection in the next major Windows 10 update. You use it at your own risk. Some lazy AV may flag this tool as hacktool/malware. # Currently Supported Providers + Intel Network Adapter Diagnostic Driver of version 1.03.0.7; + RTCore64 driver from MSI Afterburner of version 4.6.2 build 15658 and below; + Gdrv driver from various Gigabyte TOOLS of undefined version; + ATSZIO64 driver from ASUSTeK WinFlash utility of various versions; + MICSYS MsIo (WinIo) driver from Patriot Viper RGB utility of version 1.0; + GLCKIO2 (WinIo) driver from ASRock Polychrome RGB of version 1.0.4; + EneIo (WinIo) driver from G.SKILL Trident Z Lighting Control of version 1.00.08; + WinRing0x64 driver from EVGA Precision X1 of version 1.0.2.0; + EneTechIo (WinIo) driver from Thermaltake TOUGHRAM software of version 1.0.3. More providers maybe added in the future. 
# How it work It uses known to be vulnerable driver from legitimate software to access arbitrary kernel memory with read/write primitives. Depending on command KDU will either work as TDL/DSEFix or modify kernel mode process objects (EPROCESS). When in -map mode KDU will use 3rd party signed driver from SysInternals Process Explorer and hijack it by placing a small loader shellcode inside it IRP_MJ_DEVICE_CONTROL/IRP_MJ_CREATE/IRP_MJ_CLOSE handler. This is done by overwriting physical memory where Process Explorer dispatch handler located and triggering it by calling driver IRP_MJ_CREATE handler (CreateFile call). Next shellcode will map input driver as code buffer to kernel mode and run it with current IRQL be PASSIVE_LEVEL. After that hijacked Process Explorer driver will be unloaded together with vulnerable provider driver. This entire idea comes from malicious software of the middle of 200x known as rootkits. # Build KDU comes with full source code. In order to build from source you need Microsoft Visual Studio 2019 and later versions. For driver builds you need Microsoft Windows Driver Kit 10 and/or above. # Support and Warranties Using this program might render your computer into BSOD. Compiled binary and source code provided AS-IS in help it will be useful BUT WITHOUT WARRANTY OF ANY KIND. 
# Third party code usage * TinyAES, https://github.com/kokke/tiny-AES-c # References * DSEFix, https://github.com/hfiref0x/DSEFix * Turla Driver Loader, https://github.com/hfiref0x/TDL * Stryker, https://github.com/hfiref0x/Stryker * Unwinding RTCore, https://swapcontext.blogspot.com/2020/01/unwinding-rtcore.html * CVE-2019-16098, https://github.com/Barakat/CVE-2019-16098 * CVE-2015-2291, https://www.exploit-db.com/exploits/36392 * CVE-2018-19320, https://seclists.org/fulldisclosure/2018/Dec/39 * ATSZIO64 headers and libs, https://github.com/DOGSHITD/SciDetectorApp/tree/master/DetectSciApp * ATSZIO64 ASUS Drivers Privilege Escalation, https://github.com/LimiQS/AsusDriversPrivEscala * CVE-2019-18845, https://www.activecyber.us/activelabs/viper-rgb-driver-local-privilege-escalation-cve-2019-18845 * DEFCON27: Get off the kernel if you cant drive, https://eclypsium.com/wp-content/uploads/2019/08/EXTERNAL-Get-off-the-kernel-if-you-cant-drive-DEFCON27.pdf # Wormhole drivers code They are used in multiple products from hardware vendors mostly in unmodified state. They all break OS security model and additionally bugged. Links are for educational purposes of how not to do your drivers. Note that following github accounts have nothing to do with these code, they are just forked/uploaded it. * WinIo 3.0 BSOD/CVE generator, https://github.com/starofrainnight/winio/blob/master/Source/Drv/WinIo.c * WinRing0 BSOD/CVE generator, https://github.com/QCute/WinRing0/blob/master/dll/sys/OpenLibSys.c # Authors (c) 2020 KDU Project
{ "pile_set_name": "Github" }
/** * Alternate Sphinx design * Originally created by Armin Ronacher for Werkzeug, adapted by Georg Brandl. */ body { font-family: 'Lucida Grande', 'Lucida Sans Unicode', 'Geneva', 'Verdana', sans-serif; font-size: 14px; letter-spacing: -0.01em; line-height: 150%; text-align: center; /*background-color: #AFC1C4; */ background-color: #BFD1D4; color: black; padding: 0; border: 1px solid #aaa; margin: 0px 80px 0px 80px; min-width: 740px; } a { color: #CA7900; text-decoration: none; } a:hover { color: #2491CF; } pre { font-family: 'Consolas', 'Deja Vu Sans Mono', 'Bitstream Vera Sans Mono', monospace; font-size: 0.95em; letter-spacing: 0.015em; padding: 0.5em; border: 1px solid #ccc; background-color: #f8f8f8; } td.linenos pre { padding: 0.5em 0; border: 0; background-color: transparent; color: #aaa; } table.highlighttable { margin-left: 0.5em; } table.highlighttable td { padding: 0 0.5em 0 0.5em; } cite, code, tt { font-family: 'Consolas', 'Deja Vu Sans Mono', 'Bitstream Vera Sans Mono', monospace; font-size: 0.95em; letter-spacing: 0.01em; } hr { border: 1px solid #abc; margin: 2em; } tt { background-color: #f2f2f2; border-bottom: 1px solid #ddd; color: #333; } tt.descname { background-color: transparent; font-weight: bold; font-size: 1.2em; border: 0; } tt.descclassname { background-color: transparent; border: 0; } tt.xref { background-color: transparent; font-weight: bold; border: 0; } a tt { background-color: transparent; font-weight: bold; border: 0; color: #CA7900; } a tt:hover { color: #2491CF; } dl { margin-bottom: 15px; } dd p { margin-top: 0px; } dd ul, dd table { margin-bottom: 10px; } dd { margin-top: 3px; margin-bottom: 10px; margin-left: 30px; } .refcount { color: #060; } dt:target, .highlight { background-color: #fbe54e; } dl.class, dl.function { border-top: 2px solid #888; } dl.method, dl.attribute { border-top: 1px solid #aaa; } dl.glossary dt { font-weight: bold; font-size: 1.1em; } pre { line-height: 120%; } pre a { color: inherit; text-decoration: 
underline; } .first { margin-top: 0 !important; } div.document { background-color: white; text-align: left; background-image: url(contents.png); background-repeat: repeat-x; } /* div.documentwrapper { width: 100%; } */ div.clearer { clear: both; } div.related h3 { display: none; } div.related ul { background-image: url(navigation.png); height: 2em; list-style: none; border-top: 1px solid #ddd; border-bottom: 1px solid #ddd; margin: 0; padding-left: 10px; } div.related ul li { margin: 0; padding: 0; height: 2em; float: left; } div.related ul li.right { float: right; margin-right: 5px; } div.related ul li a { margin: 0; padding: 0 5px 0 5px; line-height: 1.75em; color: #EE9816; } div.related ul li a:hover { color: #3CA8E7; } div.body { margin: 0; padding: 0.5em 20px 20px 20px; } div.bodywrapper { margin: 0 240px 0 0; border-right: 1px solid #ccc; } div.body a { text-decoration: underline; } div.sphinxsidebar { margin: 0; padding: 0.5em 15px 15px 0; width: 210px; float: right; text-align: left; /* margin-left: -100%; */ } div.sphinxsidebar h4, div.sphinxsidebar h3 { margin: 1em 0 0.5em 0; font-size: 0.9em; padding: 0.1em 0 0.1em 0.5em; color: white; border: 1px solid #86989B; background-color: #AFC1C4; } div.sphinxsidebar ul { padding-left: 1.5em; margin-top: 7px; list-style: none; padding: 0; line-height: 130%; } div.sphinxsidebar ul ul { list-style: square; margin-left: 20px; } p { margin: 0.8em 0 0.5em 0; } p.rubric { font-weight: bold; } h1 { margin: 0; padding: 0.7em 0 0.3em 0; font-size: 1.5em; color: #11557C; } h2 { margin: 1.3em 0 0.2em 0; font-size: 1.35em; padding: 0; } h3 { margin: 1em 0 -0.3em 0; font-size: 1.2em; } h1 a, h2 a, h3 a, h4 a, h5 a, h6 a { color: black!important; } h1 a.anchor, h2 a.anchor, h3 a.anchor, h4 a.anchor, h5 a.anchor, h6 a.anchor { display: none; margin: 0 0 0 0.3em; padding: 0 0.2em 0 0.2em; color: #aaa!important; } h1:hover a.anchor, h2:hover a.anchor, h3:hover a.anchor, h4:hover a.anchor, h5:hover a.anchor, h6:hover a.anchor { 
display: inline; } h1 a.anchor:hover, h2 a.anchor:hover, h3 a.anchor:hover, h4 a.anchor:hover, h5 a.anchor:hover, h6 a.anchor:hover { color: #777; background-color: #eee; } table { border-collapse: collapse; margin: 0 -0.5em 0 -0.5em; } table td, table th { padding: 0.2em 0.5em 0.2em 0.5em; } div.footer { background-color: #E3EFF1; color: #86989B; padding: 3px 8px 3px 0; clear: both; font-size: 0.8em; text-align: right; } div.footer a { color: #86989B; text-decoration: underline; } div.pagination { margin-top: 2em; padding-top: 0.5em; border-top: 1px solid black; text-align: center; } div.sphinxsidebar ul.toc { margin: 1em 0 1em 0; padding: 0 0 0 0.5em; list-style: none; } div.sphinxsidebar ul.toc li { margin: 0.5em 0 0.5em 0; font-size: 0.9em; line-height: 130%; } div.sphinxsidebar ul.toc li p { margin: 0; padding: 0; } div.sphinxsidebar ul.toc ul { margin: 0.2em 0 0.2em 0; padding: 0 0 0 1.8em; } div.sphinxsidebar ul.toc ul li { padding: 0; } div.admonition, div.warning { font-size: 0.9em; margin: 1em 0 0 0; border: 1px solid #86989B; background-color: #f7f7f7; } div.admonition p, div.warning p { margin: 0.5em 1em 0.5em 1em; padding: 0; } div.admonition pre, div.warning pre { margin: 0.4em 1em 0.4em 1em; } div.admonition p.admonition-title, div.warning p.admonition-title { margin: 0; padding: 0.1em 0 0.1em 0.5em; color: white; border-bottom: 1px solid #86989B; font-weight: bold; background-color: #AFC1C4; } div.warning { border: 1px solid #940000; } div.warning p.admonition-title { background-color: #CF0000; border-bottom-color: #940000; } div.admonition ul, div.admonition ol, div.warning ul, div.warning ol { margin: 0.1em 0.5em 0.5em 3em; padding: 0; } div.versioninfo { margin: 1em 0 0 0; border: 1px solid #ccc; background-color: #DDEAF0; padding: 8px; line-height: 1.3em; font-size: 0.9em; } a.headerlink { color: #c60f0f!important; font-size: 1em; margin-left: 6px; padding: 0 4px 0 4px; text-decoration: none!important; visibility: hidden; } h1:hover > 
a.headerlink, h2:hover > a.headerlink, h3:hover > a.headerlink, h4:hover > a.headerlink, h5:hover > a.headerlink, h6:hover > a.headerlink, dt:hover > a.headerlink { visibility: visible; } a.headerlink:hover { background-color: #ccc; color: white!important; } table.indextable td { text-align: left; vertical-align: top; } table.indextable dl, table.indextable dd { margin-top: 0; margin-bottom: 0; } table.indextable tr.pcap { height: 10px; } table.indextable tr.cap { margin-top: 10px; background-color: #f2f2f2; } img.toggler { margin-right: 3px; margin-top: 3px; cursor: pointer; } img.inheritance { border: 0px } form.pfform { margin: 10px 0 20px 0; } table.contentstable { width: 90%; } table.contentstable p.biglink { line-height: 150%; } a.biglink { font-size: 1.3em; } span.linkdescr { font-style: italic; padding-top: 5px; font-size: 90%; } ul.search { margin: 10px 0 0 20px; padding: 0; } ul.search li { padding: 5px 0 5px 20px; background-image: url(file.png); background-repeat: no-repeat; background-position: 0 7px; } ul.search li a { font-weight: bold; } ul.search li div.context { color: #888; margin: 2px 0 0 30px; text-align: left; } ul.keywordmatches li.goodmatch a { font-weight: bold; }
{ "pile_set_name": "Github" }
#!/usr/bin/env python """ Artificial Intelligence for Humans Volume 3: Deep Learning and Neural Networks Python Version http://www.aifh.org http://www.jeffheaton.com Code repository: https://github.com/jeffheaton/aifh Copyright 2015 by Jeff Heaton Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. For more information on Heaton Research copyrights, licenses and trademarks visit: http://www.heatonresearch.com/copyright """ __author__ = 'jheaton' class AIFHError(Exception): """An error was raised. This is used for several purposes, see individual error messages.""" def __init__(self, value): self.value = value def __str__(self): return repr(self.value)
{ "pile_set_name": "Github" }
// RUN: llvm-mc -triple i686-elf -filetype asm -o - %s | FileCheck %s .type TYPE STT_FUNC // CHECK: .type TYPE,@function .type comma_TYPE, STT_FUNC // CHECK: .type comma_TYPE,@function .type at_TYPE, @STT_FUNC // CHECK: .type at_TYPE,@function .type percent_TYPE, %STT_FUNC // CHECK: .type percent_TYPE,@function .type string_TYPE, "STT_FUNC" // CHECK: .type string_TYPE,@function .type type function // CHECK: .type type,@function .type comma_type, function // CHECK: .type comma_type,@function .type at_type, @function // CHECK: .type at_type,@function .type percent_type, %function // CHECK: .type percent_type,@function .type string_type, "function" // CHECK: .type string_type,@function .type special gnu_unique_object // CHECK: .type special,@gnu_unique_object .type comma_special, gnu_unique_object // CHECK: .type comma_special,@gnu_unique_object
{ "pile_set_name": "Github" }
<b:style src="./Node.css"/> <b:style src="./Node_Expander.css"/> <b:style src="./Folder.css"/> <b:define name="selected" type="bool"/> <b:define name="collapsed" type="bool"/> <b:define name="disabled" type="bool"/> <li class="Basis-TreeNode"> <div{content} class="Basis-TreeNode-Title"> <div class="Basis-TreeNode_Expander Basis-TreeNode_Expander__{collapsed}" event-click="toggle" /> <span class="Basis-TreeNode-Caption Basis-TreeNode-FolderCaption Basis-TreeNode-FolderCaption_{collapsed} Basis-TreeNode-Caption__{disabled} Basis-TreeNode-Caption__{selected}" event-click="select"> {title} </span> </div> <ul{childNodesElement} class="Basis-TreeNode-Content Basis-TreeNode-Content__{collapsed}"/> </li>
{ "pile_set_name": "Github" }
// Copyright Oliver Kowalke 2009. // Distributed under the Boost Software License, Version 1.0. // (See accompanying file LICENSE_1_0.txt or copy at // http://www.boost.org/LICENSE_1_0.txt) #include <boost/config.hpp> #if defined(BOOST_USE_SEGMENTED_STACKS) # if defined(BOOST_WINDOWS) # error "segmented stacks are not supported by Windows" # else # include <boost/coroutine/posix/segmented_stack_allocator.hpp> # endif #endif
{ "pile_set_name": "Github" }
#ifdef __OBJC__ #import <UIKit/UIKit.h> #else #ifndef FOUNDATION_EXPORT #if defined(__cplusplus) #define FOUNDATION_EXPORT extern "C" #else #define FOUNDATION_EXPORT extern #endif #endif #endif #import "MotionOrientation.h" FOUNDATION_EXPORT double MotionOrientation_PTEzVersionNumber; FOUNDATION_EXPORT const unsigned char MotionOrientation_PTEzVersionString[];
{ "pile_set_name": "Github" }
/* * This file is part of the SDWebImage package. * (c) Olivier Poitrey <[email protected]> * * For the full copyright and license information, please view the LICENSE * file that was distributed with this source code. */ #import <Foundation/Foundation.h> #import "SDWebImageCompat.h" #import "SDImageCoder.h" /// A player to control the playback of animated image, which can be used to drive Animated ImageView or any rendering usage, like CALayer/WatchKit/SwiftUI rendering. @interface SDAnimatedImagePlayer : NSObject /// Current playing frame image. This value is KVO Compliance. @property (nonatomic, readonly, nullable) UIImage *currentFrame; /// Current frame index, zero based. This value is KVO Compliance. @property (nonatomic, readonly) NSUInteger currentFrameIndex; /// Current loop count since its latest animating. This value is KVO Compliance. @property (nonatomic, readonly) NSUInteger currentLoopCount; /// Total frame count for niamted image rendering. Defaults is animated image's frame count. /// @note For progressive animation, you can update this value when your provider receive more frames. @property (nonatomic, assign) NSUInteger totalFrameCount; /// Total loop count for animated image rendering. Default is animated image's loop count. @property (nonatomic, assign) NSUInteger totalLoopCount; /// The animation playback rate. Default is 1.0 /// `1.0` means the normal speed. /// `0.0` means stopping the animation. /// `0.0-1.0` means the slow speed. /// `> 1.0` means the fast speed. /// `< 0.0` is not supported currently and stop animation. (may support reverse playback in the future) @property (nonatomic, assign) double playbackRate; /// Provide a max buffer size by bytes. This is used to adjust frame buffer count and can be useful when the decoding cost is expensive (such as Animated WebP software decoding). Default is 0. /// `0` means automatically adjust by calculating current memory usage. 
/// `1` means without any buffer cache, each of frames will be decoded and then be freed after rendering. (Lowest Memory and Highest CPU) /// `NSUIntegerMax` means cache all the buffer. (Lowest CPU and Highest Memory) @property (nonatomic, assign) NSUInteger maxBufferSize; /// You can specify a runloop mode to let it rendering. /// Default is NSRunLoopCommonModes on multi-core device, NSDefaultRunLoopMode on single-core device @property (nonatomic, copy, nonnull) NSRunLoopMode runLoopMode; /// Create a player with animated image provider. If the provider's `animatedImageFrameCount` is less than 1, returns nil. /// The provider can be any protocol implementation, like `SDAnimatedImage`, `SDImageGIFCoder`, etc. /// @note This provider can represent mutable content, like prorgessive animated loading. But you need to update the frame count by yourself /// @param provider The animated provider - (nullable instancetype)initWithProvider:(nonnull id<SDAnimatedImageProvider>)provider; /// Create a player with animated image provider. If the provider's `animatedImageFrameCount` is less than 1, returns nil. /// The provider can be any protocol implementation, like `SDAnimatedImage` or `SDImageGIFCoder`, etc. /// @note This provider can represent mutable content, like prorgessive animated loading. But you need to update the frame count by yourself /// @param provider The animated provider + (nullable instancetype)playerWithProvider:(nonnull id<SDAnimatedImageProvider>)provider; /// The handler block when current frame and index changed. @property (nonatomic, copy, nullable) void (^animationFrameHandler)(NSUInteger index, UIImage * _Nonnull frame); /// The handler block when one loop count finished. @property (nonatomic, copy, nullable) void (^animationLoopHandler)(NSUInteger loopCount); /// Return the status whehther animation is playing. @property (nonatomic, readonly) BOOL isPlaying; /// Start the animation. Or resume the previously paused animation. 
- (void)startPlaying; /// Pause the aniamtion. Keep the current frame index and loop count. - (void)pausePlaying; /// Stop the animation. Reset the current frame index and loop count. - (void)stopPlaying; /// Seek to the desired frame index and loop count. /// @note This can be used for advanced control like progressive loading, or skipping specify frames. /// @param index The frame index /// @param loopCount The loop count - (void)seekToFrameAtIndex:(NSUInteger)index loopCount:(NSUInteger)loopCount; /// Clear the frame cache buffer. The frame cache buffer size can be controled by `maxBufferSize`. /// By default, when stop or pause the animation, the frame buffer is still kept to ready for the next restart - (void)clearFrameBuffer; @end
{ "pile_set_name": "Github" }
/** * The MIT License * Copyright (c) 2015 Estonian Information System Authority (RIA), Population Register Centre (VRK) * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. */ package ee.ria.xroad.proxy.testsuite.testcases; import ee.ria.xroad.proxy.testsuite.Message; import ee.ria.xroad.proxy.testsuite.MessageTestCase; import org.apache.commons.io.IOUtils; import org.eclipse.jetty.server.Request; import org.eclipse.jetty.server.handler.AbstractHandler; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import java.io.IOException; import static ee.ria.xroad.common.ErrorCodes.SERVER_CLIENTPROXY_X; import static ee.ria.xroad.common.ErrorCodes.X_IO_ERROR; import static ee.ria.xroad.common.ErrorCodes.X_SERVICE_FAILED_X; /** * Client sends normal message, SP aborts connection (content type: text/xml). * Result: CP responds with RequestFailed */ public class ServerProxyConnectionAborted2 extends MessageTestCase { /** * Constructs the test case. 
*/ public ServerProxyConnectionAborted2() { requestFileName = "getstate.query"; } @Override public String getProviderAddress(String providerName) { return "127.0.0.2"; } @Override public AbstractHandler getServerProxyHandler() { return new AbstractHandler() { @Override public void handle(String target, Request baseRequest, HttpServletRequest request, HttpServletResponse response) throws IOException { // Read all of the request. IOUtils.readLines(request.getInputStream()); response.setContentType("text/xml"); response.setContentLength(1000); response.getOutputStream().close(); response.flushBuffer(); baseRequest.setHandled(true); } }; } @Override protected void validateFaultResponse(Message receivedResponse) { assertErrorCode(SERVER_CLIENTPROXY_X, X_SERVICE_FAILED_X, X_IO_ERROR); } }
{ "pile_set_name": "Github" }
from __future__ import absolute_import from builtins import str from .base_monitor import BaseMonitor from kafka import KafkaProducer from kafka.common import KafkaUnavailableError from scutils.method_timer import MethodTimer from retrying import retry import json import sys import traceback class KafkaBaseMonitor(BaseMonitor): ''' Base monitor for handling outbound Kafka results ''' def setup(self, settings): ''' Setup the handler @param settings: The loaded settings file ''' self.producer = self._create_producer(settings) self.topic_prefix = settings['KAFKA_TOPIC_PREFIX'] self.use_appid_topics = settings['KAFKA_APPID_TOPICS'] self.logger.debug("Successfully connected to Kafka in {name}" .format(name=self.__class__.__name__)) @retry(wait_exponential_multiplier=500, wait_exponential_max=10000) def _create_producer(self, settings): """Tries to establish a Kafka consumer connection""" try: brokers = settings['KAFKA_HOSTS'] self.logger.debug("Creating new kafka producer using brokers: " + str(brokers)) return KafkaProducer(bootstrap_servers=brokers, value_serializer=lambda m: json.dumps(m), retries=3, linger_ms=settings['KAFKA_PRODUCER_BATCH_LINGER_MS'], buffer_memory=settings['KAFKA_PRODUCER_BUFFER_BYTES']) except KeyError as e: self.logger.error('Missing setting named ' + str(e), {'ex': traceback.format_exc()}) except: self.logger.error("Couldn't initialize kafka producer in plugin.", {'ex': traceback.format_exc()}) raise def _kafka_success(self, response): ''' Callback for successful send ''' self.logger.debug("Sent message to Kafka") def _kafka_failure(self, response): ''' Callback for failed send ''' self.logger.error("Failed to send message to Kafka") def _send_to_kafka(self, master): ''' Sends the message back to Kafka @param master: the final dict to send @returns: True if successfully sent to kafka ''' appid_topic = "{prefix}.outbound_{appid}".format( prefix=self.topic_prefix, appid=master['appid']) firehose_topic = "{prefix}.outbound_firehose".format( 
prefix=self.topic_prefix) try: # dont want logger in outbound kafka message if self.use_appid_topics: f1 = self.producer.send(appid_topic, master) f1.add_callback(self._kafka_success) f1.add_errback(self._kafka_failure) f2 = self.producer.send(firehose_topic, master) f2.add_callback(self._kafka_success) f2.add_errback(self._kafka_failure) return True except Exception as ex: message = "An exception '{0}' occured while sending a message " \ "to kafka. Arguments:\n{1!r}" \ .format(type(ex).__name__, ex.args) self.logger.error(message) return False def close(self): self.producer.flush() self.producer.close(timeout=10)
{ "pile_set_name": "Github" }
f.lux-xcode =========== This installs the f.lux iOS app on your device without requiring a jailbreak. Learn more about f.lux at <https://justgetflux.com/> Why isn't this in the app store? -------------------------------- This app changes the color of all running apps on your phone, even when f.lux is not directly open. Such functionality is not allowed in the [App Store Review Guidelines](<https://developer.apple.com/app-store/review/guidelines/>), however this type of app is possible. How do I get this on my phone? ------------------------------ 1. Download (click releases above for file), then open with XCode 2. Plug in your phone 3. Select your phone from the device menu (next to the "Play" and "Stop" buttons) 4. Click "Play" How does it work? ----------------- There is an opaque, non-open-source app called `iflux` in this project. We trick Xcode into signing and installing this app on your phone by: 1. Building a dummy app "just an app" 2. Splicing in the opaque binary during the build process 3. Letting Xcode sign and install the app as normal There are build errors ---------------------- We are building the binary twice, once using source code, and again by splicing. This duplication is reported as an error by XCode.
{ "pile_set_name": "Github" }
/* YUI 3.7.3 (build 5687) Copyright 2012 Yahoo! Inc. All rights reserved. Licensed under the BSD License. http://yuilibrary.com/license/ */ if (typeof _yuitest_coverage == "undefined"){ _yuitest_coverage = {}; _yuitest_coverline = function(src, line){ var coverage = _yuitest_coverage[src]; if (!coverage.lines[line]){ coverage.calledLines++; } coverage.lines[line]++; }; _yuitest_coverfunc = function(src, name, line){ var coverage = _yuitest_coverage[src], funcId = name + ":" + line; if (!coverage.functions[funcId]){ coverage.calledFunctions++; } coverage.functions[funcId]++; }; } _yuitest_coverage["build/yui-later/yui-later.js"] = { lines: {}, functions: {}, coveredLines: 0, calledLines: 0, coveredFunctions: 0, calledFunctions: 0, path: "build/yui-later/yui-later.js", code: [] }; _yuitest_coverage["build/yui-later/yui-later.js"].code=["YUI.add('yui-later', function (Y, NAME) {","","/**"," * Provides a setTimeout/setInterval wrapper. This module is a `core` YUI module, <a href=\"../classes/YUI.html#method_later\">it's documentation is located under the YUI class</a>."," *"," * @module yui"," * @submodule yui-later"," */","","var NO_ARGS = [];","","/**"," * Executes the supplied function in the context of the supplied"," * object 'when' milliseconds later. Executes the function a"," * single time unless periodic is set to true."," * @for YUI"," * @method later"," * @param when {int} the number of milliseconds to wait until the fn"," * is executed."," * @param o the context object."," * @param fn {Function|String} the function to execute or the name of"," * the method in the 'o' object to execute."," * @param data [Array] data that is provided to the function. This"," * accepts either a single item or an array. 
If an array is provided,"," * the function is executed with one parameter for each array item."," * If you need to pass a single array parameter, it needs to be wrapped"," * in an array [myarray]."," *"," * Note: native methods in IE may not have the call and apply methods."," * In this case, it will work, but you are limited to four arguments."," *"," * @param periodic {boolean} if true, executes continuously at supplied"," * interval until canceled."," * @return {object} a timer object. Call the cancel() method on this"," * object to stop the timer."," */","Y.later = function(when, o, fn, data, periodic) {"," when = when || 0;"," data = (!Y.Lang.isUndefined(data)) ? Y.Array(data) : NO_ARGS;"," o = o || Y.config.win || Y;",""," var cancelled = false,"," method = (o && Y.Lang.isString(fn)) ? o[fn] : fn,"," wrapper = function() {"," // IE 8- may execute a setInterval callback one last time"," // after clearInterval was called, so in order to preserve"," // the cancel() === no more runny-run, we have to jump through"," // an extra hoop."," if (!cancelled) {"," if (!method.apply) {"," method(data[0], data[1], data[2], data[3]);"," } else {"," method.apply(o, data || NO_ARGS);"," }"," }"," },"," id = (periodic) ? 
setInterval(wrapper, when) : setTimeout(wrapper, when);",""," return {"," id: id,"," interval: periodic,"," cancel: function() {"," cancelled = true;"," if (this.interval) {"," clearInterval(id);"," } else {"," clearTimeout(id);"," }"," }"," };","};","","Y.Lang.later = Y.later;","","","","}, '3.7.3', {\"requires\": [\"yui-base\"]});"]; _yuitest_coverage["build/yui-later/yui-later.js"].lines = {"1":0,"10":0,"37":0,"38":0,"39":0,"40":0,"42":0,"49":0,"50":0,"51":0,"53":0,"59":0,"63":0,"64":0,"65":0,"67":0,"73":0}; _yuitest_coverage["build/yui-later/yui-later.js"].functions = {"wrapper:44":0,"cancel:62":0,"later:37":0,"(anonymous 1):1":0}; _yuitest_coverage["build/yui-later/yui-later.js"].coveredLines = 17; _yuitest_coverage["build/yui-later/yui-later.js"].coveredFunctions = 4; _yuitest_coverline("build/yui-later/yui-later.js", 1); YUI.add('yui-later', function (Y, NAME) { /** * Provides a setTimeout/setInterval wrapper. This module is a `core` YUI module, <a href="../classes/YUI.html#method_later">it's documentation is located under the YUI class</a>. * * @module yui * @submodule yui-later */ _yuitest_coverfunc("build/yui-later/yui-later.js", "(anonymous 1)", 1); _yuitest_coverline("build/yui-later/yui-later.js", 10); var NO_ARGS = []; /** * Executes the supplied function in the context of the supplied * object 'when' milliseconds later. Executes the function a * single time unless periodic is set to true. * @for YUI * @method later * @param when {int} the number of milliseconds to wait until the fn * is executed. * @param o the context object. * @param fn {Function|String} the function to execute or the name of * the method in the 'o' object to execute. * @param data [Array] data that is provided to the function. This * accepts either a single item or an array. If an array is provided, * the function is executed with one parameter for each array item. * If you need to pass a single array parameter, it needs to be wrapped * in an array [myarray]. 
* * Note: native methods in IE may not have the call and apply methods. * In this case, it will work, but you are limited to four arguments. * * @param periodic {boolean} if true, executes continuously at supplied * interval until canceled. * @return {object} a timer object. Call the cancel() method on this * object to stop the timer. */ _yuitest_coverline("build/yui-later/yui-later.js", 37); Y.later = function(when, o, fn, data, periodic) { _yuitest_coverfunc("build/yui-later/yui-later.js", "later", 37); _yuitest_coverline("build/yui-later/yui-later.js", 38); when = when || 0; _yuitest_coverline("build/yui-later/yui-later.js", 39); data = (!Y.Lang.isUndefined(data)) ? Y.Array(data) : NO_ARGS; _yuitest_coverline("build/yui-later/yui-later.js", 40); o = o || Y.config.win || Y; _yuitest_coverline("build/yui-later/yui-later.js", 42); var cancelled = false, method = (o && Y.Lang.isString(fn)) ? o[fn] : fn, wrapper = function() { // IE 8- may execute a setInterval callback one last time // after clearInterval was called, so in order to preserve // the cancel() === no more runny-run, we have to jump through // an extra hoop. _yuitest_coverfunc("build/yui-later/yui-later.js", "wrapper", 44); _yuitest_coverline("build/yui-later/yui-later.js", 49); if (!cancelled) { _yuitest_coverline("build/yui-later/yui-later.js", 50); if (!method.apply) { _yuitest_coverline("build/yui-later/yui-later.js", 51); method(data[0], data[1], data[2], data[3]); } else { _yuitest_coverline("build/yui-later/yui-later.js", 53); method.apply(o, data || NO_ARGS); } } }, id = (periodic) ? 
setInterval(wrapper, when) : setTimeout(wrapper, when); _yuitest_coverline("build/yui-later/yui-later.js", 59); return { id: id, interval: periodic, cancel: function() { _yuitest_coverfunc("build/yui-later/yui-later.js", "cancel", 62); _yuitest_coverline("build/yui-later/yui-later.js", 63); cancelled = true; _yuitest_coverline("build/yui-later/yui-later.js", 64); if (this.interval) { _yuitest_coverline("build/yui-later/yui-later.js", 65); clearInterval(id); } else { _yuitest_coverline("build/yui-later/yui-later.js", 67); clearTimeout(id); } } }; }; _yuitest_coverline("build/yui-later/yui-later.js", 73); Y.Lang.later = Y.later; }, '3.7.3', {"requires": ["yui-base"]});
{ "pile_set_name": "Github" }
package br.com.swconsultoria.nfe.schema_4.retConsReciNFe;

import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlID;
import javax.xml.bind.annotation.XmlSchemaType;
import javax.xml.bind.annotation.XmlType;
import javax.xml.bind.annotation.adapters.CollapsedStringAdapter;
import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter;


/**
 * <p>Java class for the KeyInfoType complex type (W3C XML Digital Signature
 * namespace). JAXB-generated binding: carries a single mandatory X509Data
 * element plus an optional XML ID attribute.
 *
 * <p>The following schema fragment specifies the expected content contained
 * within this class.
 *
 * <pre>
 * &lt;complexType name="KeyInfoType">
 *   &lt;complexContent>
 *     &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
 *       &lt;sequence>
 *         &lt;element name="X509Data" type="{http://www.w3.org/2000/09/xmldsig#}X509DataType"/>
 *       &lt;/sequence>
 *       &lt;attribute name="Id" type="{http://www.w3.org/2001/XMLSchema}ID" />
 *     &lt;/restriction>
 *   &lt;/complexContent>
 * &lt;/complexType>
 * </pre>
 *
 *
 */
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "KeyInfoType", namespace = "http://www.w3.org/2000/09/xmldsig#", propOrder = {
    "x509Data"
})
public class KeyInfoType {

    // Required X509Data child element (certificate-related signature data).
    @XmlElement(name = "X509Data", namespace = "http://www.w3.org/2000/09/xmldsig#", required = true)
    protected X509DataType x509Data;
    // Optional xsd:ID attribute; CollapsedStringAdapter normalizes whitespace.
    @XmlAttribute(name = "Id")
    @XmlJavaTypeAdapter(CollapsedStringAdapter.class)
    @XmlID
    @XmlSchemaType(name = "ID")
    protected String id;

    /**
     * Gets the value of the x509Data property.
     *
     * @return
     *     possible object is
     *     {@link X509DataType }
     *
     */
    public X509DataType getX509Data() {
        return x509Data;
    }

    /**
     * Sets the value of the x509Data property.
     *
     * @param value
     *     allowed object is
     *     {@link X509DataType }
     *
     */
    public void setX509Data(X509DataType value) {
        this.x509Data = value;
    }

    /**
     * Gets the value of the id property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getId() {
        return id;
    }

    /**
     * Sets the value of the id property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setId(String value) {
        this.id = value;
    }

}
{ "pile_set_name": "Github" }
'use strict'; angular.module("ngLocale", [], ["$provide", function($provide) { var PLURAL_CATEGORY = {ZERO: "zero", ONE: "one", TWO: "two", FEW: "few", MANY: "many", OTHER: "other"}; function getDecimals(n) { n = n + ''; var i = n.indexOf('.'); return (i == -1) ? 0 : n.length - i - 1; } function getVF(n, opt_precision) { var v = opt_precision; if (undefined === v) { v = Math.min(getDecimals(n), 3); } var base = Math.pow(10, v); var f = ((n * base) | 0) % base; return {v: v, f: f}; } $provide.value("$locale", { "DATETIME_FORMATS": { "AMPMS": [ "AM", "PM" ], "DAY": [ "\u0432\u043e\u0441\u043a\u0440\u0435\u0441\u0435\u043d\u044c\u0435", "\u043f\u043e\u043d\u0435\u0434\u0435\u043b\u044c\u043d\u0438\u043a", "\u0432\u0442\u043e\u0440\u043d\u0438\u043a", "\u0441\u0440\u0435\u0434\u0430", "\u0447\u0435\u0442\u0432\u0435\u0440\u0433", "\u043f\u044f\u0442\u043d\u0438\u0446\u0430", "\u0441\u0443\u0431\u0431\u043e\u0442\u0430" ], "ERANAMES": [ "\u0434\u043e \u043d. \u044d.", "\u043d. \u044d." ], "ERAS": [ "\u0434\u043e \u043d. \u044d.", "\u043d. \u044d." 
], "FIRSTDAYOFWEEK": 0, "MONTH": [ "\u044f\u043d\u0432\u0430\u0440\u044f", "\u0444\u0435\u0432\u0440\u0430\u043b\u044f", "\u043c\u0430\u0440\u0442\u0430", "\u0430\u043f\u0440\u0435\u043b\u044f", "\u043c\u0430\u044f", "\u0438\u044e\u043d\u044f", "\u0438\u044e\u043b\u044f", "\u0430\u0432\u0433\u0443\u0441\u0442\u0430", "\u0441\u0435\u043d\u0442\u044f\u0431\u0440\u044f", "\u043e\u043a\u0442\u044f\u0431\u0440\u044f", "\u043d\u043e\u044f\u0431\u0440\u044f", "\u0434\u0435\u043a\u0430\u0431\u0440\u044f" ], "SHORTDAY": [ "\u0432\u0441", "\u043f\u043d", "\u0432\u0442", "\u0441\u0440", "\u0447\u0442", "\u043f\u0442", "\u0441\u0431" ], "SHORTMONTH": [ "\u044f\u043d\u0432.", "\u0444\u0435\u0432\u0440.", "\u043c\u0430\u0440\u0442\u0430", "\u0430\u043f\u0440.", "\u043c\u0430\u044f", "\u0438\u044e\u043d\u044f", "\u0438\u044e\u043b\u044f", "\u0430\u0432\u0433.", "\u0441\u0435\u043d\u0442.", "\u043e\u043a\u0442.", "\u043d\u043e\u044f\u0431.", "\u0434\u0435\u043a." ], "STANDALONEMONTH": [ "\u044f\u043d\u0432\u0430\u0440\u044c", "\u0444\u0435\u0432\u0440\u0430\u043b\u044c", "\u043c\u0430\u0440\u0442", "\u0430\u043f\u0440\u0435\u043b\u044c", "\u043c\u0430\u0439", "\u0438\u044e\u043d\u044c", "\u0438\u044e\u043b\u044c", "\u0430\u0432\u0433\u0443\u0441\u0442", "\u0441\u0435\u043d\u0442\u044f\u0431\u0440\u044c", "\u043e\u043a\u0442\u044f\u0431\u0440\u044c", "\u043d\u043e\u044f\u0431\u0440\u044c", "\u0434\u0435\u043a\u0430\u0431\u0440\u044c" ], "WEEKENDRANGE": [ 5, 6 ], "fullDate": "EEEE, d MMMM y '\u0433'.", "longDate": "d MMMM y '\u0433'.", "medium": "d MMM y '\u0433'. 
H:mm:ss", "mediumDate": "d MMM y '\u0433'.", "mediumTime": "H:mm:ss", "short": "dd.MM.yy H:mm", "shortDate": "dd.MM.yy", "shortTime": "H:mm" }, "NUMBER_FORMATS": { "CURRENCY_SYM": "\u0440\u0443\u0431.", "DECIMAL_SEP": ",", "GROUP_SEP": "\u00a0", "PATTERNS": [ { "gSize": 3, "lgSize": 3, "maxFrac": 3, "minFrac": 0, "minInt": 1, "negPre": "-", "negSuf": "", "posPre": "", "posSuf": "" }, { "gSize": 3, "lgSize": 3, "maxFrac": 2, "minFrac": 2, "minInt": 1, "negPre": "-", "negSuf": "\u00a0\u00a4", "posPre": "", "posSuf": "\u00a0\u00a4" } ] }, "id": "ru", "localeID": "ru", "pluralCat": function(n, opt_precision) { var i = n | 0; var vf = getVF(n, opt_precision); if (vf.v == 0 && i % 10 == 1 && i % 100 != 11) { return PLURAL_CATEGORY.ONE; } if (vf.v == 0 && i % 10 >= 2 && i % 10 <= 4 && (i % 100 < 12 || i % 100 > 14)) { return PLURAL_CATEGORY.FEW; } if (vf.v == 0 && i % 10 == 0 || vf.v == 0 && i % 10 >= 5 && i % 10 <= 9 || vf.v == 0 && i % 100 >= 11 && i % 100 <= 14) { return PLURAL_CATEGORY.MANY; } return PLURAL_CATEGORY.OTHER;} }); }]);
{ "pile_set_name": "Github" }
/*
 * Copyright (c) 2012, 2013 Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

// Basic data-type tags and id typedefs shared by the JVM trace framework.

#ifndef SHARE_VM_TRACE_TRACEDATATYPES_HPP
#define SHARE_VM_TRACE_TRACEDATATYPES_HPP

#include <stddef.h>

#include "utilities/globalDefinitions.hpp"

// Content-type tags for trace fields. The first seven have explicit fixed
// values; the next five continue sequentially from CONTENT_TYPE_ADDRESS.
// The [JVM_CONTENT_TYPES_START, JVM_CONTENT_TYPES_END] range is set aside
// for additional JVM-defined content types.
enum {
  CONTENT_TYPE_NONE        = 0,
  CONTENT_TYPE_BYTES       = 1,
  CONTENT_TYPE_EPOCHMILLIS = 2,
  CONTENT_TYPE_MILLIS      = 3,
  CONTENT_TYPE_NANOS       = 4,
  CONTENT_TYPE_TICKS       = 5,
  CONTENT_TYPE_ADDRESS     = 6,

  CONTENT_TYPE_OSTHREAD,
  CONTENT_TYPE_JAVALANGTHREAD,
  CONTENT_TYPE_STACKTRACE,
  CONTENT_TYPE_CLASS,
  CONTENT_TYPE_PERCENTAGE,

  JVM_CONTENT_TYPES_START = 30,
  JVM_CONTENT_TYPES_END   = 100
};

// Event ids reserved by the trace framework itself.
// NUM_RESERVED_EVENTS doubles as the count of reserved entries.
enum ReservedEvent {
  EVENT_PRODUCERS,
  EVENT_CHECKPOINT,
  EVENT_BUFFERLOST,
  NUM_RESERVED_EVENTS
};

typedef enum ReservedEvent ReservedEvent;

// Identifier typedefs for trace records; u8 comes from globalDefinitions.hpp.
typedef u8 classid;
typedef u8 stacktraceid;
typedef u8 methodid;
typedef u8 fieldid;

// Forward declaration only; defined elsewhere in the trace framework.
class TraceUnicodeString;

#endif // SHARE_VM_TRACE_TRACEDATATYPES_HPP
{ "pile_set_name": "Github" }
// Copyright 2008, Google Inc. // All rights reserved. // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: // // * Redistributions of source code must retain the above copyright // notice, this list of conditions and the following disclaimer. // * Redistributions in binary form must reproduce the above // copyright notice, this list of conditions and the following disclaimer // in the documentation and/or other materials provided with the // distribution. // * Neither the name of Google Inc. nor the names of its // contributors may be used to endorse or promote products derived from // this software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. // // Tests for Google Test itself. This verifies that the basic constructs of // Google Test work. #include "gtest/gtest.h" #include "test/googletest-param-test-test.h" using ::testing::Values; using ::testing::internal::ParamGenerator; // Tests that generators defined in a different translation unit // are functional. The test using extern_gen is defined // in googletest-param-test-test.cc. 
// Namespace-scope (external linkage) generator; the test that consumes
// `extern_gen` lives in googletest-param-test-test.cc (see comment above).
ParamGenerator<int> extern_gen = Values(33);

// Tests that a parameterized test case can be defined in one translation unit
// and instantiated in another. The test is defined in
// googletest-param-test-test.cc and ExternalInstantiationTest fixture class is
// defined in gtest-param-test_test.h.
INSTANTIATE_TEST_SUITE_P(MultiplesOf33, ExternalInstantiationTest,
                         Values(33, 66));

// Tests that a parameterized test case can be instantiated
// in multiple translation units. Another instantiation is defined
// in googletest-param-test-test.cc and
// InstantiationInMultipleTranslationUnitsTest fixture is defined in
// gtest-param-test_test.h
INSTANTIATE_TEST_SUITE_P(Sequence2, InstantiationInMultipleTranslationUnitsTest,
                         Values(42*3, 42*4, 42*5));
{ "pile_set_name": "Github" }
# `lisk account` Commands relating to Lisk accounts. * [`lisk account:create`](#lisk-account-create) * [`lisk account:get ADDRESSES`](#lisk-account-get-addresses) * [`lisk account:show`](#lisk-account-show) ## `lisk account:create` Returns a randomly-generated mnemonic passphrase with its corresponding public/private key pair and Lisk address. ``` USAGE $ lisk account:create OPTIONS -j, --[no-]json Prints output in JSON format. You can change the default behaviour in your config.json file. -n, --number=number [default: 1] Number of accounts to create. --[no-]pretty Prints JSON in pretty format rather than condensed. Has no effect if the output is set to table. You can change the default behaviour in your config.json file. DESCRIPTION Returns a randomly-generated mnemonic passphrase with its corresponding public/private key pair and Lisk address. EXAMPLES account:create account:create --number=3 ``` ## `lisk account:get ADDRESSES` Gets account information from the blockchain. ``` USAGE $ lisk account:get ADDRESSES ARGUMENTS ADDRESSES Comma-separated address(es) to get information about. OPTIONS -j, --[no-]json Prints output in JSON format. You can change the default behaviour in your config.json file. --[no-]pretty Prints JSON in pretty format rather than condensed. Has no effect if the output is set to table. You can change the default behaviour in your config.json file. DESCRIPTION Gets account information from the blockchain. EXAMPLES account:get 3520445367460290306L account:get 3520445367460290306L,2802325248134221536L ``` ## `lisk account:show` Shows account information for a given passphrase. ``` USAGE $ lisk account:show OPTIONS -j, --[no-]json Prints output in JSON format. You can change the default behaviour in your config.json file. -p, --passphrase=passphrase Specifies a source for your secret passphrase. Lisk Commander will prompt you for input if this option is not set. Source must be one of `prompt`, `pass`, `env`, `file` or `stdin`. 
For `pass`, `env` and `file` a corresponding identifier must also be provided. Examples: - --passphrase=prompt (default behaviour) - --passphrase='pass:my secret passphrase' (should only be used where security is not important) - --passphrase=env:SECRET_PASSPHRASE - --passphrase=file:/path/to/my/passphrase.txt (takes the first line only) - --passphrase=stdin (takes one line only) --[no-]pretty Prints JSON in pretty format rather than condensed. Has no effect if the output is set to table. You can change the default behaviour in your config.json file. DESCRIPTION Shows account information for a given passphrase. EXAMPLE account:show ```
{ "pile_set_name": "Github" }
#include <rosePublicConfig.h>
#ifdef ROSE_BUILD_BINARY_ANALYSIS_SUPPORT
#include <sage3basic.h>

#include <BinarySmtCommandLine.h>
#include <BinarySmtSolver.h>

namespace Rose {
namespace BinaryAnalysis {

// Prints one "solver \"NAME\" is [not ]available" line per known SMT solver
// (plus the always-available "none" pseudo-solver) to the given stream.
// Returns true iff at least one real solver is available.
bool
listSmtSolverNames(std::ostream &out) {
    BinaryAnalysis::SmtSolver::Availability solvers = BinaryAnalysis::SmtSolver::availability();
    bool foundSolver = false;
    out <<"solver \"none\" is available\n";                 // "none" is always a valid choice
    BOOST_FOREACH (BinaryAnalysis::SmtSolver::Availability::value_type &node, solvers) {
        out <<"solver \"" <<node.first <<"\" is " <<(node.second?"":"not ") <<"available\n";
        if (node.second)
            foundSolver = true;
    }
    return foundSolver;
}

// Returns the empty string if @p name is a recognized solver name, otherwise a
// human-readable error message (with the name C-escaped for safe display).
std::string
validateSmtSolverName(const std::string &name) {
    BinaryAnalysis::SmtSolver::Availability solvers = BinaryAnalysis::SmtSolver::availability();
    if (solvers.find(name) != solvers.end())
        return "";
    return "SMT solver \"" + StringUtility::cEscape(name) + "\" is not recognized";
}

// Name of the best available solver, or the empty string if none is available.
std::string
bestSmtSolverName() {
    std::string name;
    if (const BinaryAnalysis::SmtSolverPtr &solver = BinaryAnalysis::SmtSolver::bestAvailable())
        name = solver->name();
    return name;
}

// Validates a --smt-solver style command-line argument. "list" prints the
// available solvers to stdout and exits with status 0; "", "none", and "best"
// are accepted as-is; anything else must be a recognized solver name or the
// process exits with status 1 after printing an error to @p out.
void
checkSmtCommandLineArg(const std::string &arg, const std::string &listSwitch, std::ostream &out) {
    if ("list" == arg) {
        listSmtSolverNames(std::cout);                      // listing is informational, goes to stdout
        std::cout <<"solver \"best\" is an alias for \"" <<bestSmtSolverName() <<"\"\n";
        exit(0);
    } else if ("" == arg || "none" == arg || "best" == arg) {
        // no solver requested (or auto-selection); nothing to validate
    } else {
        std::string err = validateSmtSolverName(arg);
        if (!err.empty()) {
            out <<err <<"\n";
            if (!listSwitch.empty())
                out <<"use \"" <<listSwitch <<"\" to get a list of supported solvers.\n";
            exit(1);
        }
    }
}

// Builds the man-page style documentation string for an --smt-solver switch,
// enumerating which solver interfaces were (and were not) configured into this
// build of ROSE, and spelling out what the default value @p dfltValue means.
std::string
smtSolverDocumentationString(const std::string &dfltValue) {
    using namespace StringUtility;
    std::string docstr = "Specifies which connection is used to interface to an SMT solver for analyses that don't "
                         "otherwise specify a solver. The choices are names of solver interfaces, \"none\" "
                         "(or the empty string), \"best\", or \"list\".";

    SmtSolver::Availability solvers = SmtSolver::availability();
    std::vector<std::string> enabled, disabled;
    BOOST_FOREACH (const SmtSolver::Availability::value_type &node, solvers) {
        if (node.second) {
            enabled.push_back("\"" + cEscape(node.first) + "\"");
        } else {
            disabled.push_back("\"" + cEscape(node.first) + "\"");
        }
    }

    if (enabled.empty()) {
        docstr += " ROSE was not configured with any SMT solvers.";
    } else {
        docstr += " The following solvers are available in this configuration: " + joinEnglish(enabled) + ".";
    }
    if (!disabled.empty()) {
        docstr += " These solvers would be available, but were not configured: " + joinEnglish(disabled) + ".";
    }

    docstr += " In general, solvers ending with \"-exe\" translate the ROSE internal representation to text, send the "
              "text to a solver executable program which then parses it to another internal representation, solves, "
              "converts its internal representation to text, which ROSE then reads and parses. These \"-exe\" parsers "
              "are therefore quite slow, but work well for debugging. On the other hand, the \"-lib\" parsers use "
              "a solver library and can avoid two of the four translation steps, but don't produce much debugging "
              "output. To debug solvers, enable the " + SmtSolver::mlog.name() + " diagnostic facility (see @s{log}).";

    docstr += " The default is \"" + dfltValue + "\"";
    if ("best" == dfltValue) {
        if (SmtSolverPtr solver = SmtSolver::bestAvailable()) {
            docstr += ", which currently means \"" + solver->name() + "\".";
        } else {
            // BUGFIX: was ", which currently mean \"none\"." (grammatical error
            // in user-facing documentation text).
            docstr += ", which currently means \"none\".";
        }
    } else {
        docstr += ".";
    }
    return docstr;
}

// Command-line parser hook: validates the --smt-solver switch value, writing
// diagnostics to the parser's error stream when one is configured, else stderr.
void
SmtSolverValidator::operator()(const Sawyer::CommandLine::ParserResult &cmdline) {
    ASSERT_require(cmdline.have("smt-solver"));
    std::string arg = cmdline.parsed("smt-solver", 0).as<std::string>();
    if (cmdline.parser().errorStream().get()) {
        checkSmtCommandLineArg(arg, "--smt-solver=list", *cmdline.parser().errorStream().get());
    } else {
        checkSmtCommandLineArg(arg, "--smt-solver=list", std::cerr);
    }
}

} // namespace
} // namespace

#endif
{ "pile_set_name": "Github" }
# SLF4J SimpleLogger configuration.
# Default level for loggers without an explicit per-logger setting:
# only WARN and ERROR messages are emitted.
org.slf4j.simpleLogger.defaultLogLevel=warn
{ "pile_set_name": "Github" }
//===--- ParallelUtilities.cpp -------------------------------------------===//
//
// The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// Helpers for running a piece of work over every BinaryFunction in a
// BinaryContext, either sequentially or on a shared thread pool, with the
// work partitioned into roughly equal-cost blocks.
//
//===----------------------------------------------------------------------===//

#include "ParallelUtilities.h"
#include "llvm/Support/Timer.h"
#include <mutex>
#include <shared_mutex>

#define DEBUG_TYPE "par-utils"

namespace opts {
extern cl::OptionCategory BoltCategory;

// Size of the shared thread pool; defaults to the hardware concurrency.
cl::opt<unsigned>
ThreadCount("thread-count",
            cl::desc("number of threads"),
            cl::init(hardware_concurrency()),
            cl::cat(BoltCategory));

// Global kill switch: forces all runOnEachFunction* calls to run sequentially.
cl::opt<bool>
NoThreads("no-threads",
          cl::desc("disable multithreading"),
          cl::init(false),
          cl::cat(BoltCategory));

// Over-decomposition factor: more tasks than threads improves load balancing.
cl::opt<unsigned>
TaskCount("tasks-per-thread",
          cl::desc("number of tasks to be created per thread"),
          cl::init(20),
          cl::cat(BoltCategory));

} // namespace opts

namespace llvm {
namespace bolt {
namespace ParallelUtilities {

namespace {

/// A single thread pool that is used to run parallel tasks
std::unique_ptr<ThreadPool> ThreadPoolPtr;

// Estimated cost of processing one function under the given scheduling
// policy. Skipped functions cost 0 (except under SP_TRIVIAL, where every
// function counts as 1).
unsigned computeCostFor(const BinaryFunction &BF,
                        const PredicateTy &SkipPredicate,
                        const SchedulingPolicy &SchedPolicy) {
  if (SchedPolicy == SchedulingPolicy::SP_TRIVIAL)
    return 1;

  if (SkipPredicate && SkipPredicate(BF))
    return 0;

  switch (SchedPolicy) {
  case SchedulingPolicy::SP_CONSTANT:
    return 1;
  case SchedulingPolicy::SP_INST_LINEAR:
    return BF.getSize();
  case SchedulingPolicy::SP_INST_QUADRATIC:
    return BF.getSize() * BF.getSize();
  case SchedulingPolicy::SP_BB_LINEAR:
    return BF.size();
  case SchedulingPolicy::SP_BB_QUADRATIC:
    return BF.size() * BF.size();
  default:
    llvm_unreachable("unsupported scheduling policy");
  }
}

// Sums computeCostFor over all functions. NOTE: may rewrite SchedPolicy to
// SP_TRIVIAL (the parameter is a mutable reference) when the estimate is 0,
// so that callers still get a usable non-zero partitioning.
inline unsigned estimateTotalCost(const BinaryContext &BC,
                                  const PredicateTy &SkipPredicate,
                                  SchedulingPolicy &SchedPolicy) {
  if (SchedPolicy == SchedulingPolicy::SP_TRIVIAL)
    return BC.getBinaryFunctions().size();

  unsigned TotalCost = 0;
  for (auto &BFI : BC.getBinaryFunctions()) {
    auto &BF = BFI.second;
    TotalCost += computeCostFor(BF, SkipPredicate, SchedPolicy);
  }

  // Switch to trivial scheduling if total estimated work is zero
  if (TotalCost == 0) {
    outs() << "BOLT-WARNING: Running parallel work of 0 estimated cost, will "
              "switch to trivial scheduling.\n";

    SchedPolicy = SP_TRIVIAL;
    TotalCost = BC.getBinaryFunctions().size();
  }
  return TotalCost;
}

} // namespace

// Lazily constructs the process-wide thread pool with opts::ThreadCount
// workers and returns it on every subsequent call.
// NOTE(review): initialization is not guarded by a lock — presumably only
// ever called from the main thread; confirm before calling concurrently.
ThreadPool &getThreadPool() {
  if (ThreadPoolPtr.get())
    return *ThreadPoolPtr;

  ThreadPoolPtr = std::make_unique<ThreadPool>(opts::ThreadCount);
  return *ThreadPoolPtr;
}

// Runs WorkFunction on every non-skipped function in BC. Work is partitioned
// into contiguous blocks of approximately equal estimated cost and submitted
// to the shared pool; falls back to a single sequential pass when threading
// is disabled or ForceSequential is set. Blocks until all work completes.
void runOnEachFunction(BinaryContext &BC, SchedulingPolicy SchedPolicy,
                       WorkFuncTy WorkFunction, PredicateTy SkipPredicate,
                       std::string LogName, bool ForceSequential,
                       unsigned TasksPerThread) {
  if (BC.getBinaryFunctions().size() == 0)
    return;

  // Processes the half-open iterator range [BlockBegin, BlockEnd), timing the
  // pass in debug builds under LogName.
  auto runBlock = [&](std::map<uint64_t, BinaryFunction>::iterator BlockBegin,
                      std::map<uint64_t, BinaryFunction>::iterator BlockEnd) {
    Timer T(LogName, LogName);
    DEBUG(T.startTimer());

    for (auto It = BlockBegin; It != BlockEnd; ++It) {
      auto &BF = It->second;
      if (SkipPredicate && SkipPredicate(BF))
        continue;

      WorkFunction(BF);
    }
    DEBUG(T.stopTimer());
  };

  if (opts::NoThreads || ForceSequential) {
    runBlock(BC.getBinaryFunctions().begin(), BC.getBinaryFunctions().end());
    return;
  }

  // Estimate the overall runtime cost using the scheduling policy
  const unsigned TotalCost = estimateTotalCost(BC, SkipPredicate, SchedPolicy);
  const unsigned BlocksCount = TasksPerThread * opts::ThreadCount;
  const unsigned BlockCost =
      TotalCost > BlocksCount ? TotalCost / BlocksCount : 1;

  // Divide work into blocks of equal cost
  ThreadPool &Pool = getThreadPool();
  auto BlockBegin = BC.getBinaryFunctions().begin();
  unsigned CurrentCost = 0;

  for (auto It = BC.getBinaryFunctions().begin();
       It != BC.getBinaryFunctions().end(); ++It) {
    auto &BF = It->second;
    CurrentCost += computeCostFor(BF, SkipPredicate, SchedPolicy);

    if (CurrentCost >= BlockCost) {
      Pool.async(runBlock, BlockBegin, std::next(It));
      BlockBegin = std::next(It);
      CurrentCost = 0;
    }
  }
  // Submit the trailing partial block (may be empty if the loop ended exactly
  // on a block boundary — runBlock handles an empty range harmlessly).
  Pool.async(runBlock, BlockBegin, BC.getBinaryFunctions().end());
  Pool.wait();
}

// Same partitioning as runOnEachFunction, but each task additionally receives
// a unique MCPlus annotation-allocator id so tasks can annotate instructions
// without contending on a shared allocator. MainLock (writer held here,
// readers held by tasks) postpones task execution until all allocators have
// been created; allocator id 0 is reserved for the sequential path.
void runOnEachFunctionWithUniqueAllocId(
    BinaryContext &BC, SchedulingPolicy SchedPolicy,
    WorkFuncWithAllocTy WorkFunction, PredicateTy SkipPredicate,
    std::string LogName, bool ForceSequential, unsigned TasksPerThread) {
  if (BC.getBinaryFunctions().size() == 0)
    return;

  std::shared_timed_mutex MainLock;
  // Processes [BlockBegin, BlockEnd) with the given allocator id; takes a
  // shared lock so it cannot start while the scheduler below still holds the
  // unique lock.
  auto runBlock = [&](std::map<uint64_t, BinaryFunction>::iterator BlockBegin,
                      std::map<uint64_t, BinaryFunction>::iterator BlockEnd,
                      MCPlusBuilder::AllocatorIdTy AllocId) {
    Timer T(LogName, LogName);
    DEBUG(T.startTimer());
    std::shared_lock<std::shared_timed_mutex> Lock(MainLock);
    for (auto It = BlockBegin; It != BlockEnd; ++It) {
      auto &BF = It->second;
      if (SkipPredicate && SkipPredicate(BF))
        continue;

      WorkFunction(BF, AllocId);
    }
    DEBUG(T.stopTimer());
  };

  if (opts::NoThreads || ForceSequential) {
    runBlock(BC.getBinaryFunctions().begin(), BC.getBinaryFunctions().end(),
             0);
    return;
  }

  // This lock is used to postpone task execution
  std::unique_lock<std::shared_timed_mutex> Lock(MainLock);

  // Estimate the overall runtime cost using the scheduling policy
  const unsigned TotalCost = estimateTotalCost(BC, SkipPredicate, SchedPolicy);
  const unsigned BlocksCount = TasksPerThread * opts::ThreadCount;
  const unsigned BlockCost =
      TotalCost > BlocksCount ? TotalCost / BlocksCount : 1;

  // Divide work into blocks of equal cost
  ThreadPool &Pool = getThreadPool();
  auto BlockBegin = BC.getBinaryFunctions().begin();
  unsigned CurrentCost = 0;
  unsigned AllocId = 1;
  for (auto It = BC.getBinaryFunctions().begin();
       It != BC.getBinaryFunctions().end(); ++It) {
    auto &BF = It->second;
    CurrentCost += computeCostFor(BF, SkipPredicate, SchedPolicy);

    if (CurrentCost >= BlockCost) {
      // Lazily create an allocator per task; ids are expected to be handed
      // out sequentially by initializeNewAnnotationAllocator.
      if (!BC.MIB->checkAllocatorExists(AllocId)) {
        auto Id = BC.MIB->initializeNewAnnotationAllocator();
        assert(AllocId == Id && "unexpected allocator id created");
      }
      Pool.async(runBlock, BlockBegin, std::next(It), AllocId);
      AllocId++;
      BlockBegin = std::next(It);
      CurrentCost = 0;
    }
  }
  // Final partial block gets its own allocator as well.
  if (!BC.MIB->checkAllocatorExists(AllocId)) {
    auto Id = BC.MIB->initializeNewAnnotationAllocator();
    assert(AllocId == Id && "unexpected allocator id created");
  }
  Pool.async(runBlock, BlockBegin, BC.getBinaryFunctions().end(), AllocId);
  // All allocators exist now; release the tasks, then wait for completion.
  Lock.unlock();
  Pool.wait();
}

} // namespace ParallelUtilities
} // namespace bolt
} // namespace llvm
{ "pile_set_name": "Github" }
// CodeMirror, copyright (c) by Marijn Haverbeke and others
// Distributed under an MIT license: http://codemirror.net/LICENSE

// Syntax-highlighting mode for Common Lisp.

(function(mod) {
  if (typeof exports == "object" && typeof module == "object") // CommonJS
    mod(require("../../lib/codemirror"));
  else if (typeof define == "function" && define.amd) // AMD
    define(["../../lib/codemirror"], mod);
  else // Plain browser env
    mod(CodeMirror);
})(function(CodeMirror) {
"use strict";

CodeMirror.defineMode("commonlisp", function (config) {
  // Operators whose forms conventionally take an indented body (with-*,
  // def*, do*, prog*, *case, cond, *bind, *when, *unless).
  var assumeBody = /^with|^def|^do|^prog|case$|^cond$|bind$|when$|unless$/;
  // Decimal/float/ratio literals plus #b/#o/#x radix literals.
  var numLiteral = /^(?:[+\-]?(?:\d+|\d*\.\d+)(?:[efd][+\-]?\d+)?|[+\-]?\d+(?:\/[+\-]?\d+)?|#b[+\-]?[01]+|#o[+\-]?[0-7]+|#x[+\-]?[\da-f]+)/;
  // Any character that may appear inside a symbol name.
  var symbol = /[^\s'`,@()\[\]";]/;
  // Side channel from the tokenizer to token(): "ws", "open", "close",
  // "symbol", or null for the most recent token.
  var type;

  // Consumes the rest of a symbol; a backslash escapes the next character.
  function readSym(stream) {
    var ch;
    while (ch = stream.next()) {
      if (ch == "\\") stream.next();
      else if (!symbol.test(ch)) { stream.backUp(1); break; }
    }
    return stream.current();
  }

  // Top-level tokenizer state: classifies the next token and sets `type`.
  function base(stream, state) {
    if (stream.eatSpace()) {type = "ws"; return null;}
    if (stream.match(numLiteral)) return "number";
    var ch = stream.next();
    if (ch == "\\") ch = stream.next();

    if (ch == '"') return (state.tokenize = inString)(stream, state);
    else if (ch == "(") { type = "open"; return "bracket"; }
    else if (ch == ")" || ch == "]") { type = "close"; return "bracket"; }
    else if (ch == ";") { stream.skipToEnd(); type = "ws"; return "comment"; }
    else if (/['`,@]/.test(ch)) return null;
    else if (ch == "|") {
      // |...| escaped symbol; unterminated bars are flagged as an error.
      if (stream.skipTo("|")) { stream.next(); return "symbol"; }
      else { stream.skipToEnd(); return "error"; }
    } else if (ch == "#") {
      // Reader macros: #( vectors, #+/-/=/. conditionals, #n# labels,
      // #| block comments, #: uninterned symbols.
      var ch = stream.next();
      if (ch == "[") { type = "open"; return "bracket"; }
      else if (/[+\-=\.']/.test(ch)) return null;
      else if (/\d/.test(ch) && stream.match(/^\d*#/)) return null;
      else if (ch == "|") return (state.tokenize = inComment)(stream, state);
      else if (ch == ":") { readSym(stream); return "meta"; }
      else return "error";
    }
    else {
      var name = readSym(stream);
      if (name == ".") return null;
      type = "symbol";
      if (name == "nil" || name == "t") return "atom";
      if (name.charAt(0) == ":") return "keyword";
      if (name.charAt(0) == "&") return "variable-2";
      return "variable";
    }
  }

  // Inside a double-quoted string; a backslash escapes the next character.
  function inString(stream, state) {
    var escaped = false, next;
    while (next = stream.next()) {
      if (next == '"' && !escaped) { state.tokenize = base; break; }
      escaped = !escaped && next == "\\";
    }
    return "string";
  }

  // Inside a #| ... |# block comment; ends on the "|#" pair.
  function inComment(stream, state) {
    var next, last;
    while (next = stream.next()) {
      if (next == "#" && last == "|") { state.tokenize = base; break; }
      last = next;
    }
    type = "ws";
    return "comment";
  }

  return {
    startState: function () {
      return {ctx: {prev: null, start: 0, indentTo: 0}, tokenize: base};
    },

    token: function (stream, state) {
      if (stream.sol() && typeof state.ctx.indentTo != "number") state.ctx.indentTo = state.ctx.start + 1;

      type = null;
      var style = state.tokenize(stream, state);
      if (type != "ws") {
        if (state.ctx.indentTo == null) {
          // First token after an open paren decides the indent: body forms
          // indent by indentUnit, others align under the next argument.
          if (type == "symbol" && assumeBody.test(stream.current()))
            state.ctx.indentTo = state.ctx.start + config.indentUnit;
          else state.ctx.indentTo = "next";
        } else if (state.ctx.indentTo == "next") {
          state.ctx.indentTo = stream.column();
        }
      }
      // Push/pop the paren-context stack on brackets.
      if (type == "open") state.ctx = {prev: state.ctx, start: stream.column(), indentTo: null};
      else if (type == "close") state.ctx = state.ctx.prev || state.ctx;
      return style;
    },

    indent: function (state, _textAfter) {
      var i = state.ctx.indentTo;
      return typeof i == "number" ? i : state.ctx.start + 1;
    },

    lineComment: ";;",
    blockCommentStart: "#|",
    blockCommentEnd: "|#"
  };
});

CodeMirror.defineMIME("text/x-common-lisp", "commonlisp");

});
{ "pile_set_name": "Github" }
import * as React from 'react';
import createSvgIcon from './utils/createSvgIcon';

// Material-UI icon component for the "HighlightOff" glyph, Sharp variant.
// Built from a single hard-coded SVG <path>; createSvgIcon wraps it in the
// standard SvgIcon component and assigns the given displayName.
export default createSvgIcon(
  <path d="M14.59 8L12 10.59 9.41 8 8 9.41 10.59 12 8 14.59 9.41 16 12 13.41 14.59 16 16 14.59 13.41 12 16 9.41 14.59 8zM12 2C6.47 2 2 6.47 2 12s4.47 10 10 10 10-4.47 10-10S17.53 2 12 2zm0 18c-4.41 0-8-3.59-8-8s3.59-8 8-8 8 3.59 8 8-3.59 8-8 8z" />
, 'HighlightOffSharp');
{ "pile_set_name": "Github" }
<%inherit file="/template.html" /> <%namespace name="badge_index" file="/badge/index.html"/> <%namespace name="components" file="/components.html"/> <%def name="title()">${_("Global badges")}</%def> <%def name="breadcrumbs()"> ${h.badge.breadcrumbs(None)|n} </%def> <%block name="main_content"> ${components.flashmessages()} %for (type_, data) in c.badge_tables.items(): ${badge_index.render_tables(data['global_badges'], data['instance_badges'], data['badge_base_url'], data['badge_header'], data['badge_type'])} %endfor </%block>
{ "pile_set_name": "Github" }
// © 2016 and later: Unicode, Inc. and others. // License & terms of use: http://www.unicode.org/copyright.html // Generated using tools/cldr/cldr-to-icu/build-icu-data.xml en_NZ{ %%Parent{"en_001"} calendar{ generic{ DateTimePatterns{ "h:mm:ss a zzzz", "h:mm:ss a z", "h:mm:ss a", "h:mm a", "EEEE, d MMMM y G", "d MMMM y G", "d/MM/y G", "d/MM/y GGGGG", "{1}, {0}", "{1} 'at' {0}", "{1} 'at' {0}", "{1}, {0}", "{1}, {0}", } availableFormats{ Md{"d/M"} yyyyMd{"d/MM/y G"} } intervalFormats{ MEd{ M{"E, d/MM – E, d/MM"} d{"E, d/MM – E, d/MM"} } MMMEd{ M{"E, d MMM – E, d MMM"} d{"E, d – E, d MMM"} } Md{ M{"d/MM – d/MM"} d{"d/MM – d/MM"} } yM{ M{"MM/y – MM/y G"} y{"MM/y – MM/y G"} } yMEd{ M{"E, d/MM/y – E, d/MM/y G"} d{"E, d/MM/y – E, d/MM/y G"} y{"E, d/MM/y – E, d/MM/y G"} } yMd{ M{"d/MM/y – d/MM/y G"} d{"d/MM/y – d/MM/y G"} y{"d/MM/y – d/MM/y G"} } } } gregorian{ DateTimePatterns{ "h:mm:ss a zzzz", "h:mm:ss a z", "h:mm:ss a", "h:mm a", "EEEE, d MMMM y", "d MMMM y", "d/MM/y", "d/MM/yy", "{1}, {0}", "{1} 'at' {0}", "{1} 'at' {0}", "{1}, {0}", "{1}, {0}", } availableFormats{ Md{"d/M"} yMd{"d/MM/y"} } intervalFormats{ MEd{ M{"E, d/MM – E, d/MM"} d{"E, d/MM – E, d/MM"} } MMMEd{ M{"E, d MMM – E, d MMM"} d{"E, d – E, d MMM"} } Md{ M{"d/MM – d/MM"} d{"d/MM – d/MM"} } yMEd{ M{"E, d/MM/y – E, d/MM/y"} d{"E, d/MM/y – E, d/MM/y"} y{"E, d/MM/y – E, d/MM/y"} } yMd{ M{"d/MM/y – d/MM/y"} d{"d/MM/y – d/MM/y"} y{"d/MM/y – d/MM/y"} } } } } }
{ "pile_set_name": "Github" }
/* * cocos2d for iPhone: http://www.cocos2d-iphone.org * * Copyright (c) 2008-2010 Ricardo Quesada * Copyright (c) 2011 Zynga Inc. * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. 
* */ #import "CCActionInstant.h" #import "CCNode.h" #import "CCSprite.h" // // InstantAction // #pragma mark CCActionInstant @implementation CCActionInstant -(id) init { if( (self=[super init]) ) duration_ = 0; return self; } -(id) copyWithZone: (NSZone*) zone { CCActionInstant *copy = [[[self class] allocWithZone: zone] init]; return copy; } - (BOOL) isDone { return YES; } -(void) step: (ccTime) dt { [self update: 1]; } -(void) update: (ccTime) t { // nothing } -(CCFiniteTimeAction*) reverse { return [[self copy] autorelease]; } @end // // Show // #pragma mark CCShow @implementation CCShow -(void) update:(ccTime)time { ((CCNode *)target_).visible = YES; } -(CCFiniteTimeAction*) reverse { return [CCHide action]; } @end // // Hide // #pragma mark CCHide @implementation CCHide -(void) update:(ccTime)time { ((CCNode *)target_).visible = NO; } -(CCFiniteTimeAction*) reverse { return [CCShow action]; } @end // // ToggleVisibility // #pragma mark CCToggleVisibility @implementation CCToggleVisibility -(void) update:(ccTime)time { ((CCNode *)target_).visible = !((CCNode *)target_).visible; } @end // // FlipX // #pragma mark CCFlipX @implementation CCFlipX +(id) actionWithFlipX:(BOOL)x { return [[[self alloc] initWithFlipX:x] autorelease]; } -(id) initWithFlipX:(BOOL)x { if(( self=[super init])) flipX = x; return self; } -(void) update:(ccTime)time { [(CCSprite*)target_ setFlipX:flipX]; } -(CCFiniteTimeAction*) reverse { return [CCFlipX actionWithFlipX:!flipX]; } -(id) copyWithZone: (NSZone*) zone { CCActionInstant *copy = [[[self class] allocWithZone: zone] initWithFlipX:flipX]; return copy; } @end // // FlipY // #pragma mark CCFlipY @implementation CCFlipY +(id) actionWithFlipY:(BOOL)y { return [[[self alloc] initWithFlipY:y] autorelease]; } -(id) initWithFlipY:(BOOL)y { if(( self=[super init])) flipY = y; return self; } -(void) update:(ccTime)time { [(CCSprite*)target_ setFlipY:flipY]; } -(CCFiniteTimeAction*) reverse { return [CCFlipY actionWithFlipY:!flipY]; } -(id) 
copyWithZone: (NSZone*) zone { CCActionInstant *copy = [[[self class] allocWithZone: zone] initWithFlipY:flipY]; return copy; } @end // // Place // #pragma mark CCPlace @implementation CCPlace +(id) actionWithPosition: (CGPoint) pos { return [[[self alloc]initWithPosition:pos]autorelease]; } -(id) initWithPosition: (CGPoint) pos { if( (self=[super init]) ) position = pos; return self; } -(id) copyWithZone: (NSZone*) zone { CCActionInstant *copy = [[[self class] allocWithZone: zone] initWithPosition: position]; return copy; } -(void) update:(ccTime)time { ((CCNode *)target_).position = position; } @end // // CallFunc // #pragma mark CCCallFunc @implementation CCCallFunc @synthesize targetCallback = targetCallback_; +(id) actionWithTarget: (id) t selector:(SEL) s { return [[[self alloc] initWithTarget: t selector: s] autorelease]; } -(id) initWithTarget: (id) t selector:(SEL) s { if( (self=[super init]) ) { self.targetCallback = t; selector_ = s; } return self; } -(NSString*) description { return [NSString stringWithFormat:@"<%@ = %p | Tag = %ld | selector = %@>", [self class], self, (long)tag_, NSStringFromSelector(selector_) ]; } -(void) dealloc { [targetCallback_ release]; [super dealloc]; } -(id) copyWithZone: (NSZone*) zone { CCActionInstant *copy = [[[self class] allocWithZone: zone] initWithTarget:targetCallback_ selector:selector_]; return copy; } -(void) update:(ccTime)time { [self execute]; } -(void) execute { [targetCallback_ performSelector:selector_]; } @end // // CallFuncN // #pragma mark CCCallFuncN @implementation CCCallFuncN -(void) execute { [targetCallback_ performSelector:selector_ withObject:target_]; } @end // // CallFuncND // #pragma mark CCCallFuncND @implementation CCCallFuncND @synthesize callbackMethod = callbackMethod_; +(id) actionWithTarget:(id)t selector:(SEL)s data:(void*)d { return [[[self alloc] initWithTarget:t selector:s data:d] autorelease]; } -(id) initWithTarget:(id)t selector:(SEL)s data:(void*)d { if( (self=[super 
initWithTarget:t selector:s]) ) { data_ = d; #if COCOS2D_DEBUG NSMethodSignature * sig = [t methodSignatureForSelector:s]; // added NSAssert(sig !=0 , @"Signature not found for selector - does it have the following form? -(void)name:(id)sender data:(void*)data"); #endif callbackMethod_ = (CC_CALLBACK_ND) [t methodForSelector:s]; } return self; } -(id) copyWithZone: (NSZone*) zone { CCActionInstant *copy = [[[self class] allocWithZone: zone] initWithTarget:targetCallback_ selector:selector_ data:data_]; return copy; } -(void) dealloc { // nothing to dealloc really. Everything is dealloc on super (CCCallFuncN) [super dealloc]; } -(void) execute { callbackMethod_(targetCallback_,selector_,target_, data_); } @end @implementation CCCallFuncO @synthesize object = object_; +(id) actionWithTarget: (id) t selector:(SEL) s object:(id)object { return [[[self alloc] initWithTarget:t selector:s object:object] autorelease]; } -(id) initWithTarget:(id) t selector:(SEL) s object:(id)object { if( (self=[super initWithTarget:t selector:s] ) ) self.object = object; return self; } - (void) dealloc { [object_ release]; [super dealloc]; } -(id) copyWithZone: (NSZone*) zone { CCActionInstant *copy = [[[self class] allocWithZone: zone] initWithTarget:targetCallback_ selector:selector_ object:object_]; return copy; } -(void) execute { [targetCallback_ performSelector:selector_ withObject:object_]; } @end #pragma mark - #pragma mark Blocks #pragma mark CCCallBlock @implementation CCCallBlock +(id) actionWithBlock:(void(^)())block { return [[[self alloc] initWithBlock:block] autorelease]; } -(id) initWithBlock:(void(^)())block { if ((self = [super init])) block_ = [block copy]; return self; } -(id) copyWithZone: (NSZone*) zone { CCActionInstant *copy = [[[self class] allocWithZone: zone] initWithBlock:block_]; return copy; } -(void) update:(ccTime)time { [self execute]; } -(void) execute { block_(); } -(void) dealloc { [block_ release]; [super dealloc]; } @end #pragma mark CCCallBlockN 
@implementation CCCallBlockN +(id) actionWithBlock:(void(^)(CCNode *node))block { return [[[self alloc] initWithBlock:block] autorelease]; } -(id) initWithBlock:(void(^)(CCNode *node))block { if ((self = [super init])) block_ = [block copy]; return self; } -(id) copyWithZone: (NSZone*) zone { CCActionInstant *copy = [[[self class] allocWithZone: zone] initWithBlock:block_]; return copy; } -(void) update:(ccTime)time { [self execute]; } -(void) execute { block_(target_); } -(void) dealloc { [block_ release]; [super dealloc]; } @end #pragma mark CCCallBlockO @implementation CCCallBlockO @synthesize object=object_; +(id) actionWithBlock:(void(^)(id object))block object:(id)object { return [[[self alloc] initWithBlock:block object:object] autorelease]; } -(id) initWithBlock:(void(^)(id object))block object:(id)object { if ((self = [super init])) { block_ = [block copy]; object_ = [object retain]; } return self; } -(id) copyWithZone: (NSZone*) zone { CCActionInstant *copy = [[[self class] allocWithZone: zone] initWithBlock:block_]; return copy; } -(void) update:(ccTime)time { [self execute]; } -(void) execute { block_(object_); } -(void) dealloc { [object_ release]; [block_ release]; [super dealloc]; } @end
{ "pile_set_name": "Github" }
// Verifies basic ES2015 class semantics:
//  - the class's prototype object has a `constructor` property pointing
//    back at the class itself, and
//  - an instance field assigned inside the constructor is visible on a
//    newly constructed instance.
function test() {
  class C {
    constructor() {
      this.x = 1;
    }
  }

  const prototypeLinksBack = C.prototype.constructor === C;
  const fieldWasAssigned = new C().x === 1;
  return prototypeLinksBack && fieldWasAssigned;
}

if (test() !== true) throw new Error("Test failed");
{ "pile_set_name": "Github" }
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.Runtime.InteropServices;

namespace Microsoft.Diagnostics.Runtime.DacInterface
{
    /// <summary>
    /// Per-generation GC heap data exposed through the DAC interface.
    /// Sequential layout — presumably this must mirror an unmanaged DAC
    /// structure field-for-field, so do not reorder or insert fields.
    /// </summary>
    [StructLayout(LayoutKind.Sequential)]
    public readonly struct GenerationData
    {
        /// <summary>Address of the first heap segment of this generation.</summary>
        public readonly ClrDataAddress StartSegment;

        /// <summary>Address at which allocation starts for this generation.</summary>
        public readonly ClrDataAddress AllocationStart;

        // These are examined only for generation 0, otherwise NULL
        public readonly ClrDataAddress AllocationContextPointer;
        public readonly ClrDataAddress AllocationContextLimit;
    }
}
{ "pile_set_name": "Github" }
using System;
using System.Collections.Generic;

public static class Program
{
    /// <summary>
    /// Writes each character of a fixed test string on its own line,
    /// enumerating the string through its IEnumerable&lt;char&gt; interface.
    /// </summary>
    public static void Main()
    {
        const string text = "abcdefgh";

        // string implements IEnumerable<char>, so this assignment is the
        // same reference the original obtained via an `as` cast.
        IEnumerable<char> characters = text;

        foreach (char character in characters)
        {
            Console.WriteLine(character);
        }
    }
}
{ "pile_set_name": "Github" }
/* Copyright 2017 The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package v1alpha1 import ( "k8s.io/api/core/v1" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" ) // +genclient // +k8s:deepcopy-gen:interfaces=k8s.io/apimachinery/pkg/runtime.Object // PodPreset is a policy resource that defines additional runtime // requirements for a Pod. type PodPreset struct { metav1.TypeMeta `json:",inline"` // +optional metav1.ObjectMeta `json:"metadata,omitempty" protobuf:"bytes,1,opt,name=metadata"` // +optional Spec PodPresetSpec `json:"spec,omitempty" protobuf:"bytes,2,opt,name=spec"` } // PodPresetSpec is a description of a pod preset. type PodPresetSpec struct { // Selector is a label query over a set of resources, in this case pods. // Required. Selector metav1.LabelSelector `json:"selector,omitempty" protobuf:"bytes,1,opt,name=selector"` // Env defines the collection of EnvVar to inject into containers. // +optional Env []v1.EnvVar `json:"env,omitempty" protobuf:"bytes,2,rep,name=env"` // EnvFrom defines the collection of EnvFromSource to inject into containers. // +optional EnvFrom []v1.EnvFromSource `json:"envFrom,omitempty" protobuf:"bytes,3,rep,name=envFrom"` // Volumes defines the collection of Volume to inject into the pod. // +optional Volumes []v1.Volume `json:"volumes,omitempty" protobuf:"bytes,4,rep,name=volumes"` // VolumeMounts defines the collection of VolumeMount to inject into containers. 
// +optional VolumeMounts []v1.VolumeMount `json:"volumeMounts,omitempty" protobuf:"bytes,5,rep,name=volumeMounts"` } // +k8s:deepcopy-gen:interfaces=k8s.io/apimachinery/pkg/runtime.Object // PodPresetList is a list of PodPreset objects. type PodPresetList struct { metav1.TypeMeta `json:",inline"` // Standard list metadata. // More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#metadata // +optional metav1.ListMeta `json:"metadata,omitempty" protobuf:"bytes,1,opt,name=metadata"` // Items is a list of schema objects. Items []PodPreset `json:"items" protobuf:"bytes,2,rep,name=items"` }
{ "pile_set_name": "Github" }
// Copyright 2019 The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. package proto import ( "fmt" "google.golang.org/protobuf/reflect/protoreflect" "google.golang.org/protobuf/runtime/protoiface" ) // Merge merges src into dst, which must be a message with the same descriptor. // // Populated scalar fields in src are copied to dst, while populated // singular messages in src are merged into dst by recursively calling Merge. // The elements of every list field in src is appended to the corresponded // list fields in dst. The entries of every map field in src is copied into // the corresponding map field in dst, possibly replacing existing entries. // The unknown fields of src are appended to the unknown fields of dst. // // It is semantically equivalent to unmarshaling the encoded form of src // into dst with the UnmarshalOptions.Merge option specified. func Merge(dst, src Message) { // TODO: Should nil src be treated as semantically equivalent to a // untyped, read-only, empty message? What about a nil dst? dstMsg, srcMsg := dst.ProtoReflect(), src.ProtoReflect() if dstMsg.Descriptor() != srcMsg.Descriptor() { if got, want := dstMsg.Descriptor().FullName(), srcMsg.Descriptor().FullName(); got != want { panic(fmt.Sprintf("descriptor mismatch: %v != %v", got, want)) } panic("descriptor mismatch") } mergeOptions{}.mergeMessage(dstMsg, srcMsg) } // Clone returns a deep copy of m. // If the top-level message is invalid, it returns an invalid message as well. 
func Clone(m Message) Message { // NOTE: Most usages of Clone assume the following properties: // t := reflect.TypeOf(m) // t == reflect.TypeOf(m.ProtoReflect().New().Interface()) // t == reflect.TypeOf(m.ProtoReflect().Type().Zero().Interface()) // // Embedding protobuf messages breaks this since the parent type will have // a forwarded ProtoReflect method, but the Interface method will return // the underlying embedded message type. if m == nil { return nil } src := m.ProtoReflect() if !src.IsValid() { return src.Type().Zero().Interface() } dst := src.New() mergeOptions{}.mergeMessage(dst, src) return dst.Interface() } // mergeOptions provides a namespace for merge functions, and can be // exported in the future if we add user-visible merge options. type mergeOptions struct{} func (o mergeOptions) mergeMessage(dst, src protoreflect.Message) { methods := protoMethods(dst) if methods != nil && methods.Merge != nil { in := protoiface.MergeInput{ Destination: dst, Source: src, } out := methods.Merge(in) if out.Flags&protoiface.MergeComplete != 0 { return } } if !dst.IsValid() { panic(fmt.Sprintf("cannot merge into invalid %v message", dst.Descriptor().FullName())) } src.Range(func(fd protoreflect.FieldDescriptor, v protoreflect.Value) bool { switch { case fd.IsList(): o.mergeList(dst.Mutable(fd).List(), v.List(), fd) case fd.IsMap(): o.mergeMap(dst.Mutable(fd).Map(), v.Map(), fd.MapValue()) case fd.Message() != nil: o.mergeMessage(dst.Mutable(fd).Message(), v.Message()) case fd.Kind() == protoreflect.BytesKind: dst.Set(fd, o.cloneBytes(v)) default: dst.Set(fd, v) } return true }) if len(src.GetUnknown()) > 0 { dst.SetUnknown(append(dst.GetUnknown(), src.GetUnknown()...)) } } func (o mergeOptions) mergeList(dst, src protoreflect.List, fd protoreflect.FieldDescriptor) { // Merge semantics appends to the end of the existing list. 
for i, n := 0, src.Len(); i < n; i++ { switch v := src.Get(i); { case fd.Message() != nil: dstv := dst.NewElement() o.mergeMessage(dstv.Message(), v.Message()) dst.Append(dstv) case fd.Kind() == protoreflect.BytesKind: dst.Append(o.cloneBytes(v)) default: dst.Append(v) } } } func (o mergeOptions) mergeMap(dst, src protoreflect.Map, fd protoreflect.FieldDescriptor) { // Merge semantics replaces, rather than merges into existing entries. src.Range(func(k protoreflect.MapKey, v protoreflect.Value) bool { switch { case fd.Message() != nil: dstv := dst.NewValue() o.mergeMessage(dstv.Message(), v.Message()) dst.Set(k, dstv) case fd.Kind() == protoreflect.BytesKind: dst.Set(k, o.cloneBytes(v)) default: dst.Set(k, v) } return true }) } func (o mergeOptions) cloneBytes(v protoreflect.Value) protoreflect.Value { return protoreflect.ValueOfBytes(append([]byte{}, v.Bytes()...)) }
{ "pile_set_name": "Github" }
/* * * Support for audio capture for tm5600/6000/6010 * (c) 2007-2008 Mauro Carvalho Chehab * * Based on cx88-alsa.c * * This program is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License version 2 as * published by the Free Software Foundation. */ #include <linux/module.h> #include <linux/init.h> #include <linux/device.h> #include <linux/interrupt.h> #include <linux/usb.h> #include <linux/slab.h> #include <linux/vmalloc.h> #include <linux/delay.h> #include <sound/core.h> #include <sound/pcm.h> #include <sound/pcm_params.h> #include <sound/control.h> #include <sound/initval.h> #include "tm6000.h" #include "tm6000-regs.h" #undef dprintk #define dprintk(level, fmt, arg...) do { \ if (debug >= level) \ printk(KERN_INFO "%s/1: " fmt, chip->core->name , ## arg); \ } while (0) /**************************************************************************** Module global static vars ****************************************************************************/ static int index[SNDRV_CARDS] = SNDRV_DEFAULT_IDX; /* Index 0-MAX */ static bool enable[SNDRV_CARDS] = SNDRV_DEFAULT_ENABLE_PNP; module_param_array(enable, bool, NULL, 0444); MODULE_PARM_DESC(enable, "Enable tm6000x soundcard. 
default enabled."); module_param_array(index, int, NULL, 0444); MODULE_PARM_DESC(index, "Index value for tm6000x capture interface(s)."); /**************************************************************************** Module macros ****************************************************************************/ MODULE_DESCRIPTION("ALSA driver module for tm5600/tm6000/tm6010 based TV cards"); MODULE_AUTHOR("Mauro Carvalho Chehab"); MODULE_LICENSE("GPL"); MODULE_SUPPORTED_DEVICE("{{Trident,tm5600}," "{{Trident,tm6000}," "{{Trident,tm6010}"); static unsigned int debug; module_param(debug, int, 0644); MODULE_PARM_DESC(debug, "enable debug messages"); /**************************************************************************** Module specific funtions ****************************************************************************/ /* * BOARD Specific: Sets audio DMA */ static int _tm6000_start_audio_dma(struct snd_tm6000_card *chip) { struct tm6000_core *core = chip->core; dprintk(1, "Starting audio DMA\n"); /* Enables audio */ tm6000_set_reg_mask(core, TM6010_REQ07_RCC_ACTIVE_IF, 0x40, 0x40); tm6000_set_audio_bitrate(core, 48000); return 0; } /* * BOARD Specific: Resets audio DMA */ static int _tm6000_stop_audio_dma(struct snd_tm6000_card *chip) { struct tm6000_core *core = chip->core; dprintk(1, "Stopping audio DMA\n"); /* Disables audio */ tm6000_set_reg_mask(core, TM6010_REQ07_RCC_ACTIVE_IF, 0x00, 0x40); return 0; } static void dsp_buffer_free(struct snd_pcm_substream *substream) { struct snd_tm6000_card *chip = snd_pcm_substream_chip(substream); dprintk(2, "Freeing buffer\n"); vfree(substream->runtime->dma_area); substream->runtime->dma_area = NULL; substream->runtime->dma_bytes = 0; } static int dsp_buffer_alloc(struct snd_pcm_substream *substream, int size) { struct snd_tm6000_card *chip = snd_pcm_substream_chip(substream); dprintk(2, "Allocating buffer\n"); if (substream->runtime->dma_area) { if (substream->runtime->dma_bytes > size) return 0; 
dsp_buffer_free(substream); } substream->runtime->dma_area = vmalloc(size); if (!substream->runtime->dma_area) return -ENOMEM; substream->runtime->dma_bytes = size; return 0; } /**************************************************************************** ALSA PCM Interface ****************************************************************************/ /* * Digital hardware definition */ #define DEFAULT_FIFO_SIZE 4096 static struct snd_pcm_hardware snd_tm6000_digital_hw = { .info = SNDRV_PCM_INFO_BATCH | SNDRV_PCM_INFO_MMAP | SNDRV_PCM_INFO_INTERLEAVED | SNDRV_PCM_INFO_BLOCK_TRANSFER | SNDRV_PCM_INFO_MMAP_VALID, .formats = SNDRV_PCM_FMTBIT_S16_LE, .rates = SNDRV_PCM_RATE_CONTINUOUS | SNDRV_PCM_RATE_KNOT, .rate_min = 48000, .rate_max = 48000, .channels_min = 2, .channels_max = 2, .period_bytes_min = 64, .period_bytes_max = 12544, .periods_min = 2, .periods_max = 98, .buffer_bytes_max = 62720 * 8, }; /* * audio pcm capture open callback */ static int snd_tm6000_pcm_open(struct snd_pcm_substream *substream) { struct snd_tm6000_card *chip = snd_pcm_substream_chip(substream); struct snd_pcm_runtime *runtime = substream->runtime; int err; err = snd_pcm_hw_constraint_pow2(runtime, 0, SNDRV_PCM_HW_PARAM_PERIODS); if (err < 0) goto _error; chip->substream = substream; runtime->hw = snd_tm6000_digital_hw; snd_pcm_hw_constraint_integer(runtime, SNDRV_PCM_HW_PARAM_PERIODS); return 0; _error: dprintk(1, "Error opening PCM!\n"); return err; } /* * audio close callback */ static int snd_tm6000_close(struct snd_pcm_substream *substream) { struct snd_tm6000_card *chip = snd_pcm_substream_chip(substream); struct tm6000_core *core = chip->core; if (atomic_read(&core->stream_started) > 0) { atomic_set(&core->stream_started, 0); schedule_work(&core->wq_trigger); } return 0; } static int tm6000_fillbuf(struct tm6000_core *core, char *buf, int size) { struct snd_tm6000_card *chip = core->adev; struct snd_pcm_substream *substream = chip->substream; struct snd_pcm_runtime *runtime; int 
period_elapsed = 0; unsigned int stride, buf_pos; int length; if (atomic_read(&core->stream_started) == 0) return 0; if (!size || !substream) { dprintk(1, "substream was NULL\n"); return -EINVAL; } runtime = substream->runtime; if (!runtime || !runtime->dma_area) { dprintk(1, "runtime was NULL\n"); return -EINVAL; } buf_pos = chip->buf_pos; stride = runtime->frame_bits >> 3; if (stride == 0) { dprintk(1, "stride is zero\n"); return -EINVAL; } length = size / stride; if (length == 0) { dprintk(1, "%s: length was zero\n", __func__); return -EINVAL; } dprintk(1, "Copying %d bytes at %p[%d] - buf size=%d x %d\n", size, runtime->dma_area, buf_pos, (unsigned int)runtime->buffer_size, stride); if (buf_pos + length >= runtime->buffer_size) { unsigned int cnt = runtime->buffer_size - buf_pos; memcpy(runtime->dma_area + buf_pos * stride, buf, cnt * stride); memcpy(runtime->dma_area, buf + cnt * stride, length * stride - cnt * stride); } else memcpy(runtime->dma_area + buf_pos * stride, buf, length * stride); snd_pcm_stream_lock(substream); chip->buf_pos += length; if (chip->buf_pos >= runtime->buffer_size) chip->buf_pos -= runtime->buffer_size; chip->period_pos += length; if (chip->period_pos >= runtime->period_size) { chip->period_pos -= runtime->period_size; period_elapsed = 1; } snd_pcm_stream_unlock(substream); if (period_elapsed) snd_pcm_period_elapsed(substream); return 0; } /* * hw_params callback */ static int snd_tm6000_hw_params(struct snd_pcm_substream *substream, struct snd_pcm_hw_params *hw_params) { int size, rc; size = params_period_bytes(hw_params) * params_periods(hw_params); rc = dsp_buffer_alloc(substream, size); if (rc < 0) return rc; return 0; } /* * hw free callback */ static int snd_tm6000_hw_free(struct snd_pcm_substream *substream) { struct snd_tm6000_card *chip = snd_pcm_substream_chip(substream); struct tm6000_core *core = chip->core; if (atomic_read(&core->stream_started) > 0) { atomic_set(&core->stream_started, 0); 
schedule_work(&core->wq_trigger); } dsp_buffer_free(substream); return 0; } /* * prepare callback */ static int snd_tm6000_prepare(struct snd_pcm_substream *substream) { struct snd_tm6000_card *chip = snd_pcm_substream_chip(substream); chip->buf_pos = 0; chip->period_pos = 0; return 0; } /* * trigger callback */ static void audio_trigger(struct work_struct *work) { struct tm6000_core *core = container_of(work, struct tm6000_core, wq_trigger); struct snd_tm6000_card *chip = core->adev; if (atomic_read(&core->stream_started)) { dprintk(1, "starting capture"); _tm6000_start_audio_dma(chip); } else { dprintk(1, "stopping capture"); _tm6000_stop_audio_dma(chip); } } static int snd_tm6000_card_trigger(struct snd_pcm_substream *substream, int cmd) { struct snd_tm6000_card *chip = snd_pcm_substream_chip(substream); struct tm6000_core *core = chip->core; int err = 0; switch (cmd) { case SNDRV_PCM_TRIGGER_PAUSE_RELEASE: /* fall through */ case SNDRV_PCM_TRIGGER_RESUME: /* fall through */ case SNDRV_PCM_TRIGGER_START: atomic_set(&core->stream_started, 1); break; case SNDRV_PCM_TRIGGER_PAUSE_PUSH: /* fall through */ case SNDRV_PCM_TRIGGER_SUSPEND: /* fall through */ case SNDRV_PCM_TRIGGER_STOP: atomic_set(&core->stream_started, 0); break; default: err = -EINVAL; break; } schedule_work(&core->wq_trigger); return err; } /* * pointer callback */ static snd_pcm_uframes_t snd_tm6000_pointer(struct snd_pcm_substream *substream) { struct snd_tm6000_card *chip = snd_pcm_substream_chip(substream); return chip->buf_pos; } static struct page *snd_pcm_get_vmalloc_page(struct snd_pcm_substream *subs, unsigned long offset) { void *pageptr = subs->runtime->dma_area + offset; return vmalloc_to_page(pageptr); } /* * operators */ static struct snd_pcm_ops snd_tm6000_pcm_ops = { .open = snd_tm6000_pcm_open, .close = snd_tm6000_close, .ioctl = snd_pcm_lib_ioctl, .hw_params = snd_tm6000_hw_params, .hw_free = snd_tm6000_hw_free, .prepare = snd_tm6000_prepare, .trigger = snd_tm6000_card_trigger, 
.pointer = snd_tm6000_pointer, .page = snd_pcm_get_vmalloc_page, }; /* * create a PCM device */ /* FIXME: Control interface - How to control volume/mute? */ /**************************************************************************** Basic Flow for Sound Devices ****************************************************************************/ /* * Alsa Constructor - Component probe */ static int tm6000_audio_init(struct tm6000_core *dev) { struct snd_card *card; struct snd_tm6000_card *chip; int rc; static int devnr; char component[14]; struct snd_pcm *pcm; if (!dev) return 0; if (devnr >= SNDRV_CARDS) return -ENODEV; if (!enable[devnr]) return -ENOENT; rc = snd_card_new(&dev->udev->dev, index[devnr], "tm6000", THIS_MODULE, 0, &card); if (rc < 0) { snd_printk(KERN_ERR "cannot create card instance %d\n", devnr); return rc; } strcpy(card->driver, "tm6000-alsa"); strcpy(card->shortname, "TM5600/60x0"); sprintf(card->longname, "TM5600/60x0 Audio at bus %d device %d", dev->udev->bus->busnum, dev->udev->devnum); sprintf(component, "USB%04x:%04x", le16_to_cpu(dev->udev->descriptor.idVendor), le16_to_cpu(dev->udev->descriptor.idProduct)); snd_component_add(card, component); chip = kzalloc(sizeof(struct snd_tm6000_card), GFP_KERNEL); if (!chip) { rc = -ENOMEM; goto error; } chip->core = dev; chip->card = card; dev->adev = chip; spin_lock_init(&chip->reg_lock); rc = snd_pcm_new(card, "TM6000 Audio", 0, 0, 1, &pcm); if (rc < 0) goto error_chip; pcm->info_flags = 0; pcm->private_data = chip; strcpy(pcm->name, "Trident TM5600/60x0"); snd_pcm_set_ops(pcm, SNDRV_PCM_STREAM_CAPTURE, &snd_tm6000_pcm_ops); INIT_WORK(&dev->wq_trigger, audio_trigger); rc = snd_card_register(card); if (rc < 0) goto error_chip; dprintk(1, "Registered audio driver for %s\n", card->longname); return 0; error_chip: kfree(chip); dev->adev = NULL; error: snd_card_free(card); return rc; } static int tm6000_audio_fini(struct tm6000_core *dev) { struct snd_tm6000_card *chip; if (!dev) return 0; chip = dev->adev; 
if (!chip) return 0; if (!chip->card) return 0; snd_card_free(chip->card); chip->card = NULL; kfree(chip); dev->adev = NULL; return 0; } static struct tm6000_ops audio_ops = { .type = TM6000_AUDIO, .name = "TM6000 Audio Extension", .init = tm6000_audio_init, .fini = tm6000_audio_fini, .fillbuf = tm6000_fillbuf, }; static int __init tm6000_alsa_register(void) { return tm6000_register_extension(&audio_ops); } static void __exit tm6000_alsa_unregister(void) { tm6000_unregister_extension(&audio_ops); } module_init(tm6000_alsa_register); module_exit(tm6000_alsa_unregister);
{ "pile_set_name": "Github" }
/*
 * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 *  http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */

/*
 * Do not modify this file. This file is generated from the ssm-2014-11-06.normal.json service model.
 */
using System;
using System.Collections.Generic;
using System.Xml.Serialization;
using System.Text;
using System.IO;
using System.Net;

using Amazon.Runtime;
using Amazon.Runtime.Internal;

namespace Amazon.SimpleSystemsManagement.Model
{
    /// <summary>
    /// The filters to describe or get information about your managed instances.
    /// </summary>
    public partial class InstanceInformationStringFilter
    {
        // Backing fields; the IsSetXxx() methods below tell the request
        // marshaller whether each property was explicitly populated.
        private string _key;
        private List<string> _values = new List<string>();

        /// <summary>
        /// Gets and sets the property Key.
        /// <para>
        /// The filter key name to describe your instances. For example:
        /// </para>
        ///
        /// <para>
        /// "InstanceIds"|"AgentVersion"|"PingStatus"|"PlatformTypes"|"ActivationIds"|"IamRole"|"ResourceType"|"AssociationStatus"|"Tag
        /// Key"
        /// </para>
        /// </summary>
        [AWSProperty(Required=true, Min=1)]
        public string Key
        {
            get { return this._key; }
            set { this._key = value; }
        }

        // Check to see if Key property is set
        internal bool IsSetKey()
        {
            return this._key != null;
        }

        /// <summary>
        /// Gets and sets the property Values.
        /// <para>
        /// The filter values. Per the attribute below, between 1 and 100 values
        /// may be supplied for a single filter.
        /// </para>
        /// </summary>
        [AWSProperty(Required=true, Min=1, Max=100)]
        public List<string> Values
        {
            get { return this._values; }
            set { this._values = value; }
        }

        // Check to see if Values property is set
        internal bool IsSetValues()
        {
            return this._values != null && this._values.Count > 0;
        }

    }
}
{ "pile_set_name": "Github" }
import React from 'react';
import PropTypes from 'prop-types';

/**
 * "Navigator" icon from the Unicons set, rendered as a 24x24 inline SVG.
 *
 * @param {Object} props - `color` fills the path, `size` sets width/height;
 *   any remaining props are spread onto the root <svg> element.
 * @returns {React.ReactElement} The SVG icon element.
 */
const UilNavigator = (props) => {
  const { color, size, ...rest } = props;
  // Root <svg> attributes; extra props intentionally override the defaults.
  const svgProps = {
    xmlns: 'http://www.w3.org/2000/svg',
    width: size,
    height: size,
    viewBox: '0 0 24 24',
    fill: color,
    ...rest,
  };
  const path = React.createElement('path', {
    d: 'M20.17,9.23l-14-5.78a3,3,0,0,0-4,3.7L3.71,12,2.13,16.85A3,3,0,0,0,2.94,20a3,3,0,0,0,2,.8,3,3,0,0,0,1.15-.23l14.05-5.78a3,3,0,0,0,0-5.54ZM5.36,18.7a1,1,0,0,1-1.06-.19,1,1,0,0,1-.27-1L5.49,13H19.22ZM5.49,11,4,6.53a1,1,0,0,1,.27-1A1,1,0,0,1,5,5.22a1,1,0,0,1,.39.08L19.22,11Z'
  });
  return React.createElement('svg', svgProps, path);
};

UilNavigator.propTypes = {
  color: PropTypes.string,
  size: PropTypes.oneOfType([PropTypes.string, PropTypes.number]),
};

UilNavigator.defaultProps = {
  color: 'currentColor',
  size: '24',
};

export default UilNavigator;
{ "pile_set_name": "Github" }
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Runtime.InteropServices.WindowsRuntime;
using Windows.Foundation;
using Windows.Foundation.Collections;
using Windows.UI.Xaml;
using Windows.UI.Xaml.Controls;
using Windows.UI.Xaml.Controls.Primitives;
using Windows.UI.Xaml.Data;
using Windows.UI.Xaml.Input;
using Windows.UI.Xaml.Media;
using Windows.UI.Xaml.Navigation;
using System.Threading.Tasks;

// The Blank Page item template is documented at https://go.microsoft.com/fwlink/?LinkId=234238

namespace Admin.UWP
{
    /// <summary>
    /// Splash-style page that automatically navigates forward to
    /// <see cref="Admin_App5"/> three seconds after construction.
    /// </summary>
    public sealed partial class Admin_App4 : Page
    {
        public Admin_App4()
        {
            this.InitializeComponent();
            // Fire-and-forget: NavigateNext is async void, so the constructor
            // returns immediately and the delay runs in the background.
            NavigateNext();
        }

        // Waits 3 seconds, then navigates this page's Frame to Admin_App5.
        // NOTE(review): exceptions thrown from an async void method cannot be
        // observed by the caller — presumably acceptable for this simple
        // timed transition, but confirm Frame is always non-null here.
        private async void NavigateNext()
        {
            await Task.Delay(3000);
            this.Frame.Navigate(typeof(Admin_App5));
        }
    }
}
{ "pile_set_name": "Github" }
var arrayCopy = require('./arrayCopy'),
    isArguments = require('../lang/isArguments'),
    isArray = require('../lang/isArray'),
    isArrayLike = require('./isArrayLike'),
    isPlainObject = require('../lang/isPlainObject'),
    isTypedArray = require('../lang/isTypedArray'),
    toPlainObject = require('../lang/toPlainObject');

/**
 * A specialized version of `baseMerge` for arrays and objects which performs
 * deep merges and tracks traversed objects enabling objects with circular
 * references to be merged.
 *
 * @private
 * @param {Object} object The destination object.
 * @param {Object} source The source object.
 * @param {string} key The key of the value to merge.
 * @param {Function} mergeFunc The function to merge values.
 * @param {Function} [customizer] The function to customize merged values.
 * @param {Array} [stackA=[]] Tracks traversed source objects.
 * @param {Array} [stackB=[]] Associates values with source counterparts.
 * @returns {undefined} Mutates `object[key]` in place; does not return a value.
 */
function baseMergeDeep(object, source, key, mergeFunc, customizer, stackA, stackB) {
  var length = stackA.length,
      srcValue = source[key];

  // Circular reference: this source value was already visited, so reuse the
  // value previously merged for it instead of recursing forever.
  while (length--) {
    if (stackA[length] == srcValue) {
      object[key] = stackB[length];
      return;
    }
  }
  var value = object[key],
      result = customizer ? customizer(value, srcValue, key, object, source) : undefined,
      // `isCommon` means the customizer did not supply a value, so the
      // default merge behavior below applies.
      isCommon = result === undefined;

  if (isCommon) {
    result = srcValue;
    if (isArrayLike(srcValue) && (isArray(srcValue) || isTypedArray(srcValue))) {
      // Array-ish source: merge into the existing array, a copy of an
      // array-like destination, or a fresh array.
      result = isArray(value) ? value : (isArrayLike(value) ? arrayCopy(value) : []);
    }
    else if (isPlainObject(srcValue) || isArguments(srcValue)) {
      // Object-ish source: merge into the existing plain object, a
      // plain-object conversion of `arguments`, or a fresh object.
      result = isArguments(value) ? toPlainObject(value) : (isPlainObject(value) ? value : {});
    }
    else {
      // Primitive (or other non-mergeable) source: assign directly below.
      isCommon = false;
    }
  }
  // Add the source value to the stack of traversed objects and associate
  // it with its merged value.
  stackA.push(srcValue);
  stackB.push(result);

  if (isCommon) {
    // Recursively merge objects and arrays (susceptible to call stack limits).
    object[key] = mergeFunc(result, srcValue, customizer, stackA, stackB);
  } else if (result === result ? (result !== value) : (value === value)) {
    // NaN-safe inequality: only assign when the merged value actually
    // differs from the current one (NaN is treated as equal to NaN).
    object[key] = result;
  }
}

module.exports = baseMergeDeep;
{ "pile_set_name": "Github" }
/*- * See the file LICENSE for redistribution information. * * Copyright (c) 1999, 2011 Oracle and/or its affiliates. All rights reserved. * * $Id$ */ #include "db_config.h" #include "db_int.h" #pragma hdrstop // @v9.5.5 #include "dbinc/db_verify.h" static int __bam_safe_getdata __P((DB*, DB_THREAD_INFO*,PAGE*, uint32, int, DBT*, int *)); static int __bam_vrfy_inp __P((DB*, VRFY_DBINFO*, PAGE*, db_pgno_t, db_indx_t*, uint32)); static int __bam_vrfy_treeorder __P((DB*, DB_THREAD_INFO*, PAGE*, BINTERNAL*, BINTERNAL*, int (*)(DB *, const DBT *, const DBT *), uint32)); static int __ram_vrfy_inp __P((DB*, VRFY_DBINFO*, PAGE*, db_pgno_t, db_indx_t*, uint32)); /* * __bam_vrfy_meta -- * Verify the btree-specific part of a metadata page. * * PUBLIC: int __bam_vrfy_meta __P((DB *, VRFY_DBINFO *, BTMETA *, * PUBLIC: db_pgno_t, uint32)); */ int __bam_vrfy_meta(DB * dbp, VRFY_DBINFO * vdp, BTMETA * meta, db_pgno_t pgno, uint32 flags) { VRFY_PAGEINFO * pip; int t_ret, ret; db_indx_t ovflsize; ENV * env = dbp->env; int isbad = 0; if((ret = __db_vrfy_getpageinfo(vdp, pgno, &pip)) != 0) return ret; /* * If VRFY_INCOMPLETE is not set, then we didn't come through * __db_vrfy_pagezero and didn't incompletely * check this page--we haven't checked it at all. * Thus we need to call __db_vrfy_meta and check the common fields. * * If VRFY_INCOMPLETE is set, we've already done all the same work * in __db_vrfy_pagezero, so skip the check. */ if(!F_ISSET(pip, VRFY_INCOMPLETE) && (ret = __db_vrfy_meta(dbp, vdp, &meta->dbmeta, pgno, flags)) != 0) { if(ret == DB_VERIFY_BAD) isbad = 1; else goto err; } /* bt_minkey: must be >= 2; must produce sensible ovflsize */ /* avoid division by zero */ ovflsize = meta->minkey > 0 ? 
B_MINKEY_TO_OVFLSIZE(dbp, meta->minkey, dbp->pgsize) : 0; if(meta->minkey < 2 || ovflsize > B_MINKEY_TO_OVFLSIZE(dbp, DEFMINKEYPAGE, dbp->pgsize)) { pip->bt_minkey = 0; isbad = 1; EPRINT((env, DB_STR_A("1034", "Page %lu: nonsensical bt_minkey value %lu on metadata page", "%lu %lu"), (ulong)pgno, (ulong)meta->minkey)); } else pip->bt_minkey = meta->minkey; /* re_len: no constraints on this (may be zero or huge--we make rope) */ pip->re_pad = meta->re_pad; pip->re_len = meta->re_len; /* * The root must not be current page or 0 and it must be within * database. If this metadata page is the master meta data page * of the file, then the root page had better be page 1. */ pip->root = 0; if(meta->root == PGNO_INVALID || meta->root == pgno || !IS_VALID_PGNO(meta->root) || (pgno == PGNO_BASE_MD && meta->root != 1)) { isbad = 1; EPRINT((env, DB_STR_A("1035", "Page %lu: nonsensical root page %lu on metadata page", "%lu %lu"), (ulong)pgno, (ulong)meta->root)); } else pip->root = meta->root; /* Flags. */ if(F_ISSET(&meta->dbmeta, BTM_RENUMBER)) F_SET(pip, VRFY_IS_RRECNO); if(F_ISSET(&meta->dbmeta, BTM_SUBDB)) { /* * If this is a master db meta page, it had better not have * duplicates. 
*/ if(F_ISSET(&meta->dbmeta, BTM_DUP) && pgno == PGNO_BASE_MD) { isbad = 1; EPRINT((env, DB_STR_A("1036", "Page %lu: Btree metadata page has both duplicates and multiple databases", "%lu"), (ulong)pgno)); } F_SET(pip, VRFY_HAS_SUBDBS); } if(F_ISSET(&meta->dbmeta, BTM_DUP)) F_SET(pip, VRFY_HAS_DUPS); if(F_ISSET(&meta->dbmeta, BTM_DUPSORT)) F_SET(pip, VRFY_HAS_DUPSORT); if(F_ISSET(&meta->dbmeta, BTM_RECNUM)) F_SET(pip, VRFY_HAS_RECNUMS); if(F_ISSET(pip, VRFY_HAS_RECNUMS) && F_ISSET(pip, VRFY_HAS_DUPS)) { EPRINT((env, DB_STR_A("1037", "Page %lu: Btree metadata page illegally has both recnums and dups", "%lu"), (ulong)pgno)); isbad = 1; } if(F_ISSET(&meta->dbmeta, BTM_RECNO)) { F_SET(pip, VRFY_IS_RECNO); dbp->type = DB_RECNO; } else if(F_ISSET(pip, VRFY_IS_RRECNO)) { isbad = 1; EPRINT((env, DB_STR_A("1038", "Page %lu: metadata page has renumber flag set but is not recno", "%lu"), (ulong)pgno)); } #ifdef HAVE_COMPRESSION if(F_ISSET(&meta->dbmeta, BTM_COMPRESS)) { F_SET(pip, VRFY_HAS_COMPRESS); if(!DB_IS_COMPRESSED(dbp)) { static_cast<BTREE *>(dbp->bt_internal)->bt_compress = __bam_defcompress; static_cast<BTREE *>(dbp->bt_internal)->bt_decompress = __bam_defdecompress; } /* * Copy dup_compare to compress_dup_compare, and use the * compression duplicate compare. 
*/ if(F_ISSET(pip, VRFY_HAS_DUPSORT)) { SETIFZ(dbp->dup_compare, __bam_defcmp); if(static_cast<BTREE *>(dbp->bt_internal)->compress_dup_compare == NULL) { static_cast<BTREE *>(dbp->bt_internal)->compress_dup_compare = dbp->dup_compare; dbp->dup_compare = __bam_compress_dupcmp; } } } if(F_ISSET(pip, VRFY_HAS_RECNUMS) && F_ISSET(pip, VRFY_HAS_COMPRESS)) { EPRINT((env, DB_STR_A("1039", "Page %lu: Btree metadata page illegally has both recnums and compression", "%lu"), (ulong)pgno)); isbad = 1; } if(F_ISSET(pip, VRFY_HAS_DUPS) && !F_ISSET(pip, VRFY_HAS_DUPSORT) && F_ISSET(pip, VRFY_HAS_COMPRESS)) { EPRINT((env, DB_STR_A("1040", "Page %lu: Btree metadata page illegally has both unsorted duplicates and compression", "%lu"), (ulong)pgno)); isbad = 1; } #endif if(F_ISSET(pip, VRFY_IS_RECNO) && F_ISSET(pip, VRFY_HAS_DUPS)) { EPRINT((env, DB_STR_A("1041", "Page %lu: recno metadata page specifies duplicates", "%lu"), (ulong)pgno)); isbad = 1; } if(F_ISSET(&meta->dbmeta, BTM_FIXEDLEN)) F_SET(pip, VRFY_IS_FIXEDLEN); else if(pip->re_len > 0) { /* * It's wrong to have an re_len if it's not a fixed-length * database */ isbad = 1; EPRINT((env, DB_STR_A("1042", "Page %lu: re_len of %lu in non-fixed-length database", "%lu %lu"), (ulong)pgno, (ulong)pip->re_len)); } /* * We do not check that the rest of the page is 0, because it may * not be and may still be correct. */ err: if((t_ret = __db_vrfy_putpageinfo(env, vdp, pip)) != 0 && ret == 0) ret = t_ret; if(LF_ISSET(DB_SALVAGE) && (t_ret = __db_salvage_markdone(vdp, pgno)) != 0 && ret == 0) ret = t_ret; return (ret == 0 && isbad == 1) ? DB_VERIFY_BAD : ret; } /* * __ram_vrfy_leaf -- * Verify a recno leaf page. 
* * PUBLIC: int __ram_vrfy_leaf __P((DB *, VRFY_DBINFO *, PAGE *, db_pgno_t, * PUBLIC: uint32)); */ int __ram_vrfy_leaf(DB * dbp, VRFY_DBINFO * vdp, PAGE * h, db_pgno_t pgno, uint32 flags) { BKEYDATA * bk; ENV * env; VRFY_PAGEINFO * pip; db_indx_t i; int ret, t_ret, isbad; uint32 re_len_guess, len; env = dbp->env; isbad = 0; if((ret = __db_vrfy_getpageinfo(vdp, pgno, &pip)) != 0) return ret; if(TYPE(h) != P_LRECNO) { ret = __db_unknown_path(env, "__ram_vrfy_leaf"); goto err; } /* * Verify (and, if relevant, save off) page fields common to * all PAGEs. */ if((ret = __db_vrfy_datapage(dbp, vdp, h, pgno, flags)) != 0) { if(ret == DB_VERIFY_BAD) isbad = 1; else goto err; } /* * Verify inp[]. Return immediately if it returns DB_VERIFY_BAD; * further checks are dangerous. */ if((ret = __bam_vrfy_inp(dbp, vdp, h, pgno, &pip->entries, flags)) != 0) goto err; if(F_ISSET(pip, VRFY_HAS_DUPS)) { EPRINT((env, DB_STR_A("1043", "Page %lu: Recno database has dups", "%lu"), (ulong)pgno)); ret = DB_VERIFY_BAD; goto err; } /* * Walk through inp and see if the lengths of all the records are the * same--if so, this may be a fixed-length database, and we want to * save off this value. We know inp to be safe if we've gotten this * far. */ re_len_guess = 0; for(i = 0; i < NUM_ENT(h); i++) { bk = GET_BKEYDATA(dbp, h, i); /* KEYEMPTY. Go on. */ if(B_DISSET(bk->type)) continue; if(bk->type == B_OVERFLOW) len = ((BOVERFLOW *)bk)->tlen; else if(bk->type == B_KEYDATA) len = bk->len; else { isbad = 1; EPRINT((env, DB_STR_A("1044", "Page %lu: nonsensical type for item %lu", "%lu %lu"), (ulong)pgno, (ulong)i)); continue; } if(re_len_guess == 0) re_len_guess = len; /* * Is this item's len the same as the last one's? If not, * reset to 0 and break--we don't have a single re_len. * Otherwise, go on to the next item. */ if(re_len_guess != len) { re_len_guess = 0; break; } } pip->re_len = re_len_guess; /* Save off record count. 
*/ pip->rec_cnt = NUM_ENT(h); err: if((t_ret = __db_vrfy_putpageinfo(env, vdp, pip)) != 0 && ret == 0) ret = t_ret; return (ret == 0 && isbad == 1) ? DB_VERIFY_BAD : ret; } /* * __bam_vrfy -- * Verify a btree leaf or internal page. * * PUBLIC: int __bam_vrfy __P((DB *, VRFY_DBINFO *, PAGE *, db_pgno_t, * PUBLIC: uint32)); */ int __bam_vrfy(DB * dbp, VRFY_DBINFO * vdp, PAGE * h, db_pgno_t pgno, uint32 flags) { VRFY_PAGEINFO * pip; int ret, t_ret; ENV * env = dbp->env; int isbad = 0; if((ret = __db_vrfy_getpageinfo(vdp, pgno, &pip)) != 0) return ret; switch(TYPE(h)) { case P_IBTREE: case P_IRECNO: case P_LBTREE: case P_LDUP: break; default: ret = __db_unknown_path(env, "__bam_vrfy"); goto err; } /* * Verify (and, if relevant, save off) page fields common to * all PAGEs. */ if((ret = __db_vrfy_datapage(dbp, vdp, h, pgno, flags)) != 0) { if(ret == DB_VERIFY_BAD) isbad = 1; else goto err; } /* * The record count is, on internal pages, stored in an overloaded * next_pgno field. Save it off; we'll verify it when we check * overall database structure. We could overload the field * in VRFY_PAGEINFO, too, but this seems gross, and space * is not at such a premium. */ pip->rec_cnt = RE_NREC(h); /* * Verify inp[]. */ if(TYPE(h) == P_IRECNO) { if((ret = __ram_vrfy_inp(dbp, vdp, h, pgno, &pip->entries, flags)) != 0) goto err; } else if((ret = __bam_vrfy_inp(dbp, vdp, h, pgno, &pip->entries, flags)) != 0) { if(ret == DB_VERIFY_BAD) isbad = 1; else goto err; EPRINT((env, DB_STR_A("1045", "Page %lu: item order check unsafe: skipping", "%lu"), (ulong)pgno)); } else if(!LF_ISSET(DB_NOORDERCHK) && (ret = __bam_vrfy_itemorder(dbp, vdp, vdp->thread_info, h, pgno, 0, 0, 0, flags)) != 0) { /* * We know that the elements of inp are reasonable. * * Check that elements fall in the proper order. */ if(ret == DB_VERIFY_BAD) isbad = 1; else goto err; } err: if((t_ret = __db_vrfy_putpageinfo(env, vdp, pip)) != 0 && ret == 0) ret = t_ret; return (ret == 0 && isbad == 1) ? 
DB_VERIFY_BAD : ret; } /* * __ram_vrfy_inp -- * Verify that all entries in a P_IRECNO inp[] array are reasonable, * and count them. Note that P_LRECNO uses __bam_vrfy_inp; * P_IRECNOs are a special, and simpler, case, since they have * RINTERNALs rather than BKEYDATA/BINTERNALs. */ static int __ram_vrfy_inp(DB * dbp, VRFY_DBINFO * vdp, PAGE * h, db_pgno_t pgno, db_indx_t * nentriesp, uint32 flags) { ENV * env; RINTERNAL * ri; VRFY_CHILDINFO child; VRFY_PAGEINFO * pip; int ret, t_ret, isbad; uint32 himark, i, offset, nentries; db_indx_t * inp; uint8 * pagelayout, * p; env = dbp->env; isbad = 0; memzero(&child, sizeof(VRFY_CHILDINFO)); nentries = 0; pagelayout = NULL; if((ret = __db_vrfy_getpageinfo(vdp, pgno, &pip)) != 0) return ret; if(TYPE(h) != P_IRECNO) { ret = __db_unknown_path(env, "__ram_vrfy_inp"); goto err; } himark = dbp->pgsize; if((ret = __os_malloc(env, dbp->pgsize, &pagelayout)) != 0) goto err; memzero(pagelayout, dbp->pgsize); inp = P_INP(dbp, h); for(i = 0; i < NUM_ENT(h); i++) { if((uint8 *)inp+i >= (uint8 *)h+himark) { EPRINT((env, DB_STR_A("1046", "Page %lu: entries listing %lu overlaps data", "%lu %lu"), (ulong)pgno, (ulong)i)); ret = DB_VERIFY_BAD; goto err; } offset = inp[i]; /* * Check that the item offset is reasonable: it points * somewhere after the inp array and before the end of the * page. */ if(offset <= (uint32)((uint8 *)inp+i- (uint8 *)h) || offset > (uint32)(dbp->pgsize-RINTERNAL_SIZE)) { isbad = 1; EPRINT((env, DB_STR_A("1047", "Page %lu: bad offset %lu at index %lu", "%lu %lu %lu"), (ulong)pgno, (ulong)offset, (ulong)i)); continue; } /* Update the high-water mark (what HOFFSET should be) */ if(offset < himark) himark = offset; nentries++; /* Make sure this RINTERNAL is not multiply referenced. 
*/ ri = GET_RINTERNAL(dbp, h, i); if(pagelayout[offset] == 0) { pagelayout[offset] = 1; child.pgno = ri->pgno; child.type = V_RECNO; child.nrecs = ri->nrecs; if((ret = __db_vrfy_childput(vdp, pgno, &child)) != 0) goto err; } else { EPRINT((env, DB_STR_A("1048", "Page %lu: RINTERNAL structure at offset %lu referenced twice", "%lu %lu"), (ulong)pgno, (ulong)offset)); isbad = 1; } } for(p = pagelayout+himark; p < pagelayout+dbp->pgsize; p += RINTERNAL_SIZE) if(*p != 1) { EPRINT((env, DB_STR_A("1049", "Page %lu: gap between items at offset %lu", "%lu %lu"), (ulong)pgno, (ulong)(p-pagelayout))); isbad = 1; } if((db_indx_t)himark != HOFFSET(h)) { EPRINT((env, DB_STR_A("1050", "Page %lu: bad HOFFSET %lu, appears to be %lu", "%lu %lu %lu"), (ulong)pgno, (ulong)(HOFFSET(h)), (ulong)himark)); isbad = 1; } *nentriesp = nentries; err: if((t_ret = __db_vrfy_putpageinfo(env, vdp, pip)) != 0 && ret == 0) ret = t_ret; __os_free(env, pagelayout); return (ret == 0 && isbad == 1) ? DB_VERIFY_BAD : ret; } typedef enum { VRFY_ITEM_NOTSET = 0, VRFY_ITEM_BEGIN, VRFY_ITEM_END } VRFY_ITEM; /* * __bam_vrfy_inp -- * Verify that all entries in inp[] array are reasonable; * count them. */ static int __bam_vrfy_inp(DB * dbp, VRFY_DBINFO * vdp, PAGE * h, db_pgno_t pgno, db_indx_t * nentriesp, uint32 flags) { BKEYDATA * bk; BOVERFLOW * bo; ENV * env; VRFY_CHILDINFO child; VRFY_ITEM * pagelayout; VRFY_PAGEINFO * pip; uint32 himark, offset; /* These would be db_indx_ts but for alignment. */ uint32 i, endoff, nentries; int isbad, initem, isdupitem, ret, t_ret; env = dbp->env; isbad = isdupitem = 0; nentries = 0; memzero(&child, sizeof(VRFY_CHILDINFO)); if((ret = __db_vrfy_getpageinfo(vdp, pgno, &pip)) != 0) return ret; switch(TYPE(h)) { case P_IBTREE: case P_LBTREE: case P_LDUP: case P_LRECNO: break; default: /* * In the salvager, we might call this from a page which * we merely suspect is a btree page. Otherwise, it * shouldn't get called--if it is, that's a verifier bug. 
*/ if(LF_ISSET(DB_SALVAGE)) break; ret = __db_unknown_path(env, "__bam_vrfy_inp"); goto err; } /* * Loop through inp[], the array of items, until we either * run out of entries or collide with the data. Keep track * of h_offset in himark. * * For each element in inp[i], make sure it references a region * that starts after the end of the inp array (as defined by * NUM_ENT(h)), ends before the beginning of the page, doesn't * overlap any other regions, and doesn't have a gap between * it and the region immediately after it. */ himark = dbp->pgsize; if((ret = __os_calloc( env, dbp->pgsize, sizeof(pagelayout[0]), &pagelayout)) != 0) goto err; for(i = 0; i < NUM_ENT(h); i++) { switch(ret = __db_vrfy_inpitem(dbp, h, pgno, i, 1, flags, &himark, &offset)) { case 0: break; case DB_VERIFY_BAD: isbad = 1; continue; case DB_VERIFY_FATAL: isbad = 1; goto err; default: DB_ASSERT(env, ret != 0); break; } /* * We now have a plausible beginning for the item, and we know * its length is safe. * * Mark the beginning and end in pagelayout so we can make sure * items have no overlaps or gaps. */ bk = GET_BKEYDATA(dbp, h, i); if(pagelayout[offset] == VRFY_ITEM_NOTSET) pagelayout[offset] = VRFY_ITEM_BEGIN; else if(pagelayout[offset] == VRFY_ITEM_BEGIN) { /* * Having two inp entries that point at the same patch * of page is legal if and only if the page is * a btree leaf and they're onpage duplicate keys-- * that is, if(i % P_INDX) == 0. */ if((i%P_INDX == 0) && (TYPE(h) == P_LBTREE)) { /* Flag for later. */ F_SET(pip, VRFY_HAS_DUPS); /* Bump up nentries so we don't undercount. */ nentries++; /* * We'll check to make sure the end is * equal, too. */ isdupitem = 1; } else { isbad = 1; EPRINT((env, DB_STR_A("1051", "Page %lu: duplicated item %lu", "%lu %lu"), (ulong)pgno, (ulong)i)); } } /* * Mark the end. Its location varies with the page type * and the item type. * * If the end already has a sign other than 0, do nothing-- * it's an overlap that we'll catch later. 
*/ switch(B_TYPE(bk->type)) { case B_KEYDATA: if(TYPE(h) == P_IBTREE) /* It's a BINTERNAL. */ endoff = offset+BINTERNAL_SIZE(bk->len)-1; else endoff = offset+BKEYDATA_SIZE(bk->len)-1; break; case B_DUPLICATE: /* * Flag that we have dups; we'll check whether * that's okay during the structure check. */ F_SET(pip, VRFY_HAS_DUPS); // @fallthrough case B_OVERFLOW: /* * Overflow entries on internal pages are stored * as the _data_ of a BINTERNAL; overflow entries * on leaf pages are stored as the entire entry. */ endoff = offset+ ((TYPE(h) == P_IBTREE) ? BINTERNAL_SIZE(BOVERFLOW_SIZE) : BOVERFLOW_SIZE)-1; break; default: /* * We'll complain later; for now, just mark * a minimum. */ endoff = offset+BKEYDATA_SIZE(0)-1; break; } /* * If this is an onpage duplicate key we've seen before, * the end had better coincide too. */ if(isdupitem && pagelayout[endoff] != VRFY_ITEM_END) { EPRINT((env, DB_STR_A("1052", "Page %lu: duplicated item %lu", "%lu %lu"), (ulong)pgno, (ulong)i)); isbad = 1; } else if(pagelayout[endoff] == VRFY_ITEM_NOTSET) pagelayout[endoff] = VRFY_ITEM_END; isdupitem = 0; /* * There should be no deleted items in a quiescent tree, * except in recno. */ if(B_DISSET(bk->type) && TYPE(h) != P_LRECNO) { isbad = 1; EPRINT((env, DB_STR_A("1053", "Page %lu: item %lu marked deleted", "%lu %lu"), (ulong)pgno, (ulong)i)); } /* * Check the type and such of bk--make sure it's reasonable * for the pagetype. */ switch(B_TYPE(bk->type)) { case B_KEYDATA: /* * This is a normal, non-overflow BKEYDATA or BINTERNAL. * The only thing to check is the len, and that's * already been done. 
*/ break; case B_DUPLICATE: if(TYPE(h) == P_IBTREE) { isbad = 1; EPRINT((env, DB_STR_A("1054", "Page %lu: duplicate page referenced by internal btree page at item %lu", "%lu %lu"), (ulong)pgno, (ulong)i)); break; } else if(TYPE(h) == P_LRECNO) { isbad = 1; EPRINT((env, DB_STR_A("1055", "Page %lu: duplicate page referenced by recno page at item %lu", "%lu %lu"), (ulong)pgno, (ulong)i)); break; } // @fallthrough case B_OVERFLOW: bo = (TYPE(h) == P_IBTREE) ? (BOVERFLOW *)(((BINTERNAL *)bk)->data) : (BOVERFLOW *)bk; if(B_TYPE(bk->type) == B_OVERFLOW) /* Make sure tlen is reasonable. */ if(bo->tlen > dbp->pgsize*vdp->last_pgno) { isbad = 1; EPRINT((env, DB_STR_A("1056", "Page %lu: impossible tlen %lu, item %lu", "%lu %lu %lu"), (ulong)pgno, (ulong)bo->tlen, (ulong)i)); /* Don't save as a child. */ break; } if(!IS_VALID_PGNO(bo->pgno) || bo->pgno == pgno || bo->pgno == PGNO_INVALID) { isbad = 1; EPRINT((env, DB_STR_A("1057", "Page %lu: offpage item %lu has bad pgno %lu", "%lu %lu %lu"), (ulong)pgno, (ulong)i, (ulong)bo->pgno)); /* Don't save as a child. */ break; } child.pgno = bo->pgno; child.type = (B_TYPE(bk->type) == B_OVERFLOW ? V_OVERFLOW : V_DUPLICATE); child.tlen = bo->tlen; if((ret = __db_vrfy_childput(vdp, pgno, &child)) != 0) goto err; break; default: isbad = 1; EPRINT((env, DB_STR_A("1058", "Page %lu: item %lu of invalid type %lu", "%lu %lu %lu"), (ulong)pgno, (ulong)i, (ulong)B_TYPE(bk->type))); break; } } /* * Now, loop through and make sure the items are contiguous and * non-overlapping. */ initem = 0; for(i = himark; i < dbp->pgsize; i++) if(initem == 0) switch(pagelayout[i]) { case VRFY_ITEM_NOTSET: /* May be just for alignment. 
*/ if(i != DB_ALIGN(i, sizeof(uint32))) continue; isbad = 1; EPRINT((env, DB_STR_A("1059", "Page %lu: gap between items at offset %lu", "%lu %lu"), (ulong)pgno, (ulong)i)); /* Find the end of the gap */ for(; pagelayout[i+1] == VRFY_ITEM_NOTSET && (size_t)(i+1) < dbp->pgsize; i++) ; break; case VRFY_ITEM_BEGIN: /* We've found an item. Check its alignment. */ if(i != DB_ALIGN(i, sizeof(uint32))) { isbad = 1; EPRINT((env, DB_STR_A("1060", "Page %lu: offset %lu unaligned", "%lu %lu"), (ulong)pgno, (ulong)i)); } initem = 1; nentries++; break; case VRFY_ITEM_END: /* * We've hit the end of an item even though * we don't think we're in one; must * be an overlap. */ isbad = 1; EPRINT((env, DB_STR_A("1061", "Page %lu: overlapping items at offset %lu", "%lu %lu"), (ulong)pgno, (ulong)i)); break; } else switch(pagelayout[i]) { case VRFY_ITEM_NOTSET: // In the middle of an item somewhere. Okay. break; case VRFY_ITEM_END: // End of an item; switch to out-of-item mode. initem = 0; break; case VRFY_ITEM_BEGIN: // Hit a second item beginning without an end. Overlap. isbad = 1; EPRINT((env, DB_STR_A("1062", "Page %lu: overlapping items at offset %lu", "%lu %lu"), (ulong)pgno, (ulong)i)); break; } __os_free(env, pagelayout); /* Verify HOFFSET. */ if((db_indx_t)himark != HOFFSET(h)) { EPRINT((env, DB_STR_A("1063", "Page %lu: bad HOFFSET %lu, appears to be %lu", "%lu %lu %lu"), (ulong)pgno, (ulong)HOFFSET(h), (ulong)himark)); isbad = 1; } err: ASSIGN_PTR(nentriesp, nentries); if((t_ret = __db_vrfy_putpageinfo(env, vdp, pip)) != 0 && ret == 0) ret = t_ret; return (isbad == 1 && ret == 0) ? DB_VERIFY_BAD : ret; } /* * __bam_vrfy_itemorder -- * Make sure the items on a page sort correctly. * * Assumes that NUM_ENT(h) and inp[0]..inp[NUM_ENT(h) - 1] are * reasonable; be sure that __bam_vrfy_inp has been called first. 
* * If ovflok is set, it also assumes that overflow page chains * hanging off the current page have been sanity-checked, and so we * can use __bam_cmp to verify their ordering. If it is not set, * and we run into an overflow page, carp and return DB_VERIFY_BAD; * we shouldn't be called if any exist. * * PUBLIC: int __bam_vrfy_itemorder __P((DB *, VRFY_DBINFO *, DB_THREAD_INFO *, * PUBLIC: PAGE *, db_pgno_t, uint32, int, int, uint32)); */ int __bam_vrfy_itemorder(DB * dbp, VRFY_DBINFO * vdp, DB_THREAD_INFO * ip, PAGE * h, db_pgno_t pgno, uint32 nentries, int ovflok, int hasdups, uint32 flags) { BINTERNAL * bi; BKEYDATA * bk; BOVERFLOW * bo; BTREE * bt; DBC * dbc; DBT dbta, dbtb, dup_1, dup_2, * p1, * p2, * tmp; ENV * env; VRFY_PAGEINFO * pip; db_indx_t i, * inp; int adj, cmp, freedup_1, freedup_2, isbad, ret, t_ret; int (*dupfunc)(DB*, const DBT*, const DBT *); int (*func)(DB*, const DBT*, const DBT *); void * buf1, * buf2, * tmpbuf; /* * We need to work in the ORDERCHKONLY environment where we might * not have a pip, but we also may need to work in contexts where * NUM_ENT isn't safe. */ if(vdp) { if((ret = __db_vrfy_getpageinfo(vdp, pgno, &pip)) != 0) return ret; nentries = pip->entries; } else pip = NULL; env = dbp->env; ret = isbad = 0; bo = NULL; /* Shut up compiler. */ // (replaced by ctr) memzero(&dbta, sizeof(DBT)); F_SET(&dbta, DB_DBT_REALLOC); // (replaced by ctr) memzero(&dbtb, sizeof(DBT)); F_SET(&dbtb, DB_DBT_REALLOC); buf1 = buf2 = NULL; DB_ASSERT(env, !LF_ISSET(DB_NOORDERCHK)); dupfunc = (dbp->dup_compare == NULL) ? __bam_defcmp : dbp->dup_compare; if(TYPE(h) == P_LDUP) func = dupfunc; else { func = __bam_defcmp; if(dbp->bt_internal) { bt = static_cast<BTREE *>(dbp->bt_internal); if(bt->bt_compare) func = bt->bt_compare; } } /* * We alternate our use of dbta and dbtb so that we can walk * through the page key-by-key without copying a dbt twice. * p1 is always the dbt for index i - 1, and p2 for index i. 
* Reset the data pointers in case we are retrying. */ retry: p1 = &dbta; p1->data = NULL; p2 = &dbtb; p2->data = NULL; /* * Loop through the entries. nentries ought to contain the * actual count, and so is a safe way to terminate the loop; whether * we inc. by one or two depends on whether we're a leaf page-- * on a leaf page, we care only about keys. On internal pages * and LDUP pages, we want to check the order of all entries. * * Note that on IBTREE pages or the index page of a partitioned * database, we start with item 1, since item 0 doesn't get looked * at by __bam_cmp. */ inp = P_INP(dbp, h); adj = (TYPE(h) == P_LBTREE) ? P_INDX : O_INDX; for(i = (TYPE(h) == P_IBTREE || dbp->p_internal) ? adj : 0; i < nentries; i += adj) { /* * Put key i-1, now in p2, into p1, by swapping DBTs and bufs. */ tmp = p1; p1 = p2; p2 = tmp; tmpbuf = buf1; buf1 = buf2; buf2 = tmpbuf; /* * Get key i into p2. */ switch(TYPE(h)) { case P_IBTREE: bi = GET_BINTERNAL(dbp, h, i); if(B_TYPE(bi->type) == B_OVERFLOW) { bo = (BOVERFLOW *)(bi->data); goto overflow; } else { p2->data = bi->data; p2->size = bi->len; } /* * The leftmost key on an internal page must be * len 0, since it's just a placeholder and * automatically sorts less than all keys. * * XXX * This criterion does not currently hold! * See todo list item #1686. Meanwhile, it's harmless * to just not check for it. */ #if 0 if(i == 0 && bi->len != 0) { isbad = 1; EPRINT((env, DB_STR_A("1064", "Page %lu: lowest key on internal page of nonzero length", "%lu"), (ulong)pgno)); } #endif break; case P_LBTREE: case P_LDUP: bk = GET_BKEYDATA(dbp, h, i); if(B_TYPE(bk->type) == B_OVERFLOW) { bo = (BOVERFLOW *)bk; goto overflow; } else { p2->data = bk->data; p2->size = bk->len; } break; default: /* * This means our caller screwed up and sent us * an inappropriate page. 
*/ ret = __db_unknown_path(env, "__bam_vrfy_itemorder"); goto err; } if(0) { /* * If ovflok != 1, we can't safely go chasing * overflow pages with the normal routines now; * they might be unsafe or nonexistent. Mark this * page as incomplete and return. * * Note that we don't need to worry about freeing * buffers, since they can't have been allocated * if overflow items are unsafe. */ overflow: if(!ovflok) { F_SET(pip, VRFY_INCOMPLETE); goto err; } /* * Overflow items are safe to chase. Do so. * Fetch the overflow item into p2->data, * NULLing it or reallocing it as appropriate. * * (We set p2->data to buf2 before the call * so we're sure to realloc if we can and if p2 * was just pointing at a non-overflow item.) */ p2->data = buf2; if((ret = __db_cursor_int(dbp, ip, NULL, DB_BTREE, PGNO_INVALID, 0, DB_LOCK_INVALIDID, &dbc)) != 0) goto err; if((ret = __db_goff(dbc, p2, bo->tlen, bo->pgno, NULL, NULL)) != 0) { isbad = 1; EPRINT((env, DB_STR_A("1065", "Page %lu: error %lu in fetching overflow item %lu", "%lu %lu %lu"), (ulong)pgno, (ulong)ret, (ulong)i)); } /* In case it got realloc'ed and thus changed. */ buf2 = p2->data; } /* Compare with the last key. */ if(p1->data && p2->data) { cmp = inp[i] == inp[i-adj] ? 0 : func(dbp, p1, p2); /* comparison succeeded */ if(cmp > 0) { /* * If we are looking at an internal page, we * don't know whether it is part of the main * database or in an off-page-duplicate tree. * If the main comparator fails, retry with * the duplicate comparator. */ if(TYPE(h) == P_IBTREE && func != dupfunc) { func = dupfunc; goto retry; } isbad = 1; EPRINT((env, DB_STR_A("1066", "Page %lu: out-of-order key at entry %lu", "%lu %lu"), (ulong)pgno, (ulong)i)); /* proceed */ } else if(cmp == 0) { if(inp[i] != inp[i-adj]) { /* See above. 
*/ if(TYPE(h) == P_IBTREE && func != dupfunc) { func = dupfunc; goto retry; } isbad = 1; EPRINT((env, DB_STR_A("1067", "Page %lu: non-dup dup key at entry %lu", "%lu %lu"), (ulong)pgno, (ulong)i)); } /* * If they compared equally, this * had better be a (sub)database with dups. * Mark it so we can check during the * structure check. */ if(pip) F_SET(pip, VRFY_HAS_DUPS); else if(hasdups == 0) { /* See above. */ if(TYPE(h) == P_IBTREE && func != dupfunc) { func = dupfunc; goto retry; } isbad = 1; EPRINT((env, DB_STR_A("1068", "Page %lu: database with no duplicates has duplicated keys", "%lu"), (ulong)pgno)); } /* * If we're a btree leaf, check to see * if the data items of these on-page dups are * in sorted order. If not, flag this, so * that we can make sure during the * structure checks that the DUPSORT flag * is unset. * * At this point i points to a duplicate key. * Compare the datum before it (same key) * to the datum after it, i.e. i-1 to i+1. */ if(TYPE(h) == P_LBTREE) { /* * Unsafe; continue and we'll pick * up the bogus nentries later. */ if(i+1 >= (db_indx_t)nentries) continue; /* * We don't bother with clever memory * management with on-page dups, * as it's only really a big win * in the overflow case, and overflow * dups are probably (?) rare. */ if(((ret = __bam_safe_getdata(dbp, ip, h, i-1, ovflok, &dup_1, &freedup_1)) != 0) || ((ret = __bam_safe_getdata(dbp, ip, h, i+1, ovflok, &dup_2, &freedup_2)) != 0)) goto err; /* * If either of the data are NULL, * it's because they're overflows and * it's not safe to chase them now. * Mark an incomplete and return. */ if(dup_1.data == NULL || dup_2.data == NULL) { DB_ASSERT(env, !ovflok); F_SET(pip, VRFY_INCOMPLETE); goto err; } /* * If the dups are out of order, * flag this. It's not an error * until we do the structure check * and see whether DUPSORT is set. 
	*/
					if(dupfunc(dbp, &dup_1, &dup_2) > 0)
						F_SET(pip, VRFY_DUPS_UNSORTED);
					if(freedup_1)
						__os_ufree(env, dup_1.data);
					if(freedup_2)
						__os_ufree(env, dup_2.data);
				}
			}
		}
	}
err:
	if(pip && ((t_ret = __db_vrfy_putpageinfo(env, vdp, pip)) != 0) && ret == 0)
		ret = t_ret;
	__os_ufree(env, buf1);
	__os_ufree(env, buf2);
	return (ret == 0 && isbad == 1) ? DB_VERIFY_BAD : ret;
}
/*
 * __bam_vrfy_structure --
 *	Verify the tree structure of a btree database (including the master
 * database containing subdbs).
 *
 * Checks that the metadata page is seen exactly once, that it names a
 * root page, and that the root's page type is consistent with the flags
 * recorded on the metadata page; then recursively verifies the tree via
 * __bam_vrfy_subtree.  Returns 0, DB_VERIFY_BAD, or a system error.
 *
 * PUBLIC: int __bam_vrfy_structure __P((DB *, VRFY_DBINFO *, db_pgno_t,
 * PUBLIC:     void *, void *, uint32));
 */
int __bam_vrfy_structure(DB * dbp, VRFY_DBINFO * vdp, db_pgno_t meta_pgno, void * lp, void * rp, uint32 flags)
{
	VRFY_PAGEINFO * mip = 0;	/* verifier info for the metadata page */
	VRFY_PAGEINFO * rip = 0;	/* verifier info for the root page */
	db_pgno_t root, p;
	int t_ret, ret;
	uint32 nrecs, level, relen, stflags;
	ENV * env = dbp->env;
	DB * pgset = vdp->pgset;
	if((ret = __db_vrfy_getpageinfo(vdp, meta_pgno, &mip)) != 0)
		return ret;
	/*
	 * The metadata page must not have been counted into the page set
	 * already; two references to it means a corrupt structure.
	 */
	if((ret = __db_vrfy_pgset_get(pgset, vdp->thread_info, vdp->txn, meta_pgno, (int *)&p)) != 0)
		goto err;
	if(p != 0) {
		EPRINT((env, DB_STR_A("1069", "Page %lu: btree metadata page observed twice", "%lu"), (ulong)meta_pgno));
		ret = DB_VERIFY_BAD;
		goto err;
	}
	if((ret = __db_vrfy_pgset_inc(pgset, vdp->thread_info, vdp->txn, meta_pgno)) != 0)
		goto err;
	/* A btree metadata page must reference a root page. */
	root = mip->root;
	if(root == 0) {
		EPRINT((env, DB_STR_A("1070", "Page %lu: btree metadata page has no root", "%lu"), (ulong)meta_pgno));
		ret = DB_VERIFY_BAD;
		goto err;
	}
	if((ret = __db_vrfy_getpageinfo(vdp, root, &rip)) != 0)
		goto err;
	switch(rip->type) {
	    case P_IBTREE:
	    case P_LBTREE:
		/*
		 * Regular btree root: propagate the dup/recnum expectations
		 * recorded on the metadata page into the subtree check flags.
		 */
		stflags = flags|DB_ST_TOPLEVEL;
		if(F_ISSET(mip, VRFY_HAS_DUPS))
			stflags |= DB_ST_DUPOK;
		if(F_ISSET(mip, VRFY_HAS_DUPSORT))
			stflags |= DB_ST_DUPSORT;
		if(F_ISSET(mip, VRFY_HAS_RECNUMS))
			stflags |= DB_ST_RECNUM;
		ret = __bam_vrfy_subtree(dbp, vdp, root, lp, rp, stflags, 0, 0, 0);
		break;
	    case P_IRECNO:
	    case P_LRECNO:
		/* Recno root: always record-numbered; check re_len if fixed. */
		stflags = flags|DB_ST_RECNUM|DB_ST_IS_RECNO|DB_ST_TOPLEVEL;
		if(mip->re_len > 0)
			stflags |= DB_ST_RELEN;
		if((ret = __bam_vrfy_subtree(dbp, vdp, root, NULL, NULL, stflags, &level, &nrecs, &relen)) != 0)
			goto err;
		/*
		 * Even if mip->re_len > 0, re_len may come back zero if the
		 * tree is empty.  It should be okay to just skip the check in
		 * this case, as if there are any non-deleted keys at all,
		 * that should never happen.
		 */
		if(mip->re_len > 0 && relen > 0 && mip->re_len != relen) {
			EPRINT((env, DB_STR_A("1071", "Page %lu: recno database has bad re_len %lu", "%lu %lu"), (ulong)meta_pgno, (ulong)relen));
			ret = DB_VERIFY_BAD;
			goto err;
		}
		ret = 0;
		break;
	    case P_LDUP:
		/* Duplicate leaves hang off leaf items, never off metadata. */
		EPRINT((env, DB_STR_A("1072", "Page %lu: duplicate tree referenced from metadata page", "%lu"), (ulong)meta_pgno));
		ret = DB_VERIFY_BAD;
		break;
	    default:
		EPRINT((env, DB_STR_A("1073", "Page %lu: btree root of incorrect type %lu on metadata page", "%lu %lu"), (ulong)meta_pgno, (ulong)rip->type));
		ret = DB_VERIFY_BAD;
		break;
	}
err:
	/* Release both pageinfo handles, preserving the first error seen. */
	if(mip && ((t_ret = __db_vrfy_putpageinfo(env, vdp, mip)) != 0) && ret == 0)
		ret = t_ret;
	if(rip && ((t_ret = __db_vrfy_putpageinfo(env, vdp, rip)) != 0) && ret == 0)
		ret = t_ret;
	return ret;
}
/*
 * __bam_vrfy_subtree--
 *	Verify a subtree (or entire) btree with specified root.
 *
 * Note that this is public because it must be called to verify
 * offpage dup trees, including from hash.
* * PUBLIC: int __bam_vrfy_subtree __P((DB *, VRFY_DBINFO *, db_pgno_t, void *, * PUBLIC: void *, uint32, uint32 *, uint32 *, uint32 *)); */ int __bam_vrfy_subtree(DB * dbp, VRFY_DBINFO * vdp, db_pgno_t pgno, void * l, void * r, uint32 flags, uint32 * levelp, uint32 * nrecsp, uint32 * relenp) { BINTERNAL * li, * ri; DB * pgset; DBC * cc; DB_MPOOLFILE * mpf; ENV * env; PAGE * h = 0; VRFY_CHILDINFO * child; VRFY_PAGEINFO * pip; db_indx_t i; db_pgno_t next_pgno, prev_pgno; db_recno_t child_nrecs, nrecs; uint32 child_level, child_relen, j, level, relen, stflags; uint8 leaf_type; int (*func)(DB*, const DBT*, const DBT *); int isbad, p, ret, t_ret, toplevel; ASSIGN_PTR(levelp, 0); /* Don't leave uninitialized on error. */ ASSIGN_PTR(nrecsp, 0); env = dbp->env; mpf = dbp->mpf; next_pgno = prev_pgno = PGNO_INVALID; nrecs = 0; relen = 0; leaf_type = P_INVALID; isbad = ret = 0; /* Provide feedback on our progress to the application. */ if(!LF_ISSET(DB_SALVAGE)) __db_vrfy_struct_feedback(dbp, vdp); if((ret = __db_vrfy_getpageinfo(vdp, pgno, &pip)) != 0) return ret; cc = NULL; level = pip->bt_level; toplevel = LF_ISSET(DB_ST_TOPLEVEL) ? 1 : 0; LF_CLR(DB_ST_TOPLEVEL); /* * If this is the root, initialize the vdp's prev- and next-pgno * accounting. * * For each leaf page we hit, we'll want to make sure that * vdp->prev_pgno is the same as pip->prev_pgno and vdp->next_pgno is * our page number. Then, we'll set vdp->next_pgno to pip->next_pgno * and vdp->prev_pgno to our page number, and the next leaf page in * line should be able to do the same verification. */ if(toplevel) { /* * Cache the values stored in the vdp so that if we're an * auxiliary tree such as an off-page duplicate set, our * caller's leaf page chain doesn't get lost. 
*/ prev_pgno = vdp->prev_pgno; next_pgno = vdp->next_pgno; leaf_type = vdp->leaf_type; vdp->next_pgno = vdp->prev_pgno = PGNO_INVALID; vdp->leaf_type = P_INVALID; } /* * We are recursively descending a btree, starting from the root * and working our way out to the leaves. * * There are four cases we need to deal with: * 1. pgno is a recno leaf page. Any children are overflows. * 2. pgno is a duplicate leaf page. Any children * are overflow pages; traverse them, and then return * level and nrecs. * 3. pgno is an ordinary leaf page. Check whether dups are * allowed, and if so, traverse any off-page dups or * overflows. Then return nrecs and level. * 4. pgno is a recno internal page. Recursively check any * child pages, making sure their levels are one lower * and their nrecs sum to ours. * 5. pgno is a btree internal page. Same as #4, plus we * must verify that for each pair of BINTERNAL entries * N and N+1, the leftmost item on N's child sorts * greater than N, and the rightmost item on N's child * sorts less than N+1. * * Furthermore, in any sorted page type (P_LDUP, P_LBTREE, P_IBTREE), * we need to verify the internal sort order is correct if, * due to overflow items, we were not able to do so earlier. */ switch(pip->type) { case P_LRECNO: case P_LDUP: case P_LBTREE: /* * Cases 1, 2 and 3. * * We're some sort of leaf page; verify * that our linked list of leaves is consistent. */ if(vdp->leaf_type == P_INVALID) { /* * First leaf page. Set the type that all its * successors should be, and verify that our prev_pgno * is PGNO_INVALID. */ vdp->leaf_type = pip->type; if(pip->prev_pgno != PGNO_INVALID) goto bad_prev; } else { /* * Successor leaf page. Check our type, the previous * page's next_pgno, and our prev_pgno. 
*/ if(pip->type != vdp->leaf_type) { isbad = 1; EPRINT((env, DB_STR_A("1074", "Page %lu: unexpected page type %lu found in leaf chain (expected %lu)", "%lu %lu %lu"), (ulong)pip->pgno, (ulong)pip->type, (ulong)vdp->leaf_type)); } /* * Don't do the prev/next_pgno checks if we've lost * leaf pages due to another corruption. */ if(!F_ISSET(vdp, VRFY_LEAFCHAIN_BROKEN)) { if(pip->pgno != vdp->next_pgno) { isbad = 1; EPRINT((env, DB_STR_A("1075", "Page %lu: incorrect next_pgno %lu found in leaf chain (should be %lu)", "%lu %lu %lu"), (ulong)vdp->prev_pgno, (ulong)vdp->next_pgno, (ulong)pip->pgno)); } if(pip->prev_pgno != vdp->prev_pgno) { bad_prev: isbad = 1; EPRINT((env, DB_STR_A("1076", "Page %lu: incorrect prev_pgno %lu found in leaf chain (should be %lu)", "%lu %lu %lu"), (ulong)pip->pgno, (ulong)pip->prev_pgno, (ulong)vdp->prev_pgno)); } } } vdp->prev_pgno = pip->pgno; vdp->next_pgno = pip->next_pgno; F_CLR(vdp, VRFY_LEAFCHAIN_BROKEN); /* * Overflow pages are common to all three leaf types; * traverse the child list, looking for overflows. */ if((ret = __db_vrfy_childcursor(vdp, &cc)) != 0) goto err; for(ret = __db_vrfy_ccset(cc, pgno, &child); ret == 0; ret = __db_vrfy_ccnext(cc, &child)) if(child->type == V_OVERFLOW && (ret = __db_vrfy_ovfl_structure(dbp, vdp, child->pgno, child->tlen, flags|DB_ST_OVFL_LEAF)) != 0) { if(ret == DB_VERIFY_BAD) isbad = 1; else goto done; } if((ret = __db_vrfy_ccclose(cc)) != 0) goto err; cc = NULL; /* Case 1 */ if(pip->type == P_LRECNO) { if(!LF_ISSET(DB_ST_IS_RECNO) && !(LF_ISSET(DB_ST_DUPOK) && !LF_ISSET(DB_ST_DUPSORT))) { isbad = 1; EPRINT((env, DB_STR_A("1077", "Page %lu: recno leaf page non-recno tree", "%lu"), (ulong)pgno)); goto done; } goto leaf; } else if(LF_ISSET(DB_ST_IS_RECNO)) { /* * It's a non-recno leaf. Had better not be a recno * subtree. */ isbad = 1; EPRINT((env, DB_STR_A("1078", "Page %lu: non-recno leaf page in recno tree", "%lu"), (ulong)pgno)); goto done; } /* Case 2--no more work. 
*/ if(pip->type == P_LDUP) goto leaf; /* Case 3 */ /* Check if we have any dups. */ if(F_ISSET(pip, VRFY_HAS_DUPS)) { /* If dups aren't allowed in this btree, trouble. */ if(!LF_ISSET(DB_ST_DUPOK)) { isbad = 1; EPRINT((env, DB_STR_A("1079", "Page %lu: duplicates in non-dup btree", "%lu"), (ulong)pgno)); } else { /* * We correctly have dups. If any are off-page, * traverse those btrees recursively. */ if((ret = __db_vrfy_childcursor(vdp, &cc)) != 0) goto err; for(ret = __db_vrfy_ccset(cc, pgno, &child); ret == 0; ret = __db_vrfy_ccnext(cc, &child)) { stflags = flags|DB_ST_RECNUM|DB_ST_DUPSET; /* Skip any overflow entries. */ if(child->type == V_DUPLICATE) { if((ret = __db_vrfy_duptype(dbp, vdp, child->pgno, stflags)) != 0) { isbad = 1; /* Next child. */ continue; } if((ret = __bam_vrfy_subtree(dbp, vdp, child->pgno, NULL, NULL, stflags|DB_ST_TOPLEVEL, NULL, NULL, NULL)) != 0) { if(ret == DB_VERIFY_BAD) isbad = 1; else goto err; } } } if((ret = __db_vrfy_ccclose(cc)) != 0) goto err; cc = NULL; /* * If VRFY_DUPS_UNSORTED is set, * DB_ST_DUPSORT had better not be. */ if(F_ISSET(pip, VRFY_DUPS_UNSORTED) && LF_ISSET(DB_ST_DUPSORT)) { isbad = 1; EPRINT((env, DB_STR_A("1080", "Page %lu: unsorted duplicate set in sorted-dup database", "%lu"), (ulong)pgno)); } } } goto leaf; case P_IBTREE: case P_IRECNO: /* We handle these below. */ break; default: /* * If a P_IBTREE or P_IRECNO contains a reference to an * invalid page, we'll wind up here; handle it gracefully. * Note that the code at the "done" label assumes that the * current page is a btree/recno one of some sort; this * is not the case here, so we goto err. * * If the page is entirely zeroed, its pip->type will be a lie * (we assumed it was a hash page, as they're allowed to be * zeroed); handle this case specially. 
*/ if(F_ISSET(pip, VRFY_IS_ALLZEROES)) ZEROPG_ERR_PRINT(env, pgno, DB_STR_P("btree or recno page")); else EPRINT((env, DB_STR_A("1081", "Page %lu: btree or recno page is of inappropriate type %lu", "%lu %lu"), (ulong)pgno, (ulong)pip->type)); /* * We probably lost a leaf page (or more if this was an * internal page) from our prev/next_pgno chain. Flag * that this is expected; we don't want or need to * spew error messages about erroneous prev/next_pgnos, * since that's probably not the real problem. */ F_SET(vdp, VRFY_LEAFCHAIN_BROKEN); ret = DB_VERIFY_BAD; goto err; } /* * Cases 4 & 5: This is a btree or recno internal page. For each child, * recurse, keeping a running count of nrecs and making sure the level * is always reasonable. */ if((ret = __db_vrfy_childcursor(vdp, &cc)) != 0) goto err; for(ret = __db_vrfy_ccset(cc, pgno, &child); ret == 0; ret = __db_vrfy_ccnext(cc, &child)) if(child->type == V_RECNO) { if(pip->type != P_IRECNO) { ret = __db_unknown_path(env, "__bam_vrfy_subtree"); goto err; } if((ret = __bam_vrfy_subtree(dbp, vdp, child->pgno, NULL, NULL, flags, &child_level, &child_nrecs, &child_relen)) != 0) { if(ret == DB_VERIFY_BAD) isbad = 1; else goto done; } if(LF_ISSET(DB_ST_RELEN)) { if(relen == 0) relen = child_relen; // child_relen may be zero if the child subtree is empty. 
else if(child_relen > 0 && relen != child_relen) { isbad = 1; EPRINT((env, DB_STR_A("1082", "Page %lu: recno page returned bad re_len %lu", "%lu %lu"), (ulong)child->pgno, (ulong)child_relen)); } ASSIGN_PTR(relenp, relen); } if(LF_ISSET(DB_ST_RECNUM)) { if(child->nrecs != child_nrecs) { isbad = 1; EPRINT((env, DB_STR_A("1083", "Page %lu: record count incorrect: actual %lu, in record %lu", "%lu %lu %lu"), (ulong)child->pgno, (ulong)child_nrecs, (ulong)child->nrecs)); } nrecs += child_nrecs; } if(isbad == 0 && level != child_level+1) { isbad = 1; EPRINT((env, DB_STR_A("1084", "Page %lu: recno level incorrect: got %lu, expected %lu", "%lu %lu %lu"), (ulong)child->pgno, (ulong)child_level, (ulong)(level-1))); } } else if(child->type == V_OVERFLOW) { /* * It is possible for one internal page to reference * a single overflow page twice, if all the items * in the subtree referenced by slot 0 are deleted, * then a similar number of items are put back * before the key that formerly had been in slot 1. * * (Btree doesn't look at the key in slot 0, so the * fact that the key formerly at slot 1 is the "wrong" * parent of the stuff in the slot 0 subtree isn't * really incorrect.) * * __db_vrfy_ovfl_structure is designed to be * efficiently called multiple times for multiple * references; call it here as many times as is * appropriate. */ /* Otherwise, __db_vrfy_childput would be broken. */ DB_ASSERT(env, child->refcnt >= 1); /* * An overflow referenced more than twice here * shouldn't happen. */ if(child->refcnt > 2) { isbad = 1; EPRINT((env, DB_STR_A("1085", "Page %lu: overflow page %lu referenced more than twice from internal page", "%lu %lu"), (ulong)pgno, (ulong)child->pgno)); } else for(j = 0; j < child->refcnt; j++) if((ret = __db_vrfy_ovfl_structure(dbp, vdp, child->pgno, child->tlen, flags)) != 0) { if(ret == DB_VERIFY_BAD) isbad = 1; else goto done; } } if((ret = __db_vrfy_ccclose(cc)) != 0) goto err; cc = NULL; /* We're done with case 4. 
*/ if(pip->type == P_IRECNO) goto done; /* * Case 5. Btree internal pages. * As described above, we need to iterate through all the * items on the page and make sure that our children sort appropriately * with respect to them. * * For each entry, li will be the "left-hand" key for the entry * itself, which must sort lower than all entries on its child; * ri will be the key to its right, which must sort greater. */ if(!h && (ret = __memp_fget(mpf, &pgno, vdp->thread_info, NULL, 0, &h)) != 0) goto err; for(i = 0; i < pip->entries; i += O_INDX) { li = GET_BINTERNAL(dbp, h, i); ri = (i+O_INDX < pip->entries) ? GET_BINTERNAL(dbp, h, i+O_INDX) : (BINTERNAL *)r; /* * The leftmost key is forcibly sorted less than all entries, * so don't bother passing it. */ if((ret = __bam_vrfy_subtree(dbp, vdp, li->pgno, i == 0 ? NULL : li, ri, flags, &child_level, &child_nrecs, NULL)) != 0) { if(ret == DB_VERIFY_BAD) isbad = 1; else goto done; } if(LF_ISSET(DB_ST_RECNUM)) { /* * Keep a running tally on the actual record count so * we can return it to our parent (if we have one) or * compare it to the NRECS field if we're a root page. */ nrecs += child_nrecs; /* * Make sure the actual record count of the child * is equal to the value in the BINTERNAL structure. */ if(li->nrecs != child_nrecs) { isbad = 1; EPRINT((env, DB_STR_A("1086", "Page %lu: item %lu has incorrect record count of %lu, should be %lu", "%lu %lu %lu %lu"), (ulong)pgno, (ulong)i, (ulong)li->nrecs, (ulong)child_nrecs)); } } if(level != child_level+1) { isbad = 1; EPRINT((env, DB_STR_A("1087", "Page %lu: Btree level incorrect: got %lu, expected %lu", "%lu %lu %lu"), (ulong)li->pgno, (ulong)child_level, (ulong)(level-1))); } } if(0) { leaf: level = LEAFLEVEL; if(LF_ISSET(DB_ST_RECNUM)) nrecs = pip->rec_cnt; /* XXX * We should verify that the record count on a leaf page * is the sum of the number of keys and the number of * records in its off-page dups. 
This requires looking * at the page again, however, and it may all be changing * soon, so for now we don't bother. */ if(LF_ISSET(DB_ST_RELEN) && relenp) *relenp = pip->re_len; } done: if(F_ISSET(pip, VRFY_INCOMPLETE) && isbad == 0 && ret == 0) { /* * During the page-by-page pass, item order verification was * not finished due to the presence of overflow items. If * isbad == 0, though, it's now safe to do so, as we've * traversed any child overflow pages. Do it. */ if(!h && (ret = __memp_fget(mpf, &pgno, vdp->thread_info, NULL, 0, &h)) != 0) goto err; if((ret = __bam_vrfy_itemorder(dbp, vdp, vdp->thread_info, h, pgno, 0, 1, 0, flags)) != 0) goto err; F_CLR(pip, VRFY_INCOMPLETE); } /* * It's possible to get to this point with a page that has no * items, but without having detected any sort of failure yet. * Having zero items is legal if it's a leaf--it may be the * root page in an empty tree, or the tree may have been * modified with the DB_REVSPLITOFF flag set (there's no way * to tell from what's on disk). For an internal page, * though, having no items is a problem (all internal pages * must have children). */ if(isbad == 0 && ret == 0) { if(!h && (ret = __memp_fget(mpf, &pgno, vdp->thread_info, NULL, 0, &h)) != 0) goto err; if(NUM_ENT(h) == 0 && ISINTERNAL(h)) { isbad = 1; EPRINT((env, DB_STR_A("1088", "Page %lu: internal page is empty and should not be", "%lu"), (ulong)pgno)); goto err; } } /* * Our parent has sent us BINTERNAL pointers to parent records * so that we can verify our place with respect to them. If it's * appropriate--we have a default sort function--verify this. */ if(isbad == 0 && ret == 0 && !LF_ISSET(DB_NOORDERCHK) && pip->type != P_IRECNO && pip->type != P_LRECNO) { if(!h && (ret = __memp_fget(mpf, &pgno, vdp->thread_info, NULL, 0, &h)) != 0) goto err; /* * __bam_vrfy_treeorder needs to know what comparison function * to use. 
If DB_ST_DUPSET is set, we're in a duplicate tree * and we use the duplicate comparison function; otherwise, * use the btree one. If unset, use the default, of course. */ func = LF_ISSET(DB_ST_DUPSET) ? dbp->dup_compare : static_cast<BTREE *>(dbp->bt_internal)->bt_compare; if(func == NULL) func = __bam_defcmp; if((ret = __bam_vrfy_treeorder(dbp, vdp->thread_info, h, (BINTERNAL *)l, (BINTERNAL *)r, func, flags)) != 0) { if(ret == DB_VERIFY_BAD) isbad = 1; else goto err; } } /* * This is guaranteed to succeed for leaf pages, but no harm done. * * Internal pages below the top level do not store their own * record numbers, so we skip them. */ if(LF_ISSET(DB_ST_RECNUM) && nrecs != pip->rec_cnt && toplevel) { isbad = 1; EPRINT((env, DB_STR_A("1089", "Page %lu: bad record count: has %lu records, claims %lu", "%lu %lu %lu"), (ulong)pgno, (ulong)nrecs, (ulong)pip->rec_cnt)); } if(levelp) *levelp = level; if(nrecsp) *nrecsp = nrecs; pgset = vdp->pgset; if((ret = __db_vrfy_pgset_get(pgset, vdp->thread_info, vdp->txn, pgno, &p)) != 0) goto err; if(p != 0) { isbad = 1; EPRINT((env, DB_STR_A("1090", "Page %lu: linked twice", "%lu"), (ulong)pgno)); } else if((ret = __db_vrfy_pgset_inc(pgset, vdp->thread_info, vdp->txn, pgno)) != 0) goto err; if(toplevel) /* * The last page's next_pgno in the leaf chain should have been * PGNO_INVALID. */ if(vdp->next_pgno != PGNO_INVALID) { isbad = 1; EPRINT((env, DB_STR_A("1091", "Page %lu: unterminated leaf chain", "%lu"), (ulong)vdp->prev_pgno)); } err: if(toplevel) { /* Restore our caller's settings. */ vdp->next_pgno = next_pgno; vdp->prev_pgno = prev_pgno; vdp->leaf_type = leaf_type; } if((t_ret = __memp_fput(mpf, vdp->thread_info, h, DB_PRIORITY_UNCHANGED)) != 0 && ret == 0) ret = t_ret; if((t_ret = __db_vrfy_putpageinfo(env, vdp, pip)) != 0 && ret == 0) ret = t_ret; if(cc && ((t_ret = __db_vrfy_ccclose(cc)) != 0) && ret == 0) ret = t_ret; return (ret == 0 && isbad == 1) ? 
DB_VERIFY_BAD : ret; } /* * __bam_vrfy_treeorder -- * Verify that the lowest key on a page sorts greater than the * BINTERNAL which points to it (lp), and the highest key * sorts less than the BINTERNAL above that (rp). * * If lp is NULL, this means that it was the leftmost key on the * parent, which (regardless of sort function) sorts less than * all keys. No need to check it. * * If rp is NULL, lp was the highest key on the parent, so there's * no higher key we must sort less than. */ static int __bam_vrfy_treeorder(DB * dbp, DB_THREAD_INFO * ip, PAGE * h, BINTERNAL * lp, BINTERNAL * rp, int (*func)(DB*, const DBT*, const DBT *), uint32 flags) { BOVERFLOW * bo; DBC * dbc; DBT dbt; ENV * env; db_indx_t last; int ret, cmp; env = dbp->env; // (replaced by ctr) memzero(&dbt, sizeof(DBT)); F_SET(&dbt, DB_DBT_MALLOC); ret = 0; /* * Empty pages are sorted correctly by definition. We check * to see whether they ought to be empty elsewhere; leaf * pages legally may be. */ if(NUM_ENT(h) == 0) return 0; switch(TYPE(h)) { case P_IBTREE: case P_LDUP: last = NUM_ENT(h)-O_INDX; break; case P_LBTREE: last = NUM_ENT(h)-P_INDX; break; default: return __db_unknown_path(env, "__bam_vrfy_treeorder"); } /* Populate a dummy cursor. */ if((ret = __db_cursor_int(dbp, ip, NULL, DB_BTREE, PGNO_INVALID, 0, DB_LOCK_INVALIDID, &dbc)) != 0) return ret; /* * The key on page h, the child page, is more likely to be * an overflow page, so we pass its offset, rather than lp/rp's, * into __bam_cmp. This will take advantage of __db_moff. */ /* * Skip first-item check if we're an internal page--the first * entry on an internal page is treated specially by __bam_cmp, * so what's on the page shouldn't matter. (Plus, since we're passing * our page and item 0 as to __bam_cmp, we'll sort before our * parent and falsely report a failure.) 
*/ if(lp && TYPE(h) != P_IBTREE) { if((ret = __db_cursor_int(dbp, ip, NULL, DB_BTREE, PGNO_INVALID, 0, DB_LOCK_INVALIDID, &dbc)) != 0) return ret; if(lp->type == B_KEYDATA) { dbt.data = lp->data; dbt.size = lp->len; } else if(lp->type == B_OVERFLOW) { bo = (BOVERFLOW *)lp->data; if((ret = __db_goff(dbc, &dbt, bo->tlen, bo->pgno, NULL, NULL)) != 0) return ret; } else return __db_unknown_path(env, "__bam_vrfy_treeorder"); /* On error, fall through, free if needed, and return. */ if((ret = __bam_cmp(dbc, &dbt, h, 0, func, &cmp)) == 0) { if(cmp > 0) { EPRINT((env, DB_STR_A("1092", "Page %lu: first item on page sorted greater than parent entry", "%lu"), (ulong)PGNO(h))); ret = DB_VERIFY_BAD; } } else EPRINT((env, DB_STR_A("1093", "Page %lu: first item on page had comparison error", "%lu"), (ulong)PGNO(h))); if(dbt.data != lp->data) __os_ufree(env, dbt.data); if(ret != 0) return ret; } if(rp) { if(rp->type == B_KEYDATA) { dbt.data = rp->data; dbt.size = rp->len; } else if(rp->type == B_OVERFLOW) { bo = (BOVERFLOW *)rp->data; if((ret = __db_goff(dbc, &dbt, bo->tlen, bo->pgno, NULL, NULL)) != 0) return ret; } else return __db_unknown_path(env, "__bam_vrfy_treeorder"); /* On error, fall through, free if needed, and return. */ if((ret = __bam_cmp(dbc, &dbt, h, last, func, &cmp)) == 0) { if(cmp < 0) { EPRINT((env, DB_STR_A("1094", "Page %lu: last item on page sorted greater than parent entry", "%lu"), (ulong)PGNO(h))); ret = DB_VERIFY_BAD; } } else EPRINT((env, DB_STR_A("1095", "Page %lu: last item on page had comparison error", "%lu"), (ulong)PGNO(h))); if(dbt.data != rp->data) __os_ufree(env, dbt.data); } return ret; } /* * __bam_salvage -- * Safely dump out anything that looks like a key on an alleged * btree leaf page, also mark overflow pages as seen. For internal btree * pages, just mark any overflow pages as seen. 
* * PUBLIC: int __bam_salvage __P((DB *, VRFY_DBINFO *, * PUBLIC: db_pgno_t, uint32, PAGE *, void *, * PUBLIC: int (*)(void *, const void *), DBT *, uint32)); */ int __bam_salvage(DB * dbp, VRFY_DBINFO * vdp, db_pgno_t pgno, uint32 pgtype, PAGE * h, void * handle, int (*callback)__P((void *, const void *)), DBT * key, uint32 flags) { BKEYDATA * bk; BOVERFLOW * bo; DBT dbt, repldbt, unknown_key, unknown_data; ENV * env; VRFY_ITEM * pgmap; db_indx_t i, last, beg, end, * inp; db_pgno_t ovflpg; uint32 himark, ovfl_bufsz; void * ovflbuf; int adj, ret, t_ret, t2_ret; #ifdef HAVE_COMPRESSION DBT kcpy, * last_key; int unknown_dup_key; #endif env = dbp->env; ovflbuf = pgmap = NULL; inp = P_INP(dbp, h); // (replaced by ctr) memzero(&dbt, sizeof(DBT)); dbt.flags = DB_DBT_REALLOC; // (replaced by ctr) memzero(&repldbt, sizeof(DBT)); #ifdef HAVE_COMPRESSION // (replaced by ctr) memzero(&kcpy, sizeof(DBT)); unknown_dup_key = LF_ISSET(DB_SA_UNKNOWNKEY); last_key = unknown_dup_key ? NULL : key; #endif LF_CLR(DB_SA_UNKNOWNKEY); DB_INIT_DBT(unknown_key, "UNKNOWN_KEY", sizeof("UNKNOWN_KEY")-1); DB_INIT_DBT(unknown_data, "UNKNOWN_DATA", sizeof("UNKNOWN_DATA")-1); /* * Allocate a buffer for overflow items. Start at one page; * __db_safe_goff will realloc as needed. */ if((ret = __os_malloc(env, dbp->pgsize, &ovflbuf)) != 0) goto err; ovfl_bufsz = dbp->pgsize; if(LF_ISSET(DB_AGGRESSIVE) && (ret = __os_calloc(env, dbp->pgsize, sizeof(pgmap[0]), &pgmap)) != 0) goto err; /* * Loop through the inp array, spitting out key/data pairs. * * If we're salvaging normally, loop from 0 through NUM_ENT(h). If * we're being aggressive, loop until we hit the end of the page -- * NUM_ENT() may be bogus. */ himark = dbp->pgsize; for(i = 0, last = UINT16_MAX;; i += O_INDX) { /* * If we're not aggressive, or if we're on an internal page, * break when we hit NUM_ENT(h). */ if((!LF_ISSET(DB_AGGRESSIVE) || pgtype == P_IBTREE) && i >= NUM_ENT(h)) break; /* Verify the current item. 
*/ t_ret = __db_vrfy_inpitem(dbp, h, pgno, i, 1, flags, &himark, 0); if(t_ret != 0) { /* * If this is a btree leaf and we've printed out a key * but not its associated data item, fix this imbalance * by printing an "UNKNOWN_DATA". */ if(pgtype == P_LBTREE && i%P_INDX == 1 && last == i-1 && (t2_ret = __db_vrfy_prdbt(&unknown_data, 0, " ", handle, callback, 0, 0, vdp)) != 0) { SETIFZ(ret, t2_ret); goto err; } /* * Don't return DB_VERIFY_FATAL; it's private and means * only that we can't go on with this page, not with * the whole database. It's not even an error if we've * run into it after NUM_ENT(h). */ if(t_ret == DB_VERIFY_FATAL) { if(i < NUM_ENT(h) && ret == 0) ret = DB_VERIFY_BAD; break; } continue; } /* * If this returned 0, it's safe to print or (carefully) * try to fetch. * * We only print deleted items if DB_AGGRESSIVE is set. */ bk = GET_BKEYDATA(dbp, h, i); if(!LF_ISSET(DB_AGGRESSIVE) && B_DISSET(bk->type)) continue; /* * If this is a btree leaf and we're about to print out a data * item for which we didn't print out a key, fix this imbalance * by printing an "UNKNOWN_KEY". */ if(pgtype == P_LBTREE && i%P_INDX == 1 && last != i-1) { #ifdef HAVE_COMPRESSION last_key = NULL; #endif if((t_ret = __db_vrfy_prdbt(&unknown_key, 0, " ", handle, callback, 0, 0, vdp)) != 0) { SETIFZ(ret, t_ret); goto err; } } last = i; /* * We're going to go try to print the next item. If key is * non-NULL, we're a dup page, so we've got to print the key * first, unless DB_SA_SKIPFIRSTKEY is set and we're on the * first entry. */ if(key && (i != 0 || !LF_ISSET(DB_SA_SKIPFIRSTKEY))) { #ifdef HAVE_COMPRESSION last_key = unknown_dup_key ? NULL : key; #endif if((t_ret = __db_vrfy_prdbt(key, 0, " ", handle, callback, 0, 0, vdp)) != 0) { SETIFZ(ret, t_ret); goto err; } } beg = end = inp[i]; switch(B_TYPE(bk->type)) { case B_DUPLICATE: if(pgtype == P_IBTREE) break; end = beg+BOVERFLOW_SIZE-1; /* * If we're not on a normal btree leaf page, there * shouldn't be off-page dup sets. 
Something's * confused; just drop it, and the code to pick up * unlinked offpage dup sets will print it out * with key "UNKNOWN" later. */ if(pgtype != P_LBTREE) break; bo = (BOVERFLOW *)bk; /* * If the page number is unreasonable, or if this is * supposed to be a key item, output "UNKNOWN_KEY" -- * the best we can do is run into the data items in * the unlinked offpage dup pass. */ if(!IS_VALID_PGNO(bo->pgno) || (i%P_INDX == 0)) { /* Not much to do on failure. */ #ifdef HAVE_COMPRESSION if(key == NULL && i%P_INDX == 0) last_key = NULL; #endif if((t_ret = __db_vrfy_prdbt(i%P_INDX == 0 ? &unknown_key : &unknown_data, 0, " ", handle, callback, 0, 0, vdp)) != 0) { SETIFZ(ret, t_ret); goto err; } break; } /* Don't stop on error. */ if((t_ret = __db_salvage_duptree(dbp, vdp, bo->pgno, &dbt, handle, callback, flags|DB_SA_SKIPFIRSTKEY #ifdef HAVE_COMPRESSION |(last_key == NULL ? DB_SA_UNKNOWNKEY : 0) #endif )) != 0 && ret == 0) ret = t_ret; break; case B_KEYDATA: if(pgtype == P_IBTREE) break; end = (db_indx_t)DB_ALIGN(beg+bk->len, sizeof(uint32))-1; dbt.data = bk->data; dbt.size = bk->len; #ifdef HAVE_COMPRESSION if(DB_IS_COMPRESSED(dbp) && last_key && (key || (i%P_INDX == 1))) { /* Decompress the key/data pair - the key is in last_key, and the data is in dbt */ if((t_ret = __bam_compress_salvage(dbp, vdp, handle, callback, last_key, &dbt)) != 0) { if(t_ret == DB_VERIFY_FATAL) { SETIFZ(ret, DB_VERIFY_BAD); if(!LF_ISSET(DB_AGGRESSIVE)) goto err; } else if(!ret) { ret = t_ret; goto err; } } } else { if(key == NULL && i%P_INDX == 0) { if((ret = __os_realloc(env, dbt.size, &kcpy.data)) != 0) goto err; memcpy(kcpy.data, dbt.data, dbt.size); kcpy.size = dbt.size; last_key = &kcpy; } #endif if((t_ret = __db_vrfy_prdbt(&dbt, 0, " ", handle, callback, 0, 0, vdp)) != 0) { SETIFZ(ret, t_ret); goto err; } #ifdef HAVE_COMPRESSION } #endif break; case B_OVERFLOW: if(pgtype != P_IBTREE) end = beg+BOVERFLOW_SIZE-1; bo = (BOVERFLOW *)bk; /* * Check for replicated overflow keys, so that we 
only * call __db_safe_goff once per overflow page. If we * get the same offset as the previous key just re-use * the previous dbt. * * P_IBTREE pages will never have replicated overflow * keys. */ adj = pgtype == P_IBTREE ? O_INDX : P_INDX; if(pgtype == P_IBTREE) { /* * If we're looking at a P_IBTREE, we just want * to mark the overflow page as seen. * * Note that this call to __db_safe_goff differs * from the non-P_IBTREE call. * * Only call __db_safe_goff if the overflow page * hasn't been seen. */ ovflpg = ((BOVERFLOW *)((BINTERNAL *)bk)->data)->pgno; if(__db_salvage_isdone(vdp, ovflpg) == 0 && (t_ret = __db_safe_goff(dbp, vdp, ovflpg, &dbt, &ovflbuf, &ovfl_bufsz, flags)) != 0 && ret == 0) ret = t_ret; break; } else if(i > adj-1 && i%adj == 0 && inp[i] == inp[i-adj]) dbt = repldbt; else { /* Don't stop on error. */ if((t_ret = __db_safe_goff(dbp, vdp, bo->pgno, &dbt, &ovflbuf, &ovfl_bufsz, flags)) != 0 && ret == 0) ret = t_ret; /* * If this is a key, save it in case the next * key is a replicated overflow, so we don't * call __db_safe_goff again. Copy out dbt.data * in case that pointer gets realloc'd when * getting a data item. 
*/ if(i%P_INDX == 0) { if(t_ret == 0) { if((t_ret = __os_realloc(env, dbt.size, &repldbt.data)) != 0) { SETIFZ(ret, t_ret); goto err; } memcpy(repldbt.data, dbt.data, dbt.size); repldbt.size = dbt.size; } else { if(__os_realloc(env, unknown_key.size, &repldbt.data) != 0) goto err; memcpy(repldbt.data, unknown_key.data, unknown_key.size); repldbt.size = unknown_key.size; } } } #ifdef HAVE_COMPRESSION if(DB_IS_COMPRESSED(dbp) && last_key && t_ret == 0 && (key || (i%P_INDX == 1))) { /* Decompress the key/data pair - the key is in last_key, and the data is in dbt */ if((t_ret = __bam_compress_salvage(dbp, vdp, handle, callback, last_key, &dbt)) != 0) { if(t_ret == DB_VERIFY_FATAL) { SETIFZ(ret, DB_VERIFY_BAD); if(!LF_ISSET(DB_AGGRESSIVE)) goto err; } else if(!ret) { ret = t_ret; goto err; } } } else { if(key == NULL && i%P_INDX == 0) { if(t_ret == 0) { if((ret = __os_realloc(env, dbt.size, &kcpy.data)) != 0) goto err; memcpy(kcpy.data, dbt.data, dbt.size); kcpy.size = dbt.size; last_key = &kcpy; } else last_key = NULL; } #endif if((t_ret = __db_vrfy_prdbt(t_ret == 0 ? &dbt : &unknown_key, 0, " ", handle, callback, 0, 0, vdp)) != 0 && ret == 0) ret = t_ret; #ifdef HAVE_COMPRESSION } #endif break; default: /* * We should never get here; __db_vrfy_inpitem should * not be returning 0 if bk->type is unrecognizable. */ t_ret = __db_unknown_path(env, "__bam_salvage"); SETIFZ(ret, t_ret); goto err; } /* * If we're being aggressive, mark the beginning and end of * the item; we'll come back and print whatever "junk" is in * the gaps in case we had any bogus inp elements and thereby * missed stuff. */ if(LF_ISSET(DB_AGGRESSIVE) && pgtype != P_IBTREE) { pgmap[beg] = VRFY_ITEM_BEGIN; pgmap[end] = VRFY_ITEM_END; } } err: __os_free(env, pgmap); __os_free(env, ovflbuf); __os_free(env, repldbt.data); #ifdef HAVE_COMPRESSION __os_free(env, kcpy.data); #endif /* Mark this page as done. 
*/ if((t_ret = __db_salvage_markdone(vdp, pgno)) != 0 && ret == 0) ret = t_ret; return ret; } /* * __bam_salvage_walkdupint -- * Walk a known-good btree or recno internal page which is part of * a dup tree, calling __db_salvage_duptree on each child page. * * PUBLIC: int __bam_salvage_walkdupint __P((DB *, VRFY_DBINFO *, PAGE *, * PUBLIC: DBT *, void *, int (*)(void *, const void *), uint32)); */ int __bam_salvage_walkdupint(DB * dbp, VRFY_DBINFO * vdp, PAGE * h, DBT * key, void * handle, int (*callback)__P((void *, const void *)), uint32 flags) { BINTERNAL * bi; RINTERNAL * ri; int t_ret; db_indx_t i; ENV * env = dbp->env; int ret = 0; for(i = 0; i < NUM_ENT(h); i++) { switch(TYPE(h)) { case P_IBTREE: bi = GET_BINTERNAL(dbp, h, i); if((t_ret = __db_salvage_duptree(dbp, vdp, bi->pgno, key, handle, callback, flags)) != 0) ret = t_ret; break; case P_IRECNO: ri = GET_RINTERNAL(dbp, h, i); if((t_ret = __db_salvage_duptree(dbp, vdp, ri->pgno, key, handle, callback, flags)) != 0) ret = t_ret; break; default: return __db_unknown_path(env, "__bam_salvage_walkdupint"); } /* Pass DB_SA_SKIPFIRSTKEY, if set, on to the 0th child only. */ flags &= ~LF_ISSET(DB_SA_SKIPFIRSTKEY); } return ret; } /* * __bam_meta2pgset -- * Given a known-good meta page, return in pgsetp a 0-terminated list of * db_pgno_t's corresponding to the pages in the btree. * * We do this by a somewhat sleazy method, to avoid having to traverse the * btree structure neatly: we walk down the left side to the very * first leaf page, then we mark all the pages in the chain of * NEXT_PGNOs (being wary of cycles and invalid ones), then we * consolidate our scratch array into a nice list, and return. This * avoids the memory management hassles of recursion and the * trouble of walking internal pages--they just don't matter, except * for the left branch. 
* * PUBLIC: int __bam_meta2pgset __P((DB *, VRFY_DBINFO *, BTMETA *, * PUBLIC: uint32, DB *)); */ int __bam_meta2pgset(DB * dbp, VRFY_DBINFO * vdp, BTMETA * btmeta, uint32 flags, DB * pgset) { BINTERNAL * bi; DB_MPOOLFILE * mpf; PAGE * h; RINTERNAL * ri; db_pgno_t current, p; int err_ret, ret; DB_ASSERT(dbp->env, pgset != NULL); mpf = dbp->mpf; h = NULL; ret = err_ret = 0; for(current = btmeta->root;; ) { if(!IS_VALID_PGNO(current) || current == PGNO(btmeta)) { err_ret = DB_VERIFY_BAD; goto err; } if((ret = __memp_fget(mpf, &current, vdp->thread_info, NULL, 0, &h)) != 0) { err_ret = ret; goto err; } switch(TYPE(h)) { case P_IBTREE: case P_IRECNO: if((ret = __bam_vrfy(dbp, vdp, h, current, flags|DB_NOORDERCHK)) != 0) { err_ret = ret; goto err; } if(TYPE(h) == P_IBTREE) { bi = GET_BINTERNAL(dbp, h, 0); current = bi->pgno; } else { /* P_IRECNO */ ri = GET_RINTERNAL(dbp, h, 0); current = ri->pgno; } break; case P_LBTREE: case P_LRECNO: goto traverse; default: err_ret = DB_VERIFY_BAD; goto err; } if((ret = __memp_fput(mpf, vdp->thread_info, h, DB_PRIORITY_UNCHANGED)) != 0) err_ret = ret; h = NULL; } /* * At this point, current is the pgno of leaf page h, the 0th in the * tree we're concerned with. */ traverse: while(IS_VALID_PGNO(current) && current != PGNO_INVALID) { if(!h && (ret = __memp_fget(mpf, &current, vdp->thread_info, NULL, 0, &h)) != 0) { err_ret = ret; break; } if((ret = __db_vrfy_pgset_get(pgset, vdp->thread_info, vdp->txn, current, (int *)&p)) != 0) goto err; if(p != 0) { /* * We've found a cycle. Return success anyway-- * our caller may as well use however much of * the pgset we've come up with. */ break; } if((ret = __db_vrfy_pgset_inc(pgset, vdp->thread_info, vdp->txn, current)) != 0) goto err; current = NEXT_PGNO(h); if((ret = __memp_fput(mpf, vdp->thread_info, h, DB_PRIORITY_UNCHANGED)) != 0) err_ret = ret; h = NULL; } err: __memp_fput(mpf, vdp->thread_info, h, DB_PRIORITY_UNCHANGED); return ret == 0 ? 
err_ret : ret; } /* * __bam_safe_getdata -- * * Utility function for __bam_vrfy_itemorder. Safely gets the datum at * index i, page h, and sticks it in DBT dbt. If ovflok is 1 and i's an * overflow item, we do a safe_goff to get the item and signal that we need * to free dbt->data; if ovflok is 0, we leaves the DBT zeroed. */ static int __bam_safe_getdata(DB * dbp, DB_THREAD_INFO * ip, PAGE * h, uint32 i, int ovflok, DBT * dbt, int * freedbtp) { BKEYDATA * bk; BOVERFLOW * bo; DBC * dbc; int ret; memzero(dbt, sizeof(DBT)); *freedbtp = 0; bk = GET_BKEYDATA(dbp, h, i); if(B_TYPE(bk->type) == B_OVERFLOW) { if(!ovflok) return 0; else if((ret = __db_cursor_int(dbp, ip, NULL, DB_BTREE, PGNO_INVALID, 0, DB_LOCK_INVALIDID, &dbc)) != 0) return ret; else { bo = (BOVERFLOW *)bk; F_SET(dbt, DB_DBT_MALLOC); *freedbtp = 1; return __db_goff(dbc, dbt, bo->tlen, bo->pgno, 0, 0); } } else { dbt->data = bk->data; dbt->size = bk->len; } return 0; }
{ "pile_set_name": "Github" }
<?php
/**
 * Folders Access Control Lists Management (RFC4314, RFC2086)
 *
 * @version @package_version@
 * @author Aleksander Machniak <[email protected]>
 *
 *
 * Copyright (C) 2011-2012, Kolab Systems AG
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see http://www.gnu.org/licenses/.
 */
class acl extends rcube_plugin
{
    // Tasks this plugin is active for
    public $task = 'settings|addressbook|calendar';

    private $rc;                // rcmail instance, set in init()
    private $supported = null;  // cached result of rights_supported()
    private $mbox;              // folder (UTF7-IMAP) currently being edited
    private $ldap;              // rcube_ldap backend for autocomplete, lazily created
    // Special (virtual) ACL identifiers; may be overridden by 'acl_specials' config
    private $specials = array('anyone', 'anonymous');

    /**
     * Plugin initialization
     */
    function init()
    {
        $this->rc = rcmail::get_instance();

        // Register hooks
        $this->add_hook('folder_form', array($this, 'folder_form'));
        // kolab_addressbook plugin
        $this->add_hook('addressbook_form', array($this, 'folder_form'));
        $this->add_hook('calendar_form_kolab', array($this, 'folder_form'));

        // Plugin actions
        $this->register_action('plugin.acl', array($this, 'acl_actions'));
        $this->register_action('plugin.acl-autocomplete', array($this, 'acl_autocomplete'));
    }

    /**
     * Handler for plugin actions (AJAX)
     */
    function acl_actions()
    {
        $action = trim(rcube_utils::get_input_value('_act', rcube_utils::INPUT_GPC));

        // Connect to IMAP
        $this->rc->storage_init();

        // Load localization and configuration
        $this->add_texts('localization/');
        $this->load_config();

        if ($action == 'save') {
            $this->action_save();
        }
        else if ($action == 'delete') {
            $this->action_delete();
        }
        else if ($action == 'list') {
            $this->action_list();
        }

        // Only AJAX actions
        $this->rc->output->send();
    }

    /**
     * Handler for user login autocomplete request
     */
    function acl_autocomplete()
    {
        $this->load_config();

        $search = rcube_utils::get_input_value('_search', rcube_utils::INPUT_GPC, true);
        $sid    = rcube_utils::get_input_value('_id', rcube_utils::INPUT_GPC);
        $users  = array();

        if ($this->init_ldap()) {
            $max  = (int) $this->rc->config->get('autocomplete_max', 15);
            $mode = (int) $this->rc->config->get('addressbook_search_mode');

            $this->ldap->set_pagesize($max);
            $result = $this->ldap->search('*', $search, $mode);

            foreach ($result->records as $record) {
                $user = $record['uid'];

                // multi-valued uid attribute: take the first non-empty value
                if (is_array($user)) {
                    $user = array_filter($user);
                    $user = $user[0];
                }

                if ($user) {
                    if ($record['name'])
                        $user = $record['name'] . ' (' . $user . ')';

                    $users[] = $user;
                }
            }
        }

        sort($users, SORT_LOCALE_STRING);

        $this->rc->output->command('ksearch_query_results', $users, $search, $sid);
        $this->rc->output->send();
    }

    /**
     * Handler for 'folder_form' hook
     *
     * @param array $args Hook arguments array (form data)
     *
     * @return array Hook arguments array
     */
    function folder_form($args)
    {
        $mbox_imap = $args['options']['name'];
        $myrights  = $args['options']['rights'];

        // Edited folder name (empty in create-folder mode)
        if (!strlen($mbox_imap)) {
            return $args;
        }
        /*
        // Do nothing on protected folders (?)
        if ($args['options']['protected']) {
            return $args;
        }
        */
        // Get MYRIGHTS
        if (empty($myrights)) {
            return $args;
        }

        // Load localization and include scripts
        $this->load_config();
        $this->specials = $this->rc->config->get('acl_specials', $this->specials);
        $this->add_texts('localization/', array('deleteconfirm', 'norights',
            'nouser', 'deleting', 'saving', 'newuser', 'editperms'));
        $this->rc->output->add_label('save', 'cancel');
        $this->include_script('acl.js');
        $this->rc->output->include_script('list.js');
        $this->include_stylesheet($this->local_skin_path().'/acl.css');

        // add Info fieldset if it doesn't exist
        if (!isset($args['form']['props']['fieldsets']['info']))
            $args['form']['props']['fieldsets']['info'] = array(
                'name'    => $this->rc->gettext('info'),
                'content' => array());

        // Display folder rights to 'Info' fieldset
        $args['form']['props']['fieldsets']['info']['content']['myrights'] = array(
            'label' => rcube::Q($this->gettext('myrights')),
            'value' => $this->acl2text($myrights)
        );

        // Return if not folder admin
        if (!in_array('a', $myrights)) {
            return $args;
        }

        // The 'Sharing' tab
        $this->mbox = $mbox_imap;
        $this->rc->output->set_env('acl_users_source', (bool) $this->rc->config->get('acl_users_source'));
        $this->rc->output->set_env('mailbox', $mbox_imap);
        $this->rc->output->add_handlers(array(
            'acltable'  => array($this, 'templ_table'),
            'acluser'   => array($this, 'templ_user'),
            'aclrights' => array($this, 'templ_rights'),
        ));

        $this->rc->output->set_env('autocomplete_max', (int)$this->rc->config->get('autocomplete_max', 15));
        $this->rc->output->set_env('autocomplete_min_length', $this->rc->config->get('autocomplete_min_length'));
        $this->rc->output->add_label('autocompletechars', 'autocompletemore');

        $args['form']['sharing'] = array(
            'name'    => rcube::Q($this->gettext('sharing')),
            'content' => $this->rc->output->parse('acl.table', false, false),
        );

        return $args;
    }

    /**
     * Creates ACL rights table
     *
     * @param array $attrib Template object attributes
     *
     * @return string HTML Content
     */
    function templ_table($attrib)
    {
        if (empty($attrib['id']))
            $attrib['id'] = 'acl-table';

        $out = $this->list_rights($attrib);

        $this->rc->output->add_gui_object('acltable', $attrib['id']);

        return $out;
    }

    /**
     * Creates ACL rights form (rights list part)
     *
     * @param array $attrib Template object attributes
     *
     * @return string HTML Content
     */
    function templ_rights($attrib)
    {
        // Get supported rights
        $supported = $this->rights_supported();

        // depending on server capability either use 'te' or 'd' for deleting msgs
        $deleteright = implode(array_intersect(str_split('ted'), $supported));

        $out = '';
        $ul  = '';
        $input = new html_checkbox();

        // Advanced rights
        $attrib['id'] = 'advancedrights';
        foreach ($supported as $key => $val) {
            $id = "acl$val";
            $ul .= html::tag('li', null,
                $input->show('', array(
                    'name' => "acl[$val]", 'value' => $val, 'id' => $id))
                . html::label(array('for' => $id, 'title' => $this->gettext('longacl'.$val)),
                    $this->gettext('acl'.$val)));
        }

        $out = html::tag('ul', $attrib, $ul, html::$common_attrib);

        // Simple rights
        $ul = '';
        $attrib['id'] = 'simplerights';
        $items = array(
            'read'   => 'lrs',
            'write'  => 'wi',
            'delete' => $deleteright,
            // 'other' = everything supported that is not covered above
            'other'  => preg_replace('/[lrswi'.$deleteright.']/', '', implode($supported)),
        );

        foreach ($items as $key => $val) {
            $id = "acl$key";
            $ul .= html::tag('li', null,
                $input->show('', array(
                    'name' => "acl[$val]", 'value' => $val, 'id' => $id))
                . html::label(array('for' => $id, 'title' => $this->gettext('longacl'.$key)),
                    $this->gettext('acl'.$key)));
        }

        $out .= "\n" . html::tag('ul', $attrib, $ul, html::$common_attrib);

        $this->rc->output->set_env('acl_items', $items);

        return $out;
    }

    /**
     * Creates ACL rights form (user part)
     *
     * @param array $attrib Template object attributes
     *
     * @return string HTML Content
     */
    function templ_user($attrib)
    {
        // Create username input
        $attrib['name'] = 'acluser';

        $textfield = new html_inputfield($attrib);

        $fields['user'] = html::label(array('for' => 'iduser'), $this->gettext('username'))
            . ' ' . $textfield->show();

        // Add special entries
        if (!empty($this->specials)) {
            foreach ($this->specials as $key) {
                $fields[$key] = html::label(array('for' => 'id'.$key), $this->gettext($key));
            }
        }

        $this->rc->output->set_env('acl_specials', $this->specials);

        // Create list with radio buttons
        if (count($fields) > 1) {
            $ul = '';
            $radio = new html_radiobutton(array('name' => 'usertype'));
            foreach ($fields as $key => $val) {
                $ul .= html::tag('li', null,
                    $radio->show($key == 'user' ? 'user' : '',
                        array('value' => $key, 'id' => 'id'.$key))
                    . $val);
            }

            $out = html::tag('ul', array('id' => 'usertype', 'class' => $attrib['class']), $ul, html::$common_attrib);
        }
        // Display text input alone
        else {
            $out = $fields['user'];
        }

        return $out;
    }

    /**
     * Creates ACL rights table
     *
     * @param array $attrib Template object attributes
     *
     * @return string HTML Content
     */
    private function list_rights($attrib=array())
    {
        // Get ACL for the folder
        $acl = $this->rc->storage->get_acl($this->mbox);

        if (!is_array($acl)) {
            $acl = array();
        }

        // Keep special entries (anyone/anonymous) on top of the list
        if (!empty($this->specials) && !empty($acl)) {
            foreach ($this->specials as $key) {
                if (isset($acl[$key])) {
                    $acl_special[$key] = $acl[$key];
                    unset($acl[$key]);
                }
            }
        }

        // Sort the list by username
        uksort($acl, 'strnatcasecmp');

        if (!empty($acl_special)) {
            $acl = array_merge($acl_special, $acl);
        }

        // Get supported rights and build column names
        $supported = $this->rights_supported();

        // depending on server capability either use 'te' or 'd' for deleting msgs
        $deleteright = implode(array_intersect(str_split('ted'), $supported));

        // Use advanced or simple (grouped) rights
        $advanced = $this->rc->config->get('acl_advanced_mode');

        if ($advanced) {
            $items = array();
            foreach ($supported as $sup) {
                $items[$sup] = $sup;
            }
        }
        else {
            $items = array(
                'read'   => 'lrs',
                'write'  => 'wi',
                'delete' => $deleteright,
                'other'  => preg_replace('/[lrswi'.$deleteright.']/', '', implode($supported)),
            );
        }

        // Create the table
        $attrib['noheader'] = true;
        $table = new html_table($attrib);

        // Create table header
        $table->add_header('user', $this->gettext('identifier'));
        foreach (array_keys($items) as $key) {
            $label = $this->gettext('shortacl'.$key);
            $table->add_header(array('class' => 'acl'.$key, 'title' => $label), $label);
        }

        $js_table = array();
        foreach ($acl as $user => $rights) {
            // skip the current IMAP user's own entry
            if ($this->rc->storage->conn->user == $user) {
                continue;
            }

            // filter out virtual rights (c or d) the server may return
            $userrights = array_intersect($rights, $supported);
            $userid = rcube_utils::html_identifier($user);

            if (!empty($this->specials) && in_array($user, $this->specials)) {
                $user = $this->gettext($user);
            }

            $table->add_row(array('id' => 'rcmrow'.$userid));
            $table->add('user', rcube::Q($user));

            foreach ($items as $key => $right) {
                $in = $this->acl_compare($userrights, $right);
                switch ($in) {
                    case 2: $class = 'enabled'; break;
                    case 1: $class = 'partial'; break;
                    default: $class = 'disabled'; break;
                }
                $table->add('acl' . $key . ' ' . $class, '');
            }

            $js_table[$userid] = implode($userrights);
        }

        $this->rc->output->set_env('acl', $js_table);
        $this->rc->output->set_env('acl_advanced', $advanced);

        $out = $table->show();

        return $out;
    }

    /**
     * Handler for ACL update/create action
     */
    private function action_save()
    {
        $mbox  = trim(rcube_utils::get_input_value('_mbox', rcube_utils::INPUT_GPC, true)); // UTF7-IMAP
        $user  = trim(rcube_utils::get_input_value('_user', rcube_utils::INPUT_GPC));
        $acl   = trim(rcube_utils::get_input_value('_acl', rcube_utils::INPUT_GPC));
        $oldid = trim(rcube_utils::get_input_value('_old', rcube_utils::INPUT_GPC));

        // keep only rights the server actually supports
        $acl   = array_intersect(str_split($acl), $this->rights_supported());
        // edit mode updates one entry; create mode accepts a comma-separated list
        $users = $oldid ? array($user) : explode(',', $user);

        $result = 0;

        foreach ($users as $user) {
            $user = trim($user);

            if (!empty($this->specials) && in_array($user, $this->specials)) {
                $username = $this->gettext($user);
            }
            else if (!empty($user)) {
                // qualify a bare username with the detected realm (domain)
                if (!strpos($user, '@') && ($realm = $this->get_realm())) {
                    $user .= '@' . rcube_utils::idn_to_ascii(preg_replace('/^@/', '', $realm));
                }
                $username = $user;
            }

            if (!$acl || !$user || !strlen($mbox)) {
                continue;
            }

            $user     = $this->mod_login($user);
            $username = $this->mod_login($username);

            // never modify the logged-in user's own ACL entry here
            if ($user != $_SESSION['username'] && $username != $_SESSION['username']) {
                if ($this->rc->storage->set_acl($mbox, $user, $acl)) {
                    $ret = array('id' => rcube_utils::html_identifier($user),
                        'username' => $username, 'acl' => implode($acl), 'old' => $oldid);
                    $this->rc->output->command('acl_update', $ret);
                    $result++;
                }
            }
        }

        if ($result) {
            $this->rc->output->show_message($oldid ? 'acl.updatesuccess' : 'acl.createsuccess', 'confirmation');
        }
        else {
            $this->rc->output->show_message($oldid ? 'acl.updateerror' : 'acl.createerror', 'error');
        }
    }

    /**
     * Handler for ACL delete action
     */
    private function action_delete()
    {
        $mbox = trim(rcube_utils::get_input_value('_mbox', rcube_utils::INPUT_GPC, true)); //UTF7-IMAP
        $user = trim(rcube_utils::get_input_value('_user', rcube_utils::INPUT_GPC));

        $user = explode(',', $user);

        foreach ($user as $u) {
            $u = trim($u);
            if ($this->rc->storage->delete_acl($mbox, $u)) {
                $this->rc->output->command('acl_remove_row', rcube_utils::html_identifier($u));
            }
            else {
                $error = true;
            }
        }

        // NOTE(review): $error is only assigned on failure; when every delete
        // succeeds it is undefined and !$error relies on PHP treating null as false
        if (!$error) {
            $this->rc->output->show_message('acl.deletesuccess', 'confirmation');
        }
        else {
            $this->rc->output->show_message('acl.deleteerror', 'error');
        }
    }

    /**
     * Handler for ACL list update action (with display mode change)
     */
    private function action_list()
    {
        if (in_array('acl_advanced_mode', (array)$this->rc->config->get('dont_override'))) {
            return;
        }

        $this->mbox = trim(rcube_utils::get_input_value('_mbox', rcube_utils::INPUT_GPC, true)); // UTF7-IMAP
        $advanced   = trim(rcube_utils::get_input_value('_mode', rcube_utils::INPUT_GPC));
        $advanced   = $advanced == 'advanced' ? true : false;

        // Save state in user preferences
        $this->rc->user->save_prefs(array('acl_advanced_mode' => $advanced));

        $out = $this->list_rights();

        // strip the outer <table> tags; the client updates the table body in place
        $out = preg_replace(array('/^<table[^>]+>/', '/<\/table>$/'), '', $out);

        $this->rc->output->command('acl_list_update', $out);
    }

    /**
     * Creates <UL> list with descriptive access rights
     *
     * @param array $rights MYRIGHTS result
     *
     * @return string HTML content
     */
    function acl2text($rights)
    {
        if (empty($rights)) {
            return '';
        }

        $supported = $this->rights_supported();
        $list      = array();
        $attrib    = array(
            'name' => 'rcmyrights',
            'style' => 'margin:0; padding:0 15px;',
        );

        foreach ($supported as $right) {
            if (in_array($right, $rights)) {
                $list[] = html::tag('li', null, rcube::Q($this->gettext('acl' . $right)));
            }
        }

        if (count($list) == count($supported))
            return rcube::Q($this->gettext('aclfull'));

        return html::tag('ul', $attrib, implode("\n", $list));
    }

    /**
     * Compares two ACLs (according to supported rights)
     *
     * @param array $acl1 ACL rights array (or string)
     * @param array $acl2 ACL rights array (or string)
     *
     * @return int Comparison result, 2 - full match, 1 - partial match, 0 - no match
     */
    function acl_compare($acl1, $acl2)
    {
        if (!is_array($acl1)) $acl1 = str_split($acl1);
        if (!is_array($acl2)) $acl2 = str_split($acl2);

        $rights = $this->rights_supported();

        $acl1 = array_intersect($acl1, $rights);
        $acl2 = array_intersect($acl2, $rights);
        $res  = array_intersect($acl1, $acl2);

        $cnt1 = count($res);
        $cnt2 = count($acl2);

        if ($cnt1 == $cnt2)
            return 2;
        else if ($cnt1)
            return 1;
        else
            return 0;
    }

    /**
     * Get list of supported access rights (according to RIGHTS capability)
     *
     * @return array List of supported access rights abbreviations
     */
    function rights_supported()
    {
        if ($this->supported !== null) {
            return $this->supported;
        }

        $capa = $this->rc->storage->get_capability('RIGHTS');

        if (is_array($capa)) {
            $rights = strtolower($capa[0]);
        }
        else {
            // fall back to RFC 2086 style 'c'/'d' rights
            $rights = 'cd';
        }

        return $this->supported = str_split('lrswi' . $rights . 'pa');
    }

    /**
     * Username realm detection.
     *
     * @return string Username realm (domain)
     */
    private function get_realm()
    {
        // When user enters a username without domain part, realm
        // allows to add it to the username (and display correct username in the table)
        if (isset($_SESSION['acl_username_realm'])) {
            return $_SESSION['acl_username_realm'];
        }

        // find realm in username of logged user (?)
        list($name, $domain) = explode('@', $_SESSION['username']);

        // Use (always existent) ACL entry on the INBOX for the user to determine
        // whether or not the user ID in ACL entries need to be qualified and how
        // they would need to be qualified.
        if (empty($domain)) {
            $acl = $this->rc->storage->get_acl('INBOX');
            if (is_array($acl)) {
                $regexp = '/^' . preg_quote($_SESSION['username'], '/') . '@(.*)$/';
                foreach (array_keys($acl) as $name) {
                    if (preg_match($regexp, $name, $matches)) {
                        $domain = $matches[1];
                        break;
                    }
                }
            }
        }

        return $_SESSION['acl_username_realm'] = $domain;
    }

    /**
     * Initializes autocomplete LDAP backend
     */
    private function init_ldap()
    {
        if ($this->ldap)
            return $this->ldap->ready;

        // get LDAP config
        $config = $this->rc->config->get('acl_users_source');

        if (empty($config)) {
            return false;
        }

        // not an array, use configured ldap_public source
        if (!is_array($config)) {
            $ldap_config = (array) $this->rc->config->get('ldap_public');
            $config = $ldap_config[$config];
        }

        $uid_field = $this->rc->config->get('acl_users_field', 'mail');
        $filter    = $this->rc->config->get('acl_users_filter');

        if (empty($uid_field) || empty($config)) {
            return false;
        }

        // get name attribute
        if (!empty($config['fieldmap'])) {
            $name_field = $config['fieldmap']['name'];
        }
        // ... no fieldmap, use the old method
        if (empty($name_field)) {
            $name_field = $config['name_field'];
        }

        // add UID field to fieldmap, so it will be returned in a record with name
        $config['fieldmap'] = array(
            'name' => $name_field,
            'uid'  => $uid_field,
        );

        // search in UID and name fields
        $config['search_fields']   = array_values($config['fieldmap']);
        $config['required_fields'] = array($uid_field);

        // set search filter
        if ($filter)
            $config['filter'] = $filter;

        // disable vlv
        $config['vlv'] = false;

        // Initialize LDAP connection
        $this->ldap = new rcube_ldap($config,
            $this->rc->config->get('ldap_debug'),
            $this->rc->config->mail_domain($_SESSION['imap_host']));

        return $this->ldap->ready;
    }

    /**
     * Modify user login according to 'login_lc' setting
     */
    protected function mod_login($user)
    {
        $login_lc = $this->rc->config->get('login_lc');

        if ($login_lc === true || $login_lc == 2) {
            $user = mb_strtolower($user);
        }
        // lowercase domain name
        else if ($login_lc && strpos($user, '@')) {
            list($local, $domain) = explode('@', $user);
            $user = $local . '@' . mb_strtolower($domain);
        }

        return $user;
    }
}
{ "pile_set_name": "Github" }
/*
 * Stylesheet manifest: partials are imported in cascade order
 * (illustrations → base → elements → patterns → layout → sections).
 * NOTE(review): later imports can override earlier ones — do not reorder.
 */

/*--------------------------------------------------------------
# Illustrations
--------------------------------------------------------------*/
@import "illustrations/illustrations";

/*--------------------------------------------------------------
# Fonts
--------------------------------------------------------------*/
@import "base/fonts";

/*--------------------------------------------------------------
# Scroll reveal
--------------------------------------------------------------*/
@import "base/scroll-reveal";

/*--------------------------------------------------------------
# Base
--------------------------------------------------------------*/
@import "base/base";

/*--------------------------------------------------------------
# Typography
--------------------------------------------------------------*/
@import "base/typography";

/*--------------------------------------------------------------
# Containers
--------------------------------------------------------------*/
@import "elements/containers";

/*--------------------------------------------------------------
# Buttons
--------------------------------------------------------------*/
@import "elements/buttons";

/*--------------------------------------------------------------
# Forms
--------------------------------------------------------------*/
@import "elements/forms";

/*--------------------------------------------------------------
# Hamburger
--------------------------------------------------------------*/
@import "elements/hamburger";

/*--------------------------------------------------------------
# Modal
--------------------------------------------------------------*/
@import "elements/modal";

/*--------------------------------------------------------------
# Split pattern
--------------------------------------------------------------*/
@import "patterns/split";

/*--------------------------------------------------------------
# Tiles pattern
--------------------------------------------------------------*/
@import "patterns/tiles";

/*--------------------------------------------------------------
# Header
--------------------------------------------------------------*/
@import "layout/header";

/*--------------------------------------------------------------
# Site content
--------------------------------------------------------------*/
@import "layout/main";

/*--------------------------------------------------------------
# Footer
--------------------------------------------------------------*/
@import "layout/footer";

/*--------------------------------------------------------------
# Section
--------------------------------------------------------------*/
@import "sections/section";

/*--------------------------------------------------------------
# Hero
--------------------------------------------------------------*/
@import "sections/hero";

/*--------------------------------------------------------------
# Features split
--------------------------------------------------------------*/
@import "sections/features-split";

/*--------------------------------------------------------------
# Features tiles
--------------------------------------------------------------*/
@import "sections/features-tiles";

/*--------------------------------------------------------------
# Testimonial
--------------------------------------------------------------*/
@import "sections/testimonial";

/*--------------------------------------------------------------
# Call to action
--------------------------------------------------------------*/
@import "sections/cta";
{ "pile_set_name": "Github" }
import java.io.{File}
import scalaxb.compiler.{Config}

// Integration test for scalaxb code generation from an XSD whose local elements
// are UNQUALIFIED (elementFormDefault="unqualified"): top-level elements carry the
// target-namespace prefix while locals like <string1> do not.
// Each example compiles the generated "unqualified" package together with a
// round-trip snippet (fromXML then toXML) and compares the evaluated string.
class UnqualifiedLocalTest extends TestBase {
  // Schema under test; path is relative to the sbt project root.
  val inFile = new File("integration/src/test/resources/unqualified.xsd")
  // Generated Scala sources for package "unqualified"; lazy so generation runs
  // once, on first use by any of the examples below.
  lazy val generated = module.process(inFile, "unqualified", tmp)

  // Round trip with an explicit namespace URI and an unquoted element label.
  // Expected output keeps <string1> unprefixed (local, unqualified) while
  // foo/bar stay in the unq: prefix.
  "unqualified.scala file must compile so that Foo can be used" in {
    (List("""scalaxb.toXML[unqualified.Foo](scalaxb.fromXML[unqualified.Foo](""" +
      """<unq:foo xmlns:unq="http://www.example.com/unqualified" attribute1="bar">""" +
      "<string1></string1>" +
      """<unq:bar>bar</unq:bar>""" +
      """</unq:foo>), """ +
      """Some("http://www.example.com/unqualified"), "foo", """ +
      """scalaxb.toScope(Some("unq") -> "http://www.example.com/unqualified") ).toString"""),
      generated) must evaluateTo("""<unq:foo attribute1="bar" xmlns:unq="http://www.example.com/unqualified">""" +
      "<string1></string1>" +
      """<unq:bar>bar</unq:bar>""" +
      "</unq:foo>", outdir = "./tmp")
  }

  // Same round trip via the toXML overload that takes Some("foo") for the
  // element label instead of a bare String.
  // NOTE(review): the example name reads "must compiled" — grammar typo, but it
  // is a runtime string (the specs2 example name), so it is left untouched here.
  "unqualified.scala file must compiled with an alternative toXML" in {
    (List("""scalaxb.toXML[unqualified.Foo](scalaxb.fromXML[unqualified.Foo](""" +
      """<unq:foo xmlns:unq="http://www.example.com/unqualified" attribute1="bar">""" +
      "<string1></string1>" +
      """<unq:bar>bar</unq:bar>""" +
      """</unq:foo>), """ +
      """Some("http://www.example.com/unqualified"), Some("foo"), """ +
      """scalaxb.toScope(Some("unq") -> "http://www.example.com/unqualified") ).toString"""),
      generated) must evaluateTo("""<unq:foo attribute1="bar" xmlns:unq="http://www.example.com/unqualified">""" +
      "<string1></string1>" +
      """<unq:bar>bar</unq:bar>""" +
      "</unq:foo>", outdir = "./tmp")
  }

  // When no top-level namespace is passed, the serialized root element should
  // come out unprefixed (<foo>), while nested qualified elements keep unq:.
  "unqualified.scala file must compile so that Foo can be used without toplevel prefix" in {
    (List("""scalaxb.toXML[unqualified.Foo](scalaxb.fromXML[unqualified.Foo](""" +
      """<unq:foo xmlns:unq="http://www.example.com/unqualified" attribute1="bar">""" +
      "<string1></string1>" +
      """<unq:bar>bar</unq:bar>""" +
      """</unq:foo>), "foo", """ +
      """scalaxb.toScope(Some("unq") -> "http://www.example.com/unqualified") ).toString"""),
      generated) must
      evaluateTo("""<foo attribute1="bar" xmlns:unq="http://www.example.com/unqualified">""" +
      "<string1></string1>" +
      """<unq:bar>bar</unq:bar>""" +
      "</foo>", outdir = "./tmp")
  }

  // Substitution-group style round trip: a USAddress serialized through the
  // Addressable base type must keep its xsi:type attribute so fromXML can
  // reconstruct the concrete subtype (hence the DataRecord in the expected Map).
  "unqualified.scala file must compile so that USAddress can roundtrip" in {
    (List("""val usaddress = unqualified.USAddress("123", "New York", "NY", 10000, Map())""",
      """val xml = scalaxb.toXML[unqualified.Addressable](usaddress, None, Some("shipTo"), unqualified.defaultScope)""",
      """val x = scalaxb.fromXML[unqualified.Addressable](xml).toString""",
      """x"""),
      generated) must evaluateTo ("""USAddress(123,New York,NY,10000,Map(@{http://www.w3.org/2001/XMLSchema-instance}type -> DataRecord({http://www.w3.org/2001/XMLSchema-instance}type,tns:USAddress)))""", outdir = "./tmp")
  }

  // Disabled counterpart for the QUALIFIED schema; kept for reference.
  /*
  val inFile2 = new File("integration/src/test/resources/qualified.xsd")
  lazy val generated2 = (new Driver).process(inFile2, "qualified", tmp)

  "qualified.scala file must compile so that Foo can be used" in {
    (List("""scalaxb.toXML[qualified.Foo](scalaxb.fromXML[qualified.Foo](""" +
      """<q:foo xmlns:q="http://www.example.com/qualified" q:attribute1="bar">""" +
      "<q:string1></q:string1>" +
      """</q:foo>), """ +
      """Some("http://www.example.com/qualified"), "foo", """ +
      """scalaxb.toScope(Some("q") -> "http://www.example.com/qualified") ).toString"""),
      generated2) must evaluateTo("""<q:foo q:attribute1="bar" xmlns:q="http://www.example.com/qualified">""" +
      "<q:string1></q:string1>" +
      "</q:foo>", outdir = "./tmp")
  }
  */
}
{ "pile_set_name": "Github" }
<%@page pageEncoding="UTF-8" contentType="text/html; charset=UTF-8" %><!DOCTYPE html> <html> <head> <meta charset="UTF-8"> <title><la:message key="labels.admin_brand_title"/> | <la:message key="labels.search_list_configuration"/></title> <jsp:include page="/WEB-INF/view/common/admin/head.jsp"></jsp:include> </head> <body class="hold-transition sidebar-mini"> <div class="wrapper"> <jsp:include page="/WEB-INF/view/common/admin/header.jsp"></jsp:include> <jsp:include page="/WEB-INF/view/common/admin/sidebar.jsp"> <jsp:param name="menuCategoryType" value="log"/> <jsp:param name="menuType" value="searchList"/> </jsp:include> <div class="content-wrapper"> <div class="content-header"> <div class="container-fluid"> <div class="row mb-2"> <div class="col-sm-6"> <h1> <la:message key="labels.search_list_configuration"/> </h1> </div> <div class="col-sm-6"> <ol class="breadcrumb float-sm-right"> <li class="breadcrumb-item active"><la:link href="/admin/searchlist/search?q=${f:u(q)}"> <la:message key="labels.search_list_configuration"/> </la:link></li> </ol> </div> </div> </div> </div> <section class="content"> <la:form action="/admin/searchlist/"> <la:hidden property="crudMode"/> <la:hidden property="q"/> <c:if test="${crudMode==2}"> <la:hidden property="id"/> <la:hidden property="seqNo"/> <la:hidden property="primaryTerm"/> </c:if> <div class="row"> <div class="col-md-12"> <div class="card card-outline <c:if test="${crudMode == 1 || crudMode == 2}">card-success</c:if>"> <div class="card-header"> <h3 class="card-title"> <c:if test="${crudMode == null}"> <la:message key="labels.crud_title_list"/> </c:if> <c:if test="${crudMode == 1}"> <la:message key="labels.crud_title_create"/> </c:if> <c:if test="${crudMode == 2}"> <la:message key="labels.crud_title_edit"/> </c:if> <c:if test="${crudMode == 3}"> <la:message key="labels.crud_title_delete"/> </c:if> <c:if test="${crudMode == 4}"> <la:message key="labels.crud_title_details"/> </c:if> </h3> <div class="card-tools"> <div 
class="btn-group"> <c:choose> <c:when test="${crudMode == null}"> <la:link href="createnew" styleClass="btn btn-success btn-xs"> <em class="fa fa-plus"></em> <la:message key="labels.crud_link_create"/> </la:link> </c:when> <c:otherwise> <la:link href="/admin/searchlist/search?q=${f:u(q)}" styleClass="btn btn-primary btn-xs"> <em class="fa fa-th-list"></em> <la:message key="labels.crud_link_list"/> </la:link> </c:otherwise> </c:choose> </div> </div> </div> <div class="card-body"> <div> <la:info id="msg" message="true"> <div class="alert alert-info">${msg}</div> </la:info> <la:errors property="_global"/> </div> <c:if test="${crudMode==2}"> <div class="form-group row"> <label class="col-sm-3 text-sm-right col-form-label">_id</label> <div class="col-sm-9">${f:h(id)}</div> </div> <div class="form-group row"> <label for="doc.doc_id" class="col-sm-3 text-sm-right col-form-label">doc_id</label> <div class="col-sm-9"> ${f:h(doc.doc_id)} <la:hidden styleId="doc.doc_id" property="doc.doc_id"/> </div> </div> </c:if> <div class="form-group row"> <label for="doc.url" class="col-sm-3 text-sm-right col-form-label">url</label> <div class="col-sm-9"> <la:errors property="doc.url"/> <la:text styleId="doc.url" property="doc.url" styleClass="form-control" required="required" data-validation="required"/> </div> </div> <div class="form-group row"> <label for="doc.title" class="col-sm-3 text-sm-right col-form-label">title</label> <div class="col-sm-9"> <la:errors property="doc.title"/> <la:text styleId="doc.title" property="doc.title" styleClass="form-control" required="required" data-validation="required"/> </div> </div> <div class="form-group row"> <label for="doc.role" class="col-sm-3 text-sm-right col-form-label">role</label> <div class="col-sm-9"> <la:errors property="doc.role"/> <la:textarea styleId="doc.role" property="doc.role" styleClass="form-control" data-validation-help="1(username) | 2(groupname) | R(rolename) e.g. 
Rguest"/> </div> </div> <div class="form-group row"> <label for="doc.boost" class="col-sm-3 text-sm-right col-form-label">boost</label> <div class="col-sm-9"> <la:errors property="doc.boost"/> <la:text styleId="doc.boost" property="doc.boost" styleClass="form-control" title="Floating point number" required="required" data-validation="custom" data-validation-regexp="(\+|\-)?\d+(\.\d+)?((e|E)(\+|\-)?\d+)?" data-validation-help="number (Float)"/> </div> </div> <div class="form-group row"> <label for="doc.label" class="col-sm-3 text-sm-right col-form-label">label</label> <div class="col-sm-9"> <la:errors property="doc.label"/> <la:textarea styleId="doc.label" property="doc.label" styleClass="form-control"/> </div> </div> <div class="form-group row"> <label for="doc.lang" class="col-sm-3 text-sm-right col-form-label">lang</label> <div class="col-sm-9"> <la:errors property="doc.lang"/> <la:textarea styleId="doc.lang" property="doc.lang" styleClass="form-control"/> </div> </div> <div class="form-group row"> <label for="doc.mimetype" class="col-sm-3 text-sm-right col-form-label">mimetype</label> <div class="col-sm-9"> <la:errors property="doc.mimetype"/> <la:text styleId="doc.mimetype" property="doc.mimetype" styleClass="form-control"/> </div> </div> <div class="form-group row"> <label for="doc.filetype" class="col-sm-3 text-sm-right col-form-label">filetype</label> <div class="col-sm-9"> <la:errors property="doc.filetype"/> <la:text styleId="doc.filetype" property="doc.filetype" styleClass="form-control"/> </div> </div> <div class="form-group row"> <label for="doc.filename" class="col-sm-3 text-sm-right col-form-label">filename</label> <div class="col-sm-9"> <la:errors property="doc.filename"/> <la:text styleId="doc.filename" property="doc.filename" styleClass="form-control"/> </div> </div> <div class="form-group row"> <label for="doc.content" class="col-sm-3 text-sm-right col-form-label">content</label> <div class="col-sm-9"> <la:errors property="doc.content"/> <la:text 
styleId="doc.content" property="doc.content" styleClass="form-control"/> </div> </div> <div class="form-group row"> <label for="doc.has_cache" class="col-sm-3 text-sm-right col-form-label">has_cache</label> <div class="col-sm-9"> <la:errors property="doc.has_cache"/> <la:text styleId="doc.has_cache" property="doc.has_cache" styleClass="form-control"/> </div> </div> <div class="form-group row"> <label for="doc.cache" class="col-sm-3 text-sm-right col-form-label">cache</label> <div class="col-sm-9"> <la:errors property="doc.cache"/> <la:text styleId="doc.cache" property="doc.cache" styleClass="form-control"/> </div> </div> <div class="form-group row"> <label for="doc.digest" class="col-sm-3 text-sm-right col-form-label">digest</label> <div class="col-sm-9"> <la:errors property="doc.digest"/> <la:text styleId="doc.digest" property="doc.digest" styleClass="form-control"/> </div> </div> <div class="form-group row"> <label for="doc.host" class="col-sm-3 text-sm-right col-form-label">host</label> <div class="col-sm-9"> <la:errors property="doc.host"/> <la:text styleId="doc.host" property="doc.host" styleClass="form-control"/> </div> </div> <div class="form-group row"> <label for="doc.site" class="col-sm-3 text-sm-right col-form-label">site</label> <div class="col-sm-9"> <la:errors property="doc.site"/> <la:text styleId="doc.site" property="doc.site" styleClass="form-control"/> </div> </div> <div class="form-group row"> <label for="doc.segment" class="col-sm-3 text-sm-right col-form-label">segment</label> <div class="col-sm-9"> <la:errors property="doc.segment"/> <la:text styleId="doc.segment" property="doc.segment" styleClass="form-control"/> </div> </div> <div class="form-group row"> <label for="doc.config_id" class="col-sm-3 text-sm-right col-form-label">config_id</label> <div class="col-sm-9"> <la:errors property="doc.config_id"/> <la:text styleId="doc.config_id" property="doc.config_id" styleClass="form-control"/> </div> </div> <div class="form-group row"> <label 
for="doc.parent_id" class="col-sm-3 text-sm-right col-form-label">parent_id</label> <div class="col-sm-9"> <la:errors property="doc.parent_id"/> <la:text styleId="doc.parent_id" property="doc.parent_id" styleClass="form-control"/> </div> </div> <div class="form-group row"> <label for="doc.content_length" class="col-sm-3 text-sm-right col-form-label">content_length</label> <div class="col-sm-9"> <la:errors property="doc.content_length"/> <la:text styleId="doc.content_length" property="doc.content_length" styleClass="form-control" title="Integer" data-validation="custom" data-validation-regexp="^(\d+)?$" data-validation-help="number (Integer)"/> </div> </div> <div class="form-group row"> <label for="doc.favorite_count" class="col-sm-3 text-sm-right col-form-label">favorite_count</label> <div class="col-sm-9"> <la:errors property="doc.favorite_count"/> <la:text styleId="doc.favorite_count" property="doc.favorite_count" styleClass="form-control" title="Integer" data-validation="custom" data-validation-regexp="^(\d+)?$" data-validation-help="number (Integer)"/> </div> </div> <div class="form-group row"> <label for="doc.click_count" class="col-sm-3 text-sm-right col-form-label">click_count</label> <div class="col-sm-9"> <la:errors property="doc.click_count"/> <la:text styleId="doc.click_count" property="doc.click_count" styleClass="form-control" title="Integer" data-validation="custom" data-validation-regexp="^(\d+)?$" data-validation-help="number (Integer)"/> </div> </div> <div class="form-group row"> <label for="doc.created" class="col-sm-3 text-sm-right col-form-label">created</label> <div class="col-sm-9"> <la:errors property="doc.created"/> <la:text styleId="doc.created" property="doc.created" styleClass="form-control" title="yyyy-MM-dd'T'HH:mm:ss.SSS'Z'" data-validation="custom" data-validation-regexp="(^$|^[1-9]\d{3}\-\d\d\-\d\dT\d\d\:\d\d\:\d\d\.\d{3}Z$)" data-validation-help="yyyy-MM-dd'T'HH:mm:ss.SSS'Z'"/> </div> </div> <div class="form-group row"> <label 
for="doc.timestamp" class="col-sm-3 text-sm-right col-form-label">timestamp</label> <div class="col-sm-9"> <la:errors property="doc.timestamp"/> <la:text styleId="doc.timestamp" property="doc.timestamp" styleClass="form-control" title="yyyy-MM-dd'T'HH:mm:ss.SSS'Z'" data-validation="custom" data-validation-regexp="(^$|^[1-9]\d{3}\-\d\d\-\d\dT\d\d\:\d\d\:\d\d\.\d{3}Z$)" data-validation-help="yyyy-MM-dd'T'HH:mm:ss.SSS'Z'"/> </div> </div> <div class="form-group row"> <label for="doc.last_modified" class="col-sm-3 text-sm-right col-form-label">last_modified</label> <div class="col-sm-9"> <la:errors property="doc.last_modified"/> <la:text styleId="doc.last_modified" property="doc.last_modified" styleClass="form-control" title="yyyy-MM-dd'T'HH:mm:ss.SSS'Z'" data-validation="custom" data-validation-regexp="(^$|^[1-9]\d{3}\-\d\d\-\d\dT\d\d\:\d\d\:\d\d\.\d{3}Z$)" data-validation-help="yyyy-MM-dd'T'HH:mm:ss.SSS'Z'"/> </div> </div> <div class="form-group row"> <label for="doc.expires" class="col-sm-3 text-sm-right col-form-label">expires</label> <div class="col-sm-9"> <la:errors property="doc.expires"/> <la:text styleId="doc.expires" property="doc.expires" styleClass="form-control" title="yyyy-MM-dd'T'HH:mm:ss.SSS'Z'" data-validation="custom" data-validation-regexp="(^$|^[1-9]\d{3}\-\d\d\-\d\dT\d\d\:\d\d\:\d\d\.\d{3}Z$)" data-validation-help="yyyy-MM-dd'T'HH:mm:ss.SSS'Z'"/> </div> </div> <div class="form-group row"> <label for="doc.virtual_host" class="col-sm-3 text-sm-right col-form-label">virtual_host</label> <div class="col-sm-9"> <la:errors property="doc.virtual_host"/> <la:textarea styleId="doc.virtual_host" property="doc.virtual_host" styleClass="form-control"/> </div> </div> </div> <div class="card-footer"> <c:if test="${crudMode == 1}"> <la:link href="/admin/searchlist/search?q=${f:u(q)}" styleClass="btn btn-default"> <em class="fa fa-arrow-circle-left"></em> <la:message key="labels.crud_button_back"/> </la:link> <button type="submit" class="btn btn-success" 
name="create" value="<la:message key="labels.crud_button_create" />"> <em class="fa fa-plus"></em> <la:message key="labels.crud_button_create"/> </button> </c:if> <c:if test="${crudMode == 2}"> <la:link href="/admin/searchlist/search?q=${f:u(q)}" styleClass="btn btn-default"> <em class="fa fa-arrow-circle-left"></em> <la:message key="labels.crud_button_back"/> </la:link> <button type="submit" class="btn btn-success" name="update" value="<la:message key="labels.crud_button_update" />"> <em class="fa fa-pencil-alt"></em> <la:message key="labels.crud_button_update"/> </button> </c:if> </div> </div> </div> </div> </la:form> </section> </div> <jsp:include page="/WEB-INF/view/common/admin/footer.jsp"></jsp:include> </div> <jsp:include page="/WEB-INF/view/common/admin/foot.jsp"></jsp:include> <script src="${fe:url('/js/admin/plugins/form-validator/jquery.form-validator.min.js')}" type="text/javascript"></script> <script src="${fe:url('/js/admin/searchlist.js')}" type="text/javascript"></script> </body> </html>
{ "pile_set_name": "Github" }
<?php
/*
 * Copyright 2016 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */

/**
 * The "containers" collection of methods.
 * Typical usage is:
 *  <code>
 *   $tagmanagerService = new Google_Service_TagManager(...);
 *   $containers = $tagmanagerService->containers;
 *  </code>
 */
class Google_Service_TagManager_Resource_AccountsContainers extends Google_Service_Resource
{
  /**
   * Creates a Container. (containers.create)
   *
   * @param string $accountId The GTM Account ID.
   * @param Google_Service_TagManager_Container $postBody
   * @param array $optParams Optional parameters.
   * @return Google_Service_TagManager_Container
   */
  public function create($accountId, Google_Service_TagManager_Container $postBody, $optParams = array())
  {
    // Required parameters come first; callers may override/extend via $optParams.
    $required = array('accountId' => $accountId, 'postBody' => $postBody);
    return $this->call('create', array(array_merge($required, $optParams)), "Google_Service_TagManager_Container");
  }
  /**
   * Deletes a Container. (containers.delete)
   *
   * @param string $accountId The GTM Account ID.
   * @param string $containerId The GTM Container ID.
   * @param array $optParams Optional parameters.
   */
  public function delete($accountId, $containerId, $optParams = array())
  {
    $required = array('accountId' => $accountId, 'containerId' => $containerId);
    return $this->call('delete', array(array_merge($required, $optParams)));
  }
  /**
   * Gets a Container. (containers.get)
   *
   * @param string $accountId The GTM Account ID.
   * @param string $containerId The GTM Container ID.
   * @param array $optParams Optional parameters.
   * @return Google_Service_TagManager_Container
   */
  public function get($accountId, $containerId, $optParams = array())
  {
    $required = array('accountId' => $accountId, 'containerId' => $containerId);
    return $this->call('get', array(array_merge($required, $optParams)), "Google_Service_TagManager_Container");
  }
  /**
   * Lists all Containers that belongs to a GTM Account.
   * (containers.listAccountsContainers)
   *
   * @param string $accountId The GTM Account ID.
   * @param array $optParams Optional parameters.
   * @return Google_Service_TagManager_ListContainersResponse
   */
  public function listAccountsContainers($accountId, $optParams = array())
  {
    // Note: the wire method name is 'list'; PHP reserves that keyword for
    // method names in older versions, hence the longer PHP-side name.
    $required = array('accountId' => $accountId);
    return $this->call('list', array(array_merge($required, $optParams)), "Google_Service_TagManager_ListContainersResponse");
  }
  /**
   * Updates a Container. (containers.update)
   *
   * @param string $accountId The GTM Account ID.
   * @param string $containerId The GTM Container ID.
   * @param Google_Service_TagManager_Container $postBody
   * @param array $optParams Optional parameters.
   *
   * @opt_param string fingerprint When provided, this fingerprint must match the
   * fingerprint of the container in storage.
   * @return Google_Service_TagManager_Container
   */
  public function update($accountId, $containerId, Google_Service_TagManager_Container $postBody, $optParams = array())
  {
    $required = array('accountId' => $accountId, 'containerId' => $containerId, 'postBody' => $postBody);
    return $this->call('update', array(array_merge($required, $optParams)), "Google_Service_TagManager_Container");
  }
}
{ "pile_set_name": "Github" }
<!doctype html> <html lang="en"> <head> <title>Code coverage report for lib/spdy-transport/protocol/spdy/</title> <meta charset="utf-8" /> <link rel="stylesheet" href="../../../../prettify.css" /> <link rel="stylesheet" href="../../../../base.css" /> <meta name="viewport" content="width=device-width, initial-scale=1"> <style type='text/css'> .coverage-summary .sorter { background-image: url(../../../../sort-arrow-sprite.png); } </style> </head> <body> <div class='wrapper'> <div class='pad1'> <h1> <a href="../../../../index.html">all files</a> lib/spdy-transport/protocol/spdy/ </h1> <div class='clearfix'> <div class='fl pad1y space-right2'> <span class="strong">90.65% </span> <span class="quiet">Statements</span> <span class='fraction'>475/524</span> </div> <div class='fl pad1y space-right2'> <span class="strong">79.67% </span> <span class="quiet">Branches</span> <span class='fraction'>196/246</span> </div> <div class='fl pad1y space-right2'> <span class="strong">93.24% </span> <span class="quiet">Functions</span> <span class='fraction'>69/74</span> </div> <div class='fl pad1y space-right2'> <span class="strong">91.94% </span> <span class="quiet">Lines</span> <span class='fraction'>468/509</span> </div> </div> </div> <div class='status-line high'></div> <div class="pad1"> <table class="coverage-summary"> <thead> <tr> <th data-col="file" data-fmt="html" data-html="true" class="file">File</th> <th data-col="pic" data-type="number" data-fmt="html" data-html="true" class="pic"></th> <th data-col="statements" data-type="number" data-fmt="pct" class="pct">Statements</th> <th data-col="statements_raw" data-type="number" data-fmt="html" class="abs"></th> <th data-col="branches" data-type="number" data-fmt="pct" class="pct">Branches</th> <th data-col="branches_raw" data-type="number" data-fmt="html" class="abs"></th> <th data-col="functions" data-type="number" data-fmt="pct" class="pct">Functions</th> <th data-col="functions_raw" data-type="number" data-fmt="html" 
class="abs"></th> <th data-col="lines" data-type="number" data-fmt="pct" class="pct">Lines</th> <th data-col="lines_raw" data-type="number" data-fmt="html" class="abs"></th> </tr> </thead> <tbody><tr> <td class="file high" data-value="constants.js"><a href="constants.js.html">constants.js</a></td> <td data-value="100" class="pic high"><div class="chart"><div class="cover-fill cover-full" style="width: 100%;"></div><div class="cover-empty" style="width:0%;"></div></div></td> <td data-value="100" class="pct high">100%</td> <td data-value="18" class="abs high">18/18</td> <td data-value="100" class="pct high">100%</td> <td data-value="0" class="abs high">0/0</td> <td data-value="100" class="pct high">100%</td> <td data-value="0" class="abs high">0/0</td> <td data-value="100" class="pct high">100%</td> <td data-value="18" class="abs high">18/18</td> </tr> <tr> <td class="file high" data-value="dictionary.js"><a href="dictionary.js.html">dictionary.js</a></td> <td data-value="100" class="pic high"><div class="chart"><div class="cover-fill cover-full" style="width: 100%;"></div><div class="cover-empty" style="width:0%;"></div></div></td> <td data-value="100" class="pct high">100%</td> <td data-value="6" class="abs high">6/6</td> <td data-value="100" class="pct high">100%</td> <td data-value="0" class="abs high">0/0</td> <td data-value="100" class="pct high">100%</td> <td data-value="0" class="abs high">0/0</td> <td data-value="100" class="pct high">100%</td> <td data-value="6" class="abs high">6/6</td> </tr> <tr> <td class="file high" data-value="framer.js"><a href="framer.js.html">framer.js</a></td> <td data-value="91.84" class="pic high"><div class="chart"><div class="cover-fill" style="width: 91%;"></div><div class="cover-empty" style="width:9%;"></div></div></td> <td data-value="91.84" class="pct high">91.84%</td> <td data-value="245" class="abs high">225/245</td> <td data-value="81.55" class="pct high">81.55%</td> <td data-value="103" class="abs high">84/103</td> <td 
data-value="93.18" class="pct high">93.18%</td> <td data-value="44" class="abs high">41/44</td> <td data-value="92.05" class="pct high">92.05%</td> <td data-value="239" class="abs high">220/239</td> </tr> <tr> <td class="file high" data-value="index.js"><a href="index.js.html">index.js</a></td> <td data-value="100" class="pic high"><div class="chart"><div class="cover-fill cover-full" style="width: 100%;"></div><div class="cover-empty" style="width:0%;"></div></div></td> <td data-value="100" class="pct high">100%</td> <td data-value="6" class="abs high">6/6</td> <td data-value="100" class="pct high">100%</td> <td data-value="0" class="abs high">0/0</td> <td data-value="100" class="pct high">100%</td> <td data-value="0" class="abs high">0/0</td> <td data-value="100" class="pct high">100%</td> <td data-value="6" class="abs high">6/6</td> </tr> <tr> <td class="file high" data-value="parser.js"><a href="parser.js.html">parser.js</a></td> <td data-value="88.05" class="pic high"><div class="chart"><div class="cover-fill" style="width: 88%;"></div><div class="cover-empty" style="width:12%;"></div></div></td> <td data-value="88.05" class="pct high">88.05%</td> <td data-value="226" class="abs high">199/226</td> <td data-value="79.14" class="pct medium">79.14%</td> <td data-value="139" class="abs medium">110/139</td> <td data-value="95.83" class="pct high">95.83%</td> <td data-value="24" class="abs high">23/24</td> <td data-value="90.78" class="pct high">90.78%</td> <td data-value="217" class="abs high">197/217</td> </tr> <tr> <td class="file high" data-value="zlib-pool.js"><a href="zlib-pool.js.html">zlib-pool.js</a></td> <td data-value="91.3" class="pic high"><div class="chart"><div class="cover-fill" style="width: 91%;"></div><div class="cover-empty" style="width:9%;"></div></div></td> <td data-value="91.3" class="pct high">91.3%</td> <td data-value="23" class="abs high">21/23</td> <td data-value="50" class="pct medium">50%</td> <td data-value="4" class="abs 
medium">2/4</td> <td data-value="83.33" class="pct high">83.33%</td> <td data-value="6" class="abs high">5/6</td> <td data-value="91.3" class="pct high">91.3%</td> <td data-value="23" class="abs high">21/23</td> </tr> </tbody> </table> </div><div class='push'></div><!-- for sticky footer --> </div><!-- /wrapper --> <div class='footer quiet pad2 space-top1 center small'> Code coverage generated by <a href="http://istanbul-js.org/" target="_blank">istanbul</a> at Thu Feb 02 2017 23:24:23 GMT+0000 (WET) </div> </div> <script src="../../../../prettify.js"></script> <script> window.onload = function () { if (typeof prettyPrint === 'function') { prettyPrint(); } }; </script> <script src="../../../../sorter.js"></script> </body> </html>
{ "pile_set_name": "Github" }
/* * C# Version Ported by Matt Bettcher and Ian Qvist 2009-2010 * * Original C++ Version Copyright (c) 2007 Eric Jordan * * This software is provided 'as-is', without any express or implied * warranty. In no event will the authors be held liable for any damages * arising from the use of this software. * Permission is granted to anyone to use this software for any purpose, * including commercial applications, and to alter it and redistribute it * freely, subject to the following restrictions: * 1. The origin of this software must not be misrepresented; you must not * claim that you wrote the original software. If you use this software * in a product, an acknowledgment in the product documentation would be * appreciated but is not required. * 2. Altered source versions must be plainly marked as such, and must not be * misrepresented as being the original software. * 3. This notice may not be removed or altered from any source distribution. */ using System; using System.Collections.Generic; using System.Diagnostics; using Microsoft.Xna.Framework; namespace FarseerPhysics.Common.Decomposition { /// <summary> /// Convex decomposition algorithm using ear clipping /// /// Properties: /// - Only works on simple polygons. /// - Does not support holes. /// - Running time is O(n^2), n = number of vertices. /// /// Source: http://www.ewjordan.com/earClip/ /// </summary> internal static class EarclipDecomposer { //box2D rev 32 - for details, see http://www.box2d.org/forum/viewtopic.php?f=4&t=83&start=50 /// <summary> /// Decompose the polygon into several smaller non-concave polygon. /// Each resulting polygon will have no more than Settings.MaxPolygonVertices vertices. 
/// </summary> /// <param name="vertices">The vertices.</param> /// <param name="tolerance">The tolerance.</param> public static List<Vertices> ConvexPartition(Vertices vertices, float tolerance = 0.001f) { Debug.Assert(vertices.Count > 3); Debug.Assert(!vertices.IsCounterClockWise()); return TriangulatePolygon(vertices, tolerance); } /// <summary> /// Triangulates a polygon using simple ear-clipping algorithm. Returns /// size of Triangle array unless the polygon can't be triangulated. /// This should only happen if the polygon self-intersects, /// though it will not _always_ return null for a bad polygon - it is the /// caller's responsibility to check for self-intersection, and if it /// doesn't, it should at least check that the return value is non-null /// before using. You're warned! /// /// Triangles may be degenerate, especially if you have identical points /// in the input to the algorithm. Check this before you use them. /// /// This is totally unoptimized, so for large polygons it should not be part /// of the simulation loop. /// </summary> /// <remarks> /// Only works on simple polygons. /// </remarks> static List<Vertices> TriangulatePolygon(Vertices vertices, float tolerance) { //FPE note: Check is needed as invalid triangles can be returned in recursive calls. 
if (vertices.Count < 3) return new List<Vertices>(); var results = new List<Vertices>(); //Recurse and split on pinch points Vertices pA, pB; var pin = new Vertices(vertices); if (ResolvePinchPoint(pin, out pA, out pB, tolerance)) { var mergeA = TriangulatePolygon(pA, tolerance); var mergeB = TriangulatePolygon(pB, tolerance); if (mergeA.Count == -1 || mergeB.Count == -1) throw new Exception("Can't triangulate your polygon."); for (int i = 0; i < mergeA.Count; ++i) results.Add(new Vertices(mergeA[i])); for (int i = 0; i < mergeB.Count; ++i) results.Add(new Vertices(mergeB[i])); return results; } var buffer = new Vertices[vertices.Count - 2]; var bufferSize = 0; var xrem = new float[vertices.Count]; var yrem = new float[vertices.Count]; for (int i = 0; i < vertices.Count; ++i) { xrem[i] = vertices[i].X; yrem[i] = vertices[i].Y; } var vNum = vertices.Count; while (vNum > 3) { // Find an ear var earIndex = -1; var earMaxMinCross = -10.0f; for (int i = 0; i < vNum; ++i) { if (IsEar(i, xrem, yrem, vNum)) { var lower = Remainder(i - 1, vNum); var upper = Remainder(i + 1, vNum); var d1 = new Vector2(xrem[upper] - xrem[i], yrem[upper] - yrem[i]); var d2 = new Vector2(xrem[i] - xrem[lower], yrem[i] - yrem[lower]); var d3 = new Vector2(xrem[lower] - xrem[upper], yrem[lower] - yrem[upper]); Nez.Vector2Ext.Normalize(ref d1); Nez.Vector2Ext.Normalize(ref d2); Nez.Vector2Ext.Normalize(ref d3); float cross12; MathUtils.Cross(ref d1, ref d2, out cross12); cross12 = Math.Abs(cross12); float cross23; MathUtils.Cross(ref d2, ref d3, out cross23); cross23 = Math.Abs(cross23); float cross31; MathUtils.Cross(ref d3, ref d1, out cross31); cross31 = Math.Abs(cross31); //Find the maximum minimum angle float minCross = Math.Min(cross12, Math.Min(cross23, cross31)); if (minCross > earMaxMinCross) { earIndex = i; earMaxMinCross = minCross; } } } // If we still haven't found an ear, we're screwed. // Note: sometimes this is happening because the // remaining points are collinear. 
Really these // should just be thrown out without halting triangulation. if (earIndex == -1) { for (int i = 0; i < bufferSize; i++) results.Add(buffer[i]); return results; } // Clip off the ear: // - remove the ear tip from the list --vNum; float[] newx = new float[vNum]; float[] newy = new float[vNum]; int currDest = 0; for (int i = 0; i < vNum; ++i) { if (currDest == earIndex) ++currDest; newx[i] = xrem[currDest]; newy[i] = yrem[currDest]; ++currDest; } // - add the clipped triangle to the triangle list int under = (earIndex == 0) ? (vNum) : (earIndex - 1); int over = (earIndex == vNum) ? 0 : (earIndex + 1); var toAdd = new Triangle(xrem[earIndex], yrem[earIndex], xrem[over], yrem[over], xrem[under], yrem[under]); buffer[bufferSize] = toAdd; ++bufferSize; // - replace the old list with the new one xrem = newx; yrem = newy; } var tooAdd = new Triangle(xrem[1], yrem[1], xrem[2], yrem[2], xrem[0], yrem[0]); buffer[bufferSize] = tooAdd; ++bufferSize; for (int i = 0; i < bufferSize; i++) results.Add(new Vertices(buffer[i])); return results; } /// <summary> /// Finds and fixes "pinch points," points where two polygon /// vertices are at the same point. /// /// If a pinch point is found, pin is broken up into poutA and poutB /// and true is returned; otherwise, returns false. /// /// Mostly for internal use. /// /// O(N^2) time, which sucks... 
/// </summary>
/// <param name="pin">The input polygon to scan for pinch points.</param>
/// <param name="poutA">Receives the first sub-polygon when a pinch point is found (empty otherwise).</param>
/// <param name="poutB">Receives the second sub-polygon when a pinch point is found (empty otherwise).</param>
/// <param name="tolerance">Maximum per-axis distance for two vertices to count as coincident.</param>
static bool ResolvePinchPoint(Vertices pin, out Vertices poutA, out Vertices poutB, float tolerance)
{
    poutA = new Vertices();
    poutB = new Vertices();

    // A polygon with fewer than 3 vertices cannot contain a pinch point.
    if (pin.Count < 3)
        return false;

    bool hasPinchPoint = false;
    int pinchIndexA = -1;
    int pinchIndexB = -1;

    // O(N^2) pairwise scan for two (nearly) coincident vertices.
    // Coincidence is tested per-axis against the tolerance, not by
    // Euclidean distance.
    for (int i = 0; i < pin.Count; ++i)
    {
        for (int j = i + 1; j < pin.Count; ++j)
        {
            //Don't worry about pinch points where the points
            //are actually just dupe neighbors
            if (Math.Abs(pin[i].X - pin[j].X) < tolerance && Math.Abs(pin[i].Y - pin[j].Y) < tolerance && j != i + 1)
            {
                pinchIndexA = i;
                pinchIndexB = j;
                hasPinchPoint = true;
                break;
            }
        }

        if (hasPinchPoint)
            break;
    }

    if (hasPinchPoint)
    {
        // Split the vertex ring at the pinch pair: indices
        // [pinchIndexA, pinchIndexB) go to poutA, the remaining
        // [pinchIndexB, pinchIndexA) (wrapping) go to poutB.
        int sizeA = pinchIndexB - pinchIndexA;
        if (sizeA == pin.Count)
            return false; //has dupe points at wraparound, not a problem here

        for (int i = 0; i < sizeA; ++i)
        {
            int ind = Remainder(pinchIndexA + i, pin.Count); // is this right
            poutA.Add(pin[ind]);
        }

        int sizeB = pin.Count - sizeA;
        for (int i = 0; i < sizeB; ++i)
        {
            int ind = Remainder(pinchIndexB + i, pin.Count); // is this right
            poutB.Add(pin[ind]);
        }
    }

    return hasPinchPoint;
}

/// <summary>
/// Fix for obnoxious behavior for the % operator for negative numbers...
/// In C#, -1 % 5 == -1; this returns the mathematical (non-negative)
/// residue instead, e.g. Remainder(-1, 5) == 4.
/// </summary>
/// <param name="x">The dividend (may be negative).</param>
/// <param name="modulus">The modulus; assumed positive — TODO confirm callers never pass 0 or negatives.</param>
/// <returns>x mod modulus, normalized into [0, modulus).</returns>
static int Remainder(int x, int modulus)
{
    int rem = x % modulus;
    while (rem < 0)
    {
        rem += modulus;
    }

    return rem;
}

/// <summary>
/// Checks if vertex i is the tip of an ear in polygon defined by xv[] and yv[].
/// </summary>
/// <param name="i">Index of the candidate ear tip.</param>
/// <param name="xv">X coordinates of the polygon vertices.</param>
/// <param name="yv">Y coordinates of the polygon vertices.</param>
/// <param name="xvLength">Number of valid entries in xv/yv.</param>
/// <remarks>
/// Assumes clockwise orientation of polygon.
/// </remarks> /// <returns> /// <c>true</c> if the specified i is ear; otherwise, <c>false</c>. /// </returns> static bool IsEar(int i, float[] xv, float[] yv, int xvLength) { float dx0, dy0, dx1, dy1; if (i >= xvLength || i < 0 || xvLength < 3) { return false; } int upper = i + 1; int lower = i - 1; if (i == 0) { dx0 = xv[0] - xv[xvLength - 1]; dy0 = yv[0] - yv[xvLength - 1]; dx1 = xv[1] - xv[0]; dy1 = yv[1] - yv[0]; lower = xvLength - 1; } else if (i == xvLength - 1) { dx0 = xv[i] - xv[i - 1]; dy0 = yv[i] - yv[i - 1]; dx1 = xv[0] - xv[i]; dy1 = yv[0] - yv[i]; upper = 0; } else { dx0 = xv[i] - xv[i - 1]; dy0 = yv[i] - yv[i - 1]; dx1 = xv[i + 1] - xv[i]; dy1 = yv[i + 1] - yv[i]; } float cross = dx0 * dy1 - dx1 * dy0; if (cross > 0) return false; var myTri = new Triangle(xv[i], yv[i], xv[upper], yv[upper], xv[lower], yv[lower]); for (int j = 0; j < xvLength; ++j) { if (j == i || j == lower || j == upper) continue; if (myTri.IsInside(xv[j], yv[j])) return false; } return true; } class Triangle : Vertices { //Constructor automatically fixes orientation to ccw public Triangle(float x1, float y1, float x2, float y2, float x3, float y3) { float cross = (x2 - x1) * (y3 - y1) - (x3 - x1) * (y2 - y1); if (cross > 0) { Add(new Vector2(x1, y1)); Add(new Vector2(x2, y2)); Add(new Vector2(x3, y3)); } else { Add(new Vector2(x1, y1)); Add(new Vector2(x3, y3)); Add(new Vector2(x2, y2)); } } public bool IsInside(float x, float y) { Vector2 a = this[0]; Vector2 b = this[1]; Vector2 c = this[2]; if (x < a.X && x < b.X && x < c.X) return false; if (x > a.X && x > b.X && x > c.X) return false; if (y < a.Y && y < b.Y && y < c.Y) return false; if (y > a.Y && y > b.Y && y > c.Y) return false; float vx2 = x - a.X; float vy2 = y - a.Y; float vx1 = b.X - a.X; float vy1 = b.Y - a.Y; float vx0 = c.X - a.X; float vy0 = c.Y - a.Y; float dot00 = vx0 * vx0 + vy0 * vy0; float dot01 = vx0 * vx1 + vy0 * vy1; float dot02 = vx0 * vx2 + vy0 * vy2; float dot11 = vx1 * vx1 + vy1 * vy1; float dot12 = vx1 
* vx2 + vy1 * vy2; float invDenom = 1.0f / (dot00 * dot11 - dot01 * dot01); float u = (dot11 * dot02 - dot01 * dot12) * invDenom; float v = (dot00 * dot12 - dot01 * dot02) * invDenom; return ((u > 0) && (v > 0) && (u + v < 1)); } } } }
{ "pile_set_name": "Github" }
package xiaofei.library.hermestest;

import android.content.Context;

import xiaofei.library.hermes.annotation.ClassId;

/**
 * Created by Xiaofei on 16/4/28.
 *
 * Interface describing a file-utility service. The {@code @ClassId("FileUtils")}
 * annotation binds this interface to the class registered under the id
 * "FileUtils" — NOTE(review): presumably resolved on the service side of the
 * Hermes IPC framework; confirm against the registered implementation.
 */
@ClassId("FileUtils")
public interface IFileUtils {

    /**
     * Returns the path of the external cache directory for the given context.
     *
     * @param context the Android context used to locate the cache directory
     * @return the external cache directory path, as a string
     */
    String getExternalCacheDir(Context context);
}
{ "pile_set_name": "Github" }
# Exercise Python's chained comparison operators: `a < b < c` is
# equivalent to `(a < b) and (b < c)` with left-to-right short-circuit
# evaluation, so mixed chains like `1 > 2 < 3` are False as soon as the
# first link fails.
results = (
    1 < 2 < 3,
    1 < 2 < 3 < 4,
    1 > 2 < 3,
    1 < 2 > 3,
)
for value in results:
    print(value)
{ "pile_set_name": "Github" }
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd"> <html xmlns="http://www.w3.org/1999/xhtml"> <head> <title>CSS Writing Modes Test: absolutely positioned non-replaced element - 'direction: ltr' and 'width' is 'auto' and 'left' and 'right' are not 'auto'</title> <link rel="author" title="Gérard Talbot" href="http://www.gtalbot.org/BrowserBugsSection/css21testsuite/" /> <link rel="help" href="http://www.w3.org/TR/css-writing-modes-3/#vertical-layout" title="7.1 Principles of Layout in Vertical Writing Modes" /> <link rel="help" href="http://www.w3.org/TR/CSS21/visudet.html#abs-non-replaced-height" title="10.6.4 Absolutely positioned, non-replaced elements" /> <link rel="match" href="abs-pos-non-replaced-vlr-007-ref.xht" /> <meta name="flags" content="ahem image" /> <meta name="assert" content="When 'direction' is 'ltr' and 'width' is 'auto' and 'left' and 'right' are not 'auto' and 'writing-mode' is 'vertical-lr', then solve for 'width'." /> <style type="text/css"><![CDATA[ @font-face { font-family: Ahem; src: url("../../../fonts/Ahem.ttf"); } ]]></style> <style type="text/css"><![CDATA[ html { writing-mode: vertical-lr; } div#containing-block { background: red url("support/bg-red-2col-3row-320x320.png"); color: transparent; direction: ltr; font: 80px/1 Ahem; height: 320px; position: relative; width: 320px; } div#containing-block > span { background-color: green; height: 1em; left: 1em; position: absolute; right: 2em; width: auto; } /* " Layout calculation rules (such as those in CSS2.1, Section 10.3) that apply to the horizontal dimension in horizontal writing modes instead apply to the vertical dimension in vertical writing modes. 
" 7.1 Principles of Layout in Vertical Writing Modes http://www.w3.org/TR/css-writing-modes-3/#vertical-layout So here, *left properties and *right properties are input into the §10.6.4 algorithms where *left properties refer to *top properties in the layout rules and where *right properties refer to *bottom properties in the layout rules. " 5. 'height' is 'auto', 'top' and 'bottom' are not 'auto', then 'auto' values for 'margin-top' and 'margin-bottom' are set to 0 and solve for 'height' " 'left' + 'margin-left' + 'border-left-width' + 'padding-left' + 'width' + 'padding-right' + 'border-right-width' + 'margin-right' + 'right' = width of containing block So: 160px : right + 0px : margin-right + 0px : border-right-width + 0px : padding-right + (solve) : width: auto + 0px : padding-left + 0px : border-left-width + 0px : margin-left + 80px : left ===================== 320px : width of containing block And so computed width value must be 80px . */ ]]></style> </head> <body> <p><img src="support/pass-cdts-abs-pos-non-replaced.png" width="246" height="36" alt="Image download support must be enabled" /></p> <div id="containing-block">1 2 34<span></span></div> </body> </html>
{ "pile_set_name": "Github" }
{ }
{ "pile_set_name": "Github" }