text
stringlengths 2
1.04M
| meta
dict |
---|---|
package zendesk.belvedere;
import android.annotation.SuppressLint;
import android.content.ContentResolver;
import android.content.Context;
import android.database.Cursor;
import android.os.Build;
import android.os.Bundle;
import android.provider.MediaStore;
import androidx.annotation.Nullable;
import java.util.Locale;
import static android.content.ContentResolver.QUERY_SORT_DIRECTION_DESCENDING;
class ImageStreamCursorProvider {

    /** Columns fetched for every image row. */
    final static String[] PROJECTION = new String[]{
            MediaStore.Images.ImageColumns._ID,
            MediaStore.MediaColumns.DISPLAY_NAME,
            MediaStore.MediaColumns.SIZE,
            MediaStore.MediaColumns.WIDTH,
            MediaStore.MediaColumns.HEIGHT
    };

    private final Context context;
    private final int currentApiLevel;

    /**
     * Constructs an ImageStreamCursorProvider.
     *
     * @param context a valid context
     * @param currentApiLevel the API level of the device running this software
     */
    ImageStreamCursorProvider(Context context, int currentApiLevel) {
        this.context = context;
        this.currentApiLevel = currentApiLevel;
    }

    /**
     * Gets a cursor containing at most {@code count} images, sorted by the
     * order column in descending order. Can return null.
     *
     * @param count the maximum number of images to request
     * @return a Cursor containing the images, or null when no context is available
     */
    @SuppressLint("NewApi")
    @Nullable Cursor getCursor(int count) {
        if (context == null) {
            return null;
        }
        return currentApiLevel >= Build.VERSION_CODES.O
                ? queryWithBundle(count)
                : queryWithSortOrder(count);
    }

    /**
     * Issues the query through the Bundle-based query-args API used on O and above.
     * NOTE(review): ContentResolver.QUERY_ARG_LIMIT is documented as added in
     * API 30 while this path is gated on O (26) — confirm the limit argument is
     * honored on O..Q devices.
     */
    @SuppressLint("NewApi")
    private Cursor queryWithBundle(int count) {
        final Bundle queryArgs = new Bundle();
        queryArgs.putInt(ContentResolver.QUERY_ARG_LIMIT, count);
        queryArgs.putStringArray(ContentResolver.QUERY_ARG_SORT_COLUMNS,
                new String[]{getOrderColumn()});
        queryArgs.putInt(ContentResolver.QUERY_ARG_SORT_DIRECTION, QUERY_SORT_DIRECTION_DESCENDING);
        return context.getContentResolver().query(
                MediaStore.Images.Media.EXTERNAL_CONTENT_URI,
                PROJECTION,
                queryArgs,
                null);
    }

    /** Issues the query with a "&lt;column&gt; DESC LIMIT &lt;n&gt;" sort clause on pre-O devices. */
    private Cursor queryWithSortOrder(int count) {
        final String sortOrder =
                String.format(Locale.US, "%s DESC LIMIT %s", getOrderColumn(), count);
        return context.getContentResolver().query(
                MediaStore.Images.Media.EXTERNAL_CONTENT_URI,
                PROJECTION,
                null,
                null,
                sortOrder);
    }

    /**
     * Chooses the sort column: DATE_TAKEN on Q and above, DATE_MODIFIED otherwise.
     */
    @SuppressLint("InlinedApi")
    String getOrderColumn() {
        return currentApiLevel >= Build.VERSION_CODES.Q
                ? MediaStore.Images.ImageColumns.DATE_TAKEN
                : MediaStore.Images.ImageColumns.DATE_MODIFIED;
    }
}
| {
"content_hash": "65a56f69ddfed0251e44f89788fc4bdb",
"timestamp": "",
"source": "github",
"line_count": 90,
"max_line_length": 104,
"avg_line_length": 31.666666666666668,
"alnum_prop": 0.632280701754386,
"repo_name": "zendesk/belvedere",
"id": "a0ee345dcc20504282d5a642b9869f8c83624b84",
"size": "2850",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "belvedere/src/main/java/zendesk/belvedere/ImageStreamCursorProvider.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "200931"
},
{
"name": "Kotlin",
"bytes": "16669"
},
{
"name": "Shell",
"bytes": "2045"
}
],
"symlink_target": ""
} |
"""Tests for `meta_dataset.models.experimental.reparameterizable_base`."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import itertools
from absl.testing import parameterized
from meta_dataset import test_utils
from meta_dataset.models.experimental import reparameterizable_base
import numpy as np
import tensorflow.compat.v1 as tf
# Build v1-style graphs, but run tf.functions eagerly inside them.
tf.disable_eager_execution()
tf.config.experimental_run_functions_eagerly(True)
class ReparameterizableDense(reparameterizable_base.ReparameterizableModule,
                             tf.keras.layers.Dense):
  """`tf.keras.layers.Dense` with `ReparameterizableModule` mixed in."""
  pass
class ReparameterizableConv2D(reparameterizable_base.ReparameterizableModule,
                              tf.keras.layers.Conv2D):
  """`tf.keras.layers.Conv2D` with `ReparameterizableModule` mixed in."""
  pass
class ReparameterizableBatchNormalization(
    reparameterizable_base.ReparameterizableModule,
    tf.keras.layers.BatchNormalization):
  """`tf.keras.layers.BatchNormalization` with `ReparameterizableModule` mixed in."""
  pass
# Module classes exercised by the tests in this file.
REPARAMETERIZABLE_MODULES = (
    ReparameterizableDense,
    ReparameterizableConv2D,
    ReparameterizableBatchNormalization,
)

VARIABLE_REPARAMETERIZING_PREDICATES = (
    # TODO(eringrant): Add `reparameterizable_base.is_variable` as an option
    # here once the behavior of `ReparameterizableBatchNormalization` is
    # smoothed out; currently, the layer attempts to update the reparameterized
    # moving mean and moving variances, which causes the tests to fail.
    reparameterizable_base.is_trainable_variable,)

MODULE_REPARAMETERIZING_PREDICATES = (
    reparameterizable_base.is_batch_norm_module,)

# Superset of constructor kwargs across the module classes.
# NOTE(review): presumably `test_utils.get_valid_kwargs` filters out kwargs a
# given class does not accept — confirm against `test_utils`.
VALID_MODULE_INIT_ARGS = {
    'units': (12,),
    'activation': (tf.nn.relu, tf.nn.tanh, None),
    'kernel_initializer': ('ones',),
    'use_bias': (True, False),
    'num_filters': (32,),
    'filters': (32,),
    'kernel_size': (32,),
}

VALID_MODULE_CALL_ARGS = {}

# Input shapes used to `build` and call each module class.
VALID_MODULE_INPUT_SHAPE = {
    ReparameterizableDense: (1, 32),
    ReparameterizableConv2D: (1, 32, 32, 3),
    ReparameterizableBatchNormalization: (1, 32),
}

# Transformations with constant Jacobian (e.g., linear transforms) should
# produce the same output gradient when variables are replaced.
# Note that transforms in this list may take an `activation` parameter
# that when set to a non-linearity such as `tf.nn.relu`, induces a non-constant
# Jacobian.
CONSTANT_JACOBIAN_TRANSFORMS = (
    ReparameterizableDense,
    ReparameterizableConv2D,
    ReparameterizableBatchNormalization,
)
def has_constant_jacobian(layer):
  """Returns True if `layer` is a constant-Jacobian transform.

  A layer qualifies when it is one of `CONSTANT_JACOBIAN_TRANSFORMS` and its
  `activation` (if any) is absent, `None`, or the linear activation.

  Args:
    layer: a layer instance to classify.

  Returns:
    A boolean.
  """
  if not isinstance(layer, CONSTANT_JACOBIAN_TRANSFORMS):
    return False
  activation = getattr(layer, 'activation', None)
  return activation is None or 'linear' in str(activation)
def _get_module_predicate_test_cases(module_cls, variable_predicate,
                                     module_predicate, valid_module_init_args,
                                     valid_module_call_args):
  """Return parameters of tests for `module_cls` with the given arguments."""
  test_cases = []
  kwarg_pairs = test_utils.get_valid_kwargs(
      module_cls, valid_module_init_args, valid_module_call_args)
  for valid_init_kwargs, valid_call_kwargs in kwarg_pairs:
    # Encode the full configuration into the test case name for readability.
    testcase_name = '{}_{}_{}_{}_{}'.format(
        str(module_cls),
        str(variable_predicate),
        str(module_predicate),
        str(valid_init_kwargs),
        str(valid_call_kwargs),
    )
    test_cases.append({
        'testcase_name': testcase_name,
        'module_cls': module_cls,
        'variable_reparameterizing_predicate': variable_predicate,
        'module_reparameterizing_predicate': module_predicate,
        'module_init_kwargs': valid_init_kwargs,
        'module_call_kwargs': valid_call_kwargs,
    })
  return test_cases
def get_module_test_cases(reparameterizable_modules=None,
                          module_reparameterizing_predicates=None,
                          variable_reparameterizing_predicates=None,
                          valid_module_init_args=None,
                          valid_module_call_args=None):
  """Return test parameters for `reparameterizable_modules` and predicates."""
  # Fall back to the module-level defaults for any argument left unset.
  if reparameterizable_modules is None:
    reparameterizable_modules = REPARAMETERIZABLE_MODULES
  if variable_reparameterizing_predicates is None:
    variable_reparameterizing_predicates = VARIABLE_REPARAMETERIZING_PREDICATES
  if module_reparameterizing_predicates is None:
    module_reparameterizing_predicates = MODULE_REPARAMETERIZING_PREDICATES
  if valid_module_init_args is None:
    valid_module_init_args = VALID_MODULE_INIT_ARGS
  if valid_module_call_args is None:
    valid_module_call_args = VALID_MODULE_CALL_ARGS

  test_cases = []
  # `None` is appended so each variable predicate is also tried without a
  # module predicate; the all-None combination is skipped.
  module_predicates = (*module_reparameterizing_predicates, None)
  for variable_predicate in variable_reparameterizing_predicates:
    for module_predicate in module_predicates:
      if variable_predicate is None and module_predicate is None:
        continue
      for module_cls in reparameterizable_modules:
        test_cases.extend(
            _get_module_predicate_test_cases(module_cls, variable_predicate,
                                             module_predicate,
                                             valid_module_init_args,
                                             valid_module_call_args))
  return test_cases
def _randomized_variables(variables):
  """Create one small-random replacement `tf.Variable` per unique variable.

  `variables` may contain duplicates due to the way `tf.Module._flatten`
  works (a `tf.Variable` may be referenced by more than one attribute of a
  `tf.Module`), so deduplication happens via `Variable.ref()`.
  """
  unique_refs = {v.ref() for v in variables}

  def _noise(shape):
    # Small-scale float32 noise matching the original variable's shape.
    return np.random.normal(scale=.01, size=shape).astype(np.float32)

  replacements = {
      ref: tf.Variable(_noise(ref.deref().shape.as_list()))
      for ref in unique_refs
  }
  return tuple(replacements[v.ref()] for v in variables)
def get_params_and_replacements(module, variable_predicate, module_predicate):
  """Collect `module`'s reparameterizable paths/variables plus random replacements."""
  paths_and_variables = module.reparameterizables(
      variable_predicate=variable_predicate,
      module_predicate=module_predicate,
      with_path=True)
  paths, variables = zip(*paths_and_variables)
  return paths, variables, _randomized_variables(variables)
def _init_module(module_cls, module_init_kwargs):
  """Initialize and build a `module_cls` instance with `module_init_kwargs`."""
  instance = module_cls(**module_init_kwargs)
  # Keras-style modules expose `built`; build those with a known input shape.
  needs_build = hasattr(instance, 'built')
  if needs_build:
    instance.build(VALID_MODULE_INPUT_SHAPE[module_cls])
  return instance
def _init_reference_module(module_cls, module_init_kwargs, paths, variables):
  """Create a mock `module_cls` instance with `variables` as attributes."""
  reference = _init_module(module_cls, module_init_kwargs)
  # Overwrite each attribute addressed by `paths` with the matching variable.
  for path, variable in zip(paths, variables):
    owner = reparameterizable_base.chained_getattr(reference, path[:-1])
    reparameterizable_base.corner_case_setattr(owner, path[-1], variable)
  return reference
def _setup_modules(module_cls, variable_reparameterizing_predicate,
                   module_reparameterizing_predicate, module_init_kwargs):
  """Return `module_cls` instances for reparameterization and for reference."""
  # Module under test.
  module_to_reparameterize = _init_module(module_cls, module_init_kwargs)

  # Replacement parameters for the variables selected by the predicates.
  paths, variables, replacement_variables = get_params_and_replacements(
      module_to_reparameterize,
      variable_reparameterizing_predicate,
      module_reparameterizing_predicate,
  )

  # One reference module per parameter set: original and replacement.
  before_reference_module = _init_reference_module(
      module_cls, module_init_kwargs, paths, variables)
  after_reference_module = _init_reference_module(
      module_cls, module_init_kwargs, paths, replacement_variables)

  return (
      module_to_reparameterize,
      before_reference_module,
      after_reference_module,
      variables,
      replacement_variables,
  )
# TODO(eringrant): Implement the following tests:
# - test that the correct Tensors are being accessed via `reparameterizables`.
# - add tests to include exact verification of results in simple
# cases (e.g., computing gradients with numpy for linear regression).
# - add gradient correctness check via finite differences.
class TestReparameterizableModule(tf.test.TestCase, parameterized.TestCase):
  """Checks module outputs and gradients around `reparameterize` contexts."""

  @parameterized.named_parameters(*get_module_test_cases())
  def test_swap_and_revert_parameters(
      self,
      module_cls,
      variable_reparameterizing_predicate,
      module_reparameterizing_predicate,
      module_init_kwargs,
      module_call_kwargs,
  ):
    """Outputs must match the original/replacement reference modules."""
    try:
      (module_to_reparameterize, before_reference_module,
       after_reference_module, variables,
       replacement_variables) = _setup_modules(
           module_cls, variable_reparameterizing_predicate,
           module_reparameterizing_predicate, module_init_kwargs)
    except ValueError:
      # TODO(eringrant): Assert that no variables are returned only in expected
      # cases.
      return

    # Random inputs.
    input_shape = VALID_MODULE_INPUT_SHAPE[module_cls]
    inputs = tf.cast(np.random.normal(size=input_shape), tf.float32)

    # Reference outputs.
    reference_before_result = before_reference_module(inputs)
    reference_after_result = after_reference_module(inputs)

    # The output should conform before variable replacement.
    before_reparameterization_result = module_to_reparameterize(inputs)

    replacement_map = dict(
        (v1.ref(), v2) for v1, v2 in zip(variables, replacement_variables))
    with module_to_reparameterize.reparameterize(replacement_map):
      # The output should conform after variable replacement.
      after_reparameterization_result = module_to_reparameterize(inputs)

    # The output should conform after variable reversion.
    after_reversion_result = module_to_reparameterize(inputs)

    with self.session(use_gpu=True) as sess:
      self.evaluate(tf.global_variables_initializer())

      # For reference.
      reference_before_value = sess.run(reference_before_result)
      reference_after_value = sess.run(reference_after_result)

      # For testing.
      (
          before_reparameterization_value,
          after_reparameterization_value,
          after_reversion_value,
      ) = sess.run([
          before_reparameterization_result,
          after_reparameterization_result,
          after_reversion_result,
      ])

      # The outputs should differ (by a transformation defined by the module).
      # Note this does not check that the transformation is correct.
      self.assertNotAllClose(reference_before_value, reference_after_value)

      # The output should conform before variable replacement.
      self.assertAllEqual(before_reparameterization_value,
                          reference_before_value)

      # The output should conform after variable replacement.
      self.assertAllEqual(after_reparameterization_value, reference_after_value)

      # The output should conform after variable reversion.
      self.assertAllEqual(after_reversion_value, reference_before_value)

  @parameterized.named_parameters(*get_module_test_cases())
  def test_weight_gradients_after_swap_and_revert(
      self, module_cls, variable_reparameterizing_predicate,
      module_reparameterizing_predicate, module_init_kwargs,
      module_call_kwargs):
    """Weight gradients must flow only to the variable set that produced them."""
    try:
      (module_to_reparameterize, before_reference_module,
       after_reference_module, variables,
       replacement_variables) = _setup_modules(
           module_cls, variable_reparameterizing_predicate,
           module_reparameterizing_predicate, module_init_kwargs)
    except ValueError:
      # TODO(eringrant): Assert that no variables are returned only in expected
      # cases.
      return

    # Random inputs.
    input_shape = VALID_MODULE_INPUT_SHAPE[module_cls]
    inputs = tf.cast(np.random.normal(size=input_shape), tf.float32)

    # Reference outputs.
    reference_before_result = tf.gradients(
        before_reference_module(inputs), variables)
    reference_after_result = tf.gradients(
        after_reference_module(inputs), replacement_variables)

    replacement_map = dict(
        (v1.ref(), v2) for v1, v2 in zip(variables, replacement_variables))

    # Gradients keyed by which reference they should match ('before'/'after'),
    # or 'none' when no gradient path should exist at all.
    results = {
        'before': {},
        'after': {},
        'none': {},
    }

    before_outputs = module_to_reparameterize(inputs)
    results['before']['before_replacement_outside_context'] = tf.gradients(
        before_outputs, variables)
    results['none']['before_replacement_outside_context'] = tf.gradients(
        before_outputs, replacement_variables)

    with module_to_reparameterize.reparameterize(replacement_map):
      results['before']['before_replacement_inside_context'] = tf.gradients(
          before_outputs, variables)
      results['none']['before_replacement_inside_context'] = tf.gradients(
          before_outputs, replacement_variables)

      during_outputs = module_to_reparameterize(inputs)
      results['none']['during_replacement_inside_context'] = tf.gradients(
          during_outputs, variables)
      results['after']['during_replacement_inside_context'] = tf.gradients(
          during_outputs, replacement_variables)

    results['before']['before_replacement_outside_context2'] = tf.gradients(
        before_outputs, variables)
    results['none']['before_replacement_outside_context2'] = tf.gradients(
        before_outputs, replacement_variables)
    results['none']['during_replacement_outside_context'] = tf.gradients(
        during_outputs, variables)
    results['after']['during_replacement_outside_context'] = tf.gradients(
        during_outputs, replacement_variables)

    after_outputs = module_to_reparameterize(inputs)
    results['before']['after_replacement_outside_context'] = tf.gradients(
        after_outputs, variables)
    results['none']['after_replacement_outside_context'] = tf.gradients(
        after_outputs, replacement_variables)

    # Disconnected pairs must have no gradient.
    for context in results['none']:
      for x in results['none'][context]:
        self.assertIsNone(x)
    del results['none']  # Don't try to fetch Nones.

    with self.session(use_gpu=True) as sess:
      self.evaluate(tf.global_variables_initializer())

      # For reference.
      before_value, after_value, values = sess.run(
          (reference_before_result, reference_after_result, results))

      if has_constant_jacobian(module_to_reparameterize):
        # The gradients should be the same.
        self.assertAllClose(before_value, after_value)
      else:
        # The gradients should differ.
        self.assertNotAllClose(before_value, after_value)

      # The gradients should conform before variable replacement.
      # NOTE(review): the inner zip reads graph tensors from `results` rather
      # than the fetched arrays in `values` — confirm this is intended.
      for grads in values['before']:
        for grad, grad_ref in zip(results['before'][grads],
                                  reference_before_result):
          self.assertAllClose(grad, grad_ref, rtol=1e-05)

      # The gradients should conform after variable replacement.
      for grads in values['after']:
        for grad, grad_ref in zip(results['after'][grads],
                                  reference_after_result):
          self.assertAllClose(grad, grad_ref, rtol=1e-05)

  @parameterized.named_parameters(*get_module_test_cases())
  def test_input_gradients_after_swap_and_revert(
      self, module_cls, variable_reparameterizing_predicate,
      module_reparameterizing_predicate, module_init_kwargs,
      module_call_kwargs):
    """Input gradients must reflect whichever variable set was active."""
    try:
      (module_to_reparameterize, before_reference_module,
       after_reference_module, variables,
       replacement_variables) = _setup_modules(
           module_cls, variable_reparameterizing_predicate,
           module_reparameterizing_predicate, module_init_kwargs)
    except ValueError:
      # TODO(eringrant): Assert that no variables are returned only in expected
      # cases.
      return

    # Random inputs.
    input_shape = VALID_MODULE_INPUT_SHAPE[module_cls]
    inputs = tf.cast(np.random.normal(size=input_shape), tf.float32)

    # Reference outputs.
    reference_before_result = tf.gradients(
        before_reference_module(inputs), inputs)
    reference_after_result = tf.gradients(
        after_reference_module(inputs), inputs)

    replacement_map = dict(
        (v1.ref(), v2) for v1, v2 in zip(variables, replacement_variables))

    # Input gradients keyed by which reference they should match.
    results = {
        'before': {},
        'after': {},
    }

    before_outputs = module_to_reparameterize(inputs)
    results['before']['before_replacement_outside_context'] = tf.gradients(
        before_outputs, inputs)

    with module_to_reparameterize.reparameterize(replacement_map):
      results['before']['before_replacement_inside_context'] = tf.gradients(
          before_outputs, inputs)
      during_outputs = module_to_reparameterize(inputs)
      results['after']['during_replacement_inside_context'] = tf.gradients(
          during_outputs, inputs)

    results['before']['before_replacement_outside_context2'] = tf.gradients(
        before_outputs, inputs)
    results['after']['during_replacement_outside_context'] = tf.gradients(
        during_outputs, inputs)

    after_outputs = module_to_reparameterize(inputs)
    results['before']['after_replacement_outside_context'] = tf.gradients(
        after_outputs, inputs)

    with self.session(use_gpu=True) as sess:
      self.evaluate(tf.global_variables_initializer())

      # For reference.
      before_value, after_value, values = sess.run(
          (reference_before_result, reference_after_result, results))

      # The input gradients should differ because the weights have changed.
      self.assertNotAllClose(before_value, after_value)

      # The gradients should conform before variable replacement.
      # NOTE(review): zips graph tensors from `results` rather than the
      # fetched `values` — confirm this is intended.
      for grads in values['before']:
        for grad, grad_ref in zip(results['before'][grads],
                                  reference_before_result):
          self.assertAllClose(grad, grad_ref, rtol=1e-06)

      # The gradients should conform after variable replacement.
      for grads in values['after']:
        for grad, grad_ref in zip(results['after'][grads],
                                  reference_after_result):
          self.assertAllClose(grad, grad_ref, rtol=1e-06)
if __name__ == '__main__':
  # Runs all parameterized test cases defined above.
  tf.test.main()
| {
"content_hash": "115a0245b5ecee584c72ba5d7f42076b",
"timestamp": "",
"source": "github",
"line_count": 487,
"max_line_length": 80,
"avg_line_length": 38.56673511293634,
"alnum_prop": 0.6723458630603769,
"repo_name": "google-research/meta-dataset",
"id": "6457d98f880458d16e0fc6d579261c9e8c522fe4",
"size": "19406",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "meta_dataset/models/experimental/reparameterizable_base_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "391025"
},
{
"name": "Python",
"bytes": "1055970"
}
],
"symlink_target": ""
} |
package com.kumuluz.ee.jpa.common.jta;
import javax.persistence.EntityManager;
/**
* @author Tilen Faganel
* @since 2.4.0
*/
public class NonTxEntityManagerHolder {

    /** The held entity manager instance; may be null until set. */
    private EntityManager entityManager;

    public EntityManager getEntityManager() {
        return entityManager;
    }

    public void setEntityManager(EntityManager em) {
        this.entityManager = em;
    }
}
| {
"content_hash": "cacf91f0ca86ab7d9d53973bf26a14f1",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 52,
"avg_line_length": 17.047619047619047,
"alnum_prop": 0.6731843575418994,
"repo_name": "kumuluz/kumuluzee",
"id": "9b45586052dacc61705c64482e713dc2414da9e3",
"size": "1317",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "components/jpa/common/src/main/java/com/kumuluz/ee/jpa/common/jta/NonTxEntityManagerHolder.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Java",
"bytes": "621378"
}
],
"symlink_target": ""
} |
package com.didichuxing.doraemonkit.weex.info;
import android.os.Bundle;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import android.view.View;
import android.widget.ListView;
import com.didichuxing.doraemonkit.kit.core.BaseFragment;
import com.didichuxing.doraemonkit.widget.titlebar.HomeTitleBar;
import com.didichuxing.doraemonkit.weex.R;
/**
* @author haojianglong
* @date 2019-06-18
*/
public class WeexInfoFragment extends BaseFragment {

    /** Adapter backing the weex info list. */
    private WeexInfoAdapter mAdapter;

    @Override
    protected int onRequestLayout() {
        // Layout providing the title bar and the info list view.
        return R.layout.dk_fragment_info;
    }

    @Override
    public void onViewCreated(@NonNull View view, @Nullable Bundle savedInstanceState) {
        super.onViewCreated(view, savedInstanceState);
        initView();
    }

    /** Wires up the title bar and fills the list with weex info entries. */
    private void initView() {
        HomeTitleBar titleBar = findViewById(R.id.title_bar);
        titleBar.setListener(new HomeTitleBar.OnTitleBarClickListener() {
            @Override
            public void onRightClick() {
                // NOTE(review): getActivity() may return null if the fragment
                // is detached when the click lands — confirm this cannot
                // happen here.
                getActivity().finish();
            }
        });
        ListView listView = findViewById(R.id.info_list);
        mAdapter = new WeexInfoAdapter(getContext());
        mAdapter.setWeexInfos(WeexInfoHacker.getWeexInfos());
        listView.setAdapter(mAdapter);
    }
}
| {
"content_hash": "6709ac5d9b243df84a5f84c15e52c883",
"timestamp": "",
"source": "github",
"line_count": 46,
"max_line_length": 88,
"avg_line_length": 28.717391304347824,
"alnum_prop": 0.6979560938682816,
"repo_name": "didi/DoraemonKit",
"id": "0b32a3b7575c91aa9d362dd85ce83e3a6719898e",
"size": "1321",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Android/dokit-weex/src/main/java/com/didichuxing/doraemonkit/weex/info/WeexInfoFragment.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "AIDL",
"bytes": "570"
},
{
"name": "C",
"bytes": "31596"
},
{
"name": "C++",
"bytes": "11759"
},
{
"name": "CMake",
"bytes": "1598"
},
{
"name": "Dart",
"bytes": "271190"
},
{
"name": "HTML",
"bytes": "34347"
},
{
"name": "Java",
"bytes": "4256090"
},
{
"name": "JavaScript",
"bytes": "207266"
},
{
"name": "Kotlin",
"bytes": "1450043"
},
{
"name": "Less",
"bytes": "372"
},
{
"name": "Objective-C",
"bytes": "1788521"
},
{
"name": "Objective-C++",
"bytes": "12589"
},
{
"name": "Ruby",
"bytes": "9059"
},
{
"name": "Shell",
"bytes": "10258"
},
{
"name": "Swift",
"bytes": "41938"
},
{
"name": "Vue",
"bytes": "231621"
}
],
"symlink_target": ""
} |
package org.mibew.api;
import java.io.UnsupportedEncodingException;
import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
/**
* @author inspirer
*/
public class Utils {

    /** Hex alphabet used to render digest bytes. */
    private static final String HEX_DIGITS = "0123456789abcdef";

    /**
     * Computes the lowercase hexadecimal MD5 digest of {@code string}'s UTF-8 bytes.
     *
     * @param string the text to hash
     * @return a 32-character lowercase hex digest
     * @throws NoSuchAlgorithmException if the MD5 algorithm is unavailable
     * @throws UnsupportedEncodingException never actually thrown anymore;
     *         declared for source compatibility with existing callers
     */
    public static String md5(String string) throws NoSuchAlgorithmException, UnsupportedEncodingException {
        // StandardCharsets.UTF_8 avoids the charset-name lookup of
        // getBytes("utf-8"), which was the only source of
        // UnsupportedEncodingException.
        return md5(string.getBytes(StandardCharsets.UTF_8));
    }

    /** Digests {@code bytes} with MD5 and renders the result as lowercase hex. */
    private static String md5(byte[] bytes) throws NoSuchAlgorithmException {
        MessageDigest md = MessageDigest.getInstance("MD5");
        byte[] digest = md.digest(bytes);
        StringBuilder sb = new StringBuilder(digest.length * 2);
        for (byte b : digest) {
            // High nibble first, masking with 0xff to keep the byte unsigned.
            sb.append(HEX_DIGITS.charAt((b & 0xff) >> 4));
            sb.append(HEX_DIGITS.charAt(b & 0xf));
        }
        return sb.toString();
    }
}
| {
"content_hash": "f032f28cd1110eff5053aa11132553ef",
"timestamp": "",
"source": "github",
"line_count": 29,
"max_line_length": 104,
"avg_line_length": 27.551724137931036,
"alnum_prop": 0.7271589486858573,
"repo_name": "Mibew/java",
"id": "ba8595d03700b95adf12afb88b92d5a727b4cdbe",
"size": "799",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/org.mibew.api/src/org/mibew/api/Utils.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C#",
"bytes": "66842"
},
{
"name": "CSS",
"bytes": "34838"
},
{
"name": "Java",
"bytes": "35211"
},
{
"name": "JavaScript",
"bytes": "452597"
},
{
"name": "PHP",
"bytes": "687158"
},
{
"name": "Perl",
"bytes": "10650"
},
{
"name": "Shell",
"bytes": "1453"
}
],
"symlink_target": ""
} |
package org.jetbrains.plugins.groovy.lang.groovydoc.lexer;
import com.intellij.lexer.LexerBase;
import com.intellij.lexer.MergingLexerAdapter;
import com.intellij.psi.tree.IElementType;
import com.intellij.psi.tree.TokenSet;
import com.intellij.util.text.CharArrayUtil;
import java.io.IOException;
/**
* @author ilyas
*/
public class GroovyDocLexer extends MergingLexerAdapter implements GroovyDocTokenTypes {

  // Adjacent comment-data and whitespace tokens produced by the inner lexer
  // are merged into single tokens by MergingLexerAdapter.
  private static final TokenSet TOKENS_TO_MERGE = TokenSet.create(
      mGDOC_COMMENT_DATA,
      mGDOC_WHITESPACE
  );

  public GroovyDocLexer() {
    super(new AsteriskStripperLexer(new _GroovyDocLexer()),
        TOKENS_TO_MERGE);
  }

  /**
   * Wraps the generated {@code _GroovyDocLexer} and re-tokenizes the leading
   * decoration of each groovydoc line: a run of '*' at the start of a line is
   * emitted as {@code mGDOC_ASTERISKS}, and the whitespace that follows it as
   * whitespace or comment data depending on the lexer state.
   */
  private static class AsteriskStripperLexer extends LexerBase {
    private final _GroovyDocLexer myFlexLexer;
    private CharSequence myBuffer;
    private int myBufferIndex;        // start offset of the current token
    private int myBufferEndOffset;    // end of the lexed region
    private int myTokenEndOffset;     // end offset of the current token
    private int myState;              // flex state captured before the last flex-produced token
    private IElementType myTokenType; // null while the next token is not yet computed
    private boolean myAfterLineBreak; // last whitespace token contained a line break
    private boolean myInLeadingSpace; // currently between the leading '*' run and content

    public AsteriskStripperLexer(final _GroovyDocLexer lexer) {
      myFlexLexer = lexer;
    }

    public final void start(CharSequence buffer, int startOffset, int endOffset, int initialState) {
      myBuffer = buffer;
      myBufferIndex = startOffset;
      myBufferEndOffset = endOffset;
      myTokenType = null;
      myTokenEndOffset = startOffset;
      myFlexLexer.reset(myBuffer, startOffset, endOffset, initialState);
    }

    public int getState() {
      return myState;
    }

    public CharSequence getBufferSequence() {
      return myBuffer;
    }

    public int getBufferEnd() {
      return myBufferEndOffset;
    }

    // The three token accessors lazily compute the next token on first access.
    public final IElementType getTokenType() {
      locateToken();
      return myTokenType;
    }

    public final int getTokenStart() {
      locateToken();
      return myBufferIndex;
    }

    public final int getTokenEnd() {
      locateToken();
      return myTokenEndOffset;
    }

    public final void advance() {
      locateToken();
      myTokenType = null; // force recomputation on the next accessor call
    }

    protected final void locateToken() {
      if (myTokenType != null) return;

      _locateToken();

      // Remember whether this token crossed a line break, so the next call
      // strips the decorative asterisks of the new line.
      if (myTokenType == mGDOC_WHITESPACE) {
        myAfterLineBreak = CharArrayUtil.containLineBreaks(myBuffer, getTokenStart(), getTokenEnd());
      }
    }

    private void _locateToken() {
      if (myTokenEndOffset == myBufferEndOffset) {
        // End of buffer: no more tokens.
        myTokenType = null;
        myBufferIndex = myBufferEndOffset;
        return;
      }

      myBufferIndex = myTokenEndOffset;

      if (myAfterLineBreak) {
        myAfterLineBreak = false;
        // Consume the '*' run that starts the line, but stop before "*/" so
        // the comment terminator is lexed normally.
        while (myTokenEndOffset < myBufferEndOffset && myBuffer.charAt(myTokenEndOffset) == '*' &&
            (myTokenEndOffset + 1 >= myBufferEndOffset || myBuffer.charAt(myTokenEndOffset + 1) != '/')) {
          myTokenEndOffset++;
        }

        myInLeadingSpace = true;
        if (myBufferIndex < myTokenEndOffset) {
          myTokenType = mGDOC_ASTERISKS;
          return;
        }
      }

      if (myInLeadingSpace) {
        myInLeadingSpace = false;
        // Swallow the whitespace that follows the leading asterisks, noting
        // whether it contains a line feed.
        boolean lf = false;
        while (myTokenEndOffset < myBufferEndOffset && Character.isWhitespace(myBuffer.charAt(myTokenEndOffset))) {
          if (myBuffer.charAt(myTokenEndOffset) == '\n') lf = true;
          myTokenEndOffset++;
        }

        final int state = myFlexLexer.yystate();
        // If plain comment data follows, or the next char opens a tag,
        // inline tag, quote or html ('@', '{', '"', '<'), reset the flex
        // lexer to the start-of-comment-data state.
        if (state == _GroovyDocLexer.COMMENT_DATA ||
            myTokenEndOffset < myBufferEndOffset && (myBuffer.charAt(myTokenEndOffset) == '@' ||
                myBuffer.charAt(myTokenEndOffset) == '{' ||
                myBuffer.charAt(myTokenEndOffset) == '\"' ||
                myBuffer.charAt(myTokenEndOffset) == '<')) {
          myFlexLexer.yybegin(_GroovyDocLexer.COMMENT_DATA_START);
        }

        if (myBufferIndex < myTokenEndOffset) {
          // Leading space counts as whitespace when a line feed was crossed or
          // the lexer is in a tag-related state; otherwise it is comment data.
          myTokenType = lf || state == _GroovyDocLexer.PARAM_TAG_SPACE || state == _GroovyDocLexer.TAG_DOC_SPACE || state == _GroovyDocLexer.INLINE_TAG_NAME || state == _GroovyDocLexer.DOC_TAG_VALUE_IN_PAREN
              ? mGDOC_WHITESPACE
              : mGDOC_COMMENT_DATA;
          return;
        }
      }

      // Otherwise defer to the generated flex lexer.
      flexLocateToken();
    }

    private void flexLocateToken() {
      try {
        myState = myFlexLexer.yystate();
        myFlexLexer.goTo(myBufferIndex);
        myTokenType = myFlexLexer.advance();
        myTokenEndOffset = myFlexLexer.getTokenEnd();
      }
      catch (IOException e) {
        // Can't be
      }
    }
  }
}
| {
"content_hash": "13bf60381cef0f7b89c972b44c7d70c0",
"timestamp": "",
"source": "github",
"line_count": 159,
"max_line_length": 207,
"avg_line_length": 28.352201257861637,
"alnum_prop": 0.6433007985803016,
"repo_name": "joewalnes/idea-community",
"id": "ec08cf5eb4d28de75d5f54343471ddba3fc5c29e",
"size": "5108",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "plugins/groovy/src/org/jetbrains/plugins/groovy/lang/groovydoc/lexer/GroovyDocLexer.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ActionScript",
"bytes": "387"
},
{
"name": "C",
"bytes": "136045"
},
{
"name": "C#",
"bytes": "103"
},
{
"name": "C++",
"bytes": "40449"
},
{
"name": "Emacs Lisp",
"bytes": "2507"
},
{
"name": "Erlang",
"bytes": "10"
},
{
"name": "Groovy",
"bytes": "361320"
},
{
"name": "Java",
"bytes": "89694599"
},
{
"name": "JavaScript",
"bytes": "978"
},
{
"name": "Objective-C",
"bytes": "1877"
},
{
"name": "PHP",
"bytes": "145"
},
{
"name": "Perl",
"bytes": "6523"
},
{
"name": "Python",
"bytes": "1699274"
},
{
"name": "Shell",
"bytes": "6965"
},
{
"name": "VimL",
"bytes": "5950"
}
],
"symlink_target": ""
} |
var
	gutil = require('gulp-util'),
	path = require('path'),
	lintspaces = require('../index'),
	options = {
		newline: true,
		indentation: 'tabs',
		trailingspaces: true,
		ignores: [
			'js-comments'
		]
	}
;

require('should');

describe('main', function() {
	it('the plugin should be valid ;-)', function(done) {
		var
			file = new gutil.File({
				// Buffer.from() replaces the deprecated `new Buffer()` constructor.
				contents: Buffer.from(''),
				path: path.join(__dirname, '..', 'index.js')
			}),
			stream = lintspaces(options),
			doneCalled = false
		;

		// `done` must only fire once, whether the stream ends cleanly or
		// errors; the flag guards against double invocation.
		stream.on('end', function() {
			if(!doneCalled) {
				doneCalled = true;
				done();
			}
		});

		stream.on('error', function(error) {
			if(!doneCalled) {
				doneCalled = true;
				done(error);
			}
		});

		stream.write(file);
		stream.end();
	});
});
| {
"content_hash": "9bfa68bb69f0c48ac8ecb25434dac079",
"timestamp": "",
"source": "github",
"line_count": 46,
"max_line_length": 54,
"avg_line_length": 16.58695652173913,
"alnum_prop": 0.564875491480996,
"repo_name": "kjvalencik/gulp-lintspaces",
"id": "702c5789bb89a9c4713728d2be67d306dad43fab",
"size": "763",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/test_main.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "8672"
}
],
"symlink_target": ""
} |
package com.mana.innovative.rest.consumer;
import com.mana.innovative.dto.consumer.Customer;
import com.mana.innovative.dto.request.RequestParams;
import com.mana.innovative.service.consumer.CustomerService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
import javax.annotation.Resource;
import javax.ws.rs.Consumes;
import javax.ws.rs.DELETE;
import javax.ws.rs.DefaultValue;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
@Component
@Path( "/{customer: (?i)customer}" )
public class CustomerResWebService {
    /**
     * Class logger.
     */
    private static final Logger logger = LoggerFactory.getLogger( CustomerResWebService.class );
    /**
     * The customer service handling the actual persistence work; injected by
     * the container via {@code @Resource}.
     */
    @Resource
    private CustomerService customerService;
/**
* Gets single customer details.
*
* @param customerId the customer id
* @param isError the is error
* @return the single customer details
*/
@GET
@Path( "/{customerId}" )
@Produces( { MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML } )
public Response getSingleCustomerDetails( @PathParam( "customerId" ) Long customerId,
@QueryParam( "is_error" ) @DefaultValue( "false" ) Boolean isError ) {
logger.debug( "Starting #getSingleCustomerDetails()" );
RequestParams requestParams = new RequestParams( );
requestParams.setIsError( isError );
return customerService.getCustomerByUserId( customerId, requestParams );
}
/**
* Create new customer.
*
* @param customer the customer
* @param isError the is error
* @return the response
*/
@POST
@Consumes( { MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML } )
@Produces( { MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML } )
public Response createNewCustomer( Customer customer,
@QueryParam( "is_error" ) @DefaultValue( "false" ) Boolean isError ) {
logger.debug( "Starting #createNewCustomer()" );
RequestParams requestParams = new RequestParams( );
requestParams.setIsError( isError );
return customerService.createCustomer( customer, requestParams );
}
/**
* Update specific customer details.
*
* @param customer the customer
* @param customerId the customer id
* @param isError the is error
* @return the response
*/
@PUT
@Path( "/{customerId}" )
@Consumes( { MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML } )
@Produces( { MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML } )
public Response updateSpecificCustomerDetails( Customer customer,
@PathParam( "customerId" ) Long customerId,
@QueryParam( "is_error" ) @DefaultValue( "false" ) Boolean isError ) {
logger.debug( "Starting #updateSpecificCustomerDetails()" );
RequestParams requestParams = new RequestParams( );
requestParams.setIsError( isError );
return customerService.updateCustomer( customer, requestParams );
}
/**
* Delete specific customer.
*
* @param customerId the customer id
* @param isError the is error
* @return the response
*/
@DELETE
@Path( "/{customerId}" )
public Response deleteSpecificCustomer( @PathParam( "customerId" ) Long customerId,
@QueryParam( "is_error" ) @DefaultValue( "false" ) Boolean isError ) {
logger.debug( "Starting #deleteSpecificCustomer()" );
RequestParams requestParams = new RequestParams( );
requestParams.setIsError( isError );
return customerService.deleteCustomerByUserId( customerId, requestParams );
}
}
| {
"content_hash": "69887c85ef6943984b0710247c692653",
"timestamp": "",
"source": "github",
"line_count": 124,
"max_line_length": 121,
"avg_line_length": 33.193548387096776,
"alnum_prop": 0.651846452866861,
"repo_name": "arkoghosh11/bloom-test",
"id": "938b9bff1242b788ce7688b49baf735559aaba0e",
"size": "4326",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "bloom-rest/src/main/java/com/mana/innovative/rest/consumer/CustomerResWebService.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "247"
},
{
"name": "HTML",
"bytes": "6033"
},
{
"name": "Java",
"bytes": "2213623"
},
{
"name": "JavaScript",
"bytes": "1001"
}
],
"symlink_target": ""
} |
// scrypt-based hashing helpers (declarations only; definitions live in the .cpp).
// Salted hash re-applying the digest nRounds times.
// NOTE(review): the exact chaining between rounds is defined at the implementation — confirm there.
uint256 scrypt_salted_multiround_hash(const void* input, size_t inputlen, const void* salt, size_t saltlen, const unsigned int nRounds);
// Single-pass salted scrypt hash of `input`.
uint256 scrypt_salted_hash(const void* input, size_t inputlen);
// Unsalted convenience wrapper. NOTE(review): presumably delegates to the salted
// variant — verify in the implementation.
uint256 scrypt_hash(const void* input, size_t inputlen);
// Hash of a fixed-size block; the block size is assumed by the implementation.
uint256 scrypt_blockhash(const void* input);
#endif // SCRYPT_MINE_H
| {
"content_hash": "f8450bc9dcbd62a913a0ef01b2d897fa",
"timestamp": "",
"source": "github",
"line_count": 6,
"max_line_length": 136,
"avg_line_length": 60.333333333333336,
"alnum_prop": 0.7707182320441989,
"repo_name": "GregoryBetz/DarkSilk-Release-Candidate",
"id": "d81c45add0fa397a6064fd0d2dd10c876038144e",
"size": "485",
"binary": false,
"copies": "2",
"ref": "refs/heads/main",
"path": "src/scrypt.h",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "160918"
},
{
"name": "Batchfile",
"bytes": "3764"
},
{
"name": "C",
"bytes": "892656"
},
{
"name": "C++",
"bytes": "6976019"
},
{
"name": "CSS",
"bytes": "1127"
},
{
"name": "HTML",
"bytes": "50621"
},
{
"name": "Java",
"bytes": "2100"
},
{
"name": "Makefile",
"bytes": "50120"
},
{
"name": "NSIS",
"bytes": "6088"
},
{
"name": "Objective-C",
"bytes": "3023"
},
{
"name": "Objective-C++",
"bytes": "5844"
},
{
"name": "Protocol Buffer",
"bytes": "484"
},
{
"name": "Python",
"bytes": "198157"
},
{
"name": "QMake",
"bytes": "25733"
},
{
"name": "Shell",
"bytes": "379746"
}
],
"symlink_target": ""
} |
module("StopQueryService");
// Builds a StopQueryService against the shared traffic-transfer URL, wired
// to no-op completion/failure listeners (the tests poll `lastResult` instead).
function initStopQueryService() {
    function succeed(event) {
    }
    function failed(event) {
    }
    var listeners = {
        processCompleted: succeed,
        processFailed: failed
    };
    return new SuperMap.REST.StopQueryService(GlobeParameter.trafficTransferURL, {
        eventListeners: listeners
    });
}
// processAsync() invoked without parameters must bail out and return undefined.
test("StopQueryService_noparams", function(){
    expect(1);
    var service = initStopQueryService();
    // Fix: the original declared `var undefined`, shadowing the global
    // `undefined` — a well-known hazard. A descriptive local keeps the
    // same typeof check without the shadowing.
    var result = service.processAsync();
    ok(typeof(result) === "undefined", "undefined");
});
// Queries stops matching the keyword with returnPosition=true and, after the
// remote call settles, asserts positions are populated and destroy() clears state.
asyncTest("StopQueryService_success_returnPosition", function(){
    expect(6);
    var service = initStopQueryService();
    var params = new SuperMap.REST.StopQueryParameters({
        keyWord: '人民',
        returnPosition: true
    });
    service.processAsync(params);
    // Poll after a fixed delay; the service stores its outcome on lastResult.
    setTimeout(function() {
        try{
            var result = service.lastResult;
            ok(result !== null,"service.lastResult");
            ok(result.transferStopInfos.length > 0, "result.transferStopInfos.length");
            // returnPosition=true: each stop should carry coordinates.
            ok(result.transferStopInfos[0].position !== null,"result.transferStopInfos[0].position");
            service.destroy();
            // destroy() must null out listeners, events and the cached result.
            ok(service.eventListeners === null,"service.eventListeners");
            ok(service.events === null,"service.events");
            ok(service.lastResult === null,"service.lastResult");
            start();
        }catch(excepion){
            ok(false,"exception occcurs,message is:"+excepion.message)
            start();
        }
    },6000);
});
// Same flow as above but with returnPosition=false: stops must come back
// without coordinates, and destroy() must still clear all service state.
asyncTest("StopQueryService_success_returnPosition_false", function(){
    expect(6);
    var service = initStopQueryService();
    var params = new SuperMap.REST.StopQueryParameters({
        keyWord: '人民',
        returnPosition: false
    });
    service.processAsync(params);
    // Poll after a fixed delay; the service stores its outcome on lastResult.
    setTimeout(function() {
        try{
            var result = service.lastResult;
            ok(result !== null,"service.lastResult");
            ok(result.transferStopInfos.length > 0, "result.transferStopInfos.length");
            // returnPosition=false: position is expected to be absent (null).
            ok(result.transferStopInfos[0].position === null,"result.transferStopInfos[0].position");
            service.destroy();
            ok(service.eventListeners === null,"service.eventListeners");
            ok(service.events === null,"service.events");
            ok(service.lastResult === null,"service.lastResult");
            start();
        }catch(excepion){
            ok(false,"exception occcurs,message is:"+excepion.message)
            start();
        }
    },6000);
});
"content_hash": "8cfe1507c4d8082e090a5f0d1665fe39",
"timestamp": "",
"source": "github",
"line_count": 79,
"max_line_length": 101,
"avg_line_length": 33.20253164556962,
"alnum_prop": 0.5951200914982844,
"repo_name": "SuperMap/iClient-for-JavaScript",
"id": "285caa6678e3f037085527efa121b8eabc698079",
"size": "2633",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/REST/TrafficTransferAnalyst/StopQueryServiceTest.js",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "8317"
},
{
"name": "CSS",
"bytes": "51211"
},
{
"name": "HTML",
"bytes": "108202"
},
{
"name": "Java",
"bytes": "3728"
},
{
"name": "JavaScript",
"bytes": "7621100"
},
{
"name": "PowerShell",
"bytes": "195478"
},
{
"name": "Shell",
"bytes": "11618"
}
],
"symlink_target": ""
} |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jutils.jhardware.info.processor;
import java.util.Map;
import org.jutils.jhardware.info.HardwareInfo;
import org.jutils.jhardware.model.ProcessorInfo;
/**
 * Information related to CPU.
 *
 * Template base class: subclasses supply the raw key/value pairs via
 * {@link #parseInfo()} and this class maps the well-known keys onto a
 * {@link ProcessorInfo} bean.
 *
 * @author Javier Garcia Alonso
 */
public abstract class AbstractProcessorInfo implements HardwareInfo {

    @Override
    public ProcessorInfo getInfo() {
        final Map<String, String> rawData = parseInfo();
        return buildFromDataMap(rawData);
    }

    /** Collects raw processor data as a key/value map; platform specific. */
    protected abstract Map<String, String> parseInfo();

    /**
     * Maps the raw data onto a {@link ProcessorInfo} bean.
     *
     * @param dataMap raw key/value pairs; may be null or empty
     * @return a bean carrying the full map plus the well-known fields
     */
    protected ProcessorInfo buildFromDataMap(Map<String, String> dataMap) {
        final ProcessorInfo info = new ProcessorInfo();
        info.setFullInfo(dataMap);
        // Guard clause: with no data, return the bean holding only the raw map.
        if (dataMap == null || dataMap.isEmpty()) {
            return info;
        }
        info.setCacheSize(dataMap.get("cache size"));
        info.setFamily(dataMap.get("cpu family"));
        info.setMhz(dataMap.get("cpu MHz"));
        info.setModel(dataMap.get("model"));
        info.setModelName(dataMap.get("model name"));
        info.setNumCores(dataMap.get("cpu cores"));
        info.setStepping(dataMap.get("stepping"));
        info.setTemperature(dataMap.get("temperature"));
        info.setVendorId(dataMap.get("vendor_id"));
        return info;
    }
}
| {
"content_hash": "959047e23bda5df835e02fcd3d389740",
"timestamp": "",
"source": "github",
"line_count": 52,
"max_line_length": 75,
"avg_line_length": 34.71153846153846,
"alnum_prop": 0.6720221606648199,
"repo_name": "profesorfalken/jHardware",
"id": "87eeae5baeadb8f5294eb4e6748942287e4b9a13",
"size": "1805",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/main/java/org/jutils/jhardware/info/processor/AbstractProcessorInfo.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "113564"
}
],
"symlink_target": ""
} |
.. _implementers:
Implementing Filesystems
========================
With a little care, you can implement a PyFilesystem interface for any filesystem, which will allow it to work interchangeably with any of the built-in FS classes and tools.
To create a PyFilesystem interface, derive a class from :class:`~fs.base.FS` and implement the :ref:`essential-methods`. This should give you a working FS class.
Take care to copy the method signatures *exactly*, including default values. It is also essential that you follow the same logic with regards to exceptions, and only raise exceptions in :mod:`~fs.errors`.
Constructor
-----------
There are no particular requirements regarding how a PyFilesystem class is constructed, but be sure to call the base class ``__init__`` method with no parameters.
Thread Safety
-------------
All Filesystems should be *thread-safe*. The simplest way to achieve that is by using the ``_lock`` attribute supplied by the :class:`~fs.base.FS` constructor. This is a ``RLock`` object from the standard library, which you can use as a context manager, so methods you implement will start something like this::
with self._lock:
do_something()
You aren't *required* to use ``_lock``. Just as long as calling methods on the FS object from multiple threads doesn't break anything.
Python Versions
---------------
PyFilesystem supports Python2.7 and Python3.X. The differences between the two major Python versions are largely managed by the ``six`` library.
You aren't obligated to support the same versions of Python that PyFilesystem itself supports, but it is recommended if your project is for general use.
Testing Filesystems
-------------------
To test your implementation, you can borrow the test suite used to test the built in filesystems. If your code passes these tests, then you can be confident your implementation will work seamlessly.
Here's the simplest possible example to test a filesystem class called ``MyFS``::
import unittest
from fs.test import FSTestCases
class TestMyFS(FSTestCases, unittest.TestCase):
def make_fs(self):
# Return an instance of your FS object here
return MyFS()
You may also want to override some of the methods in the test suite for more targeted testing:
.. autoclass:: fs.test.FSTestCases
:members:
.. note::
As of version 2.4.11 this project uses `pytest <https://pytest.org/en/latest/>`_ to run its tests.
While it's completely compatible with ``unittest``-style tests, it's much more powerful and
feature-rich. We suggest you take advantage of it and its plugins in new tests you write, rather
than sticking to strict ``unittest`` features. For benefits and limitations, see `here <https://pytest.org/en/latest/unittest.html>`_.
.. _essential-methods:
Essential Methods
-----------------
The following methods MUST be implemented in a PyFilesystem interface.
* :meth:`~fs.base.FS.getinfo` Get info regarding a file or directory.
* :meth:`~fs.base.FS.listdir` Get a list of resources in a directory.
* :meth:`~fs.base.FS.makedir` Make a directory.
* :meth:`~fs.base.FS.openbin` Open a binary file.
* :meth:`~fs.base.FS.remove` Remove a file.
* :meth:`~fs.base.FS.removedir` Remove a directory.
* :meth:`~fs.base.FS.setinfo` Set resource information.
.. _non-essential-methods:
Non - Essential Methods
-----------------------
The following methods MAY be implemented in a PyFilesystem interface.
These methods have a default implementation in the base class, but may be overridden if you can supply a more optimal version.
Exactly which methods you should implement depends on how and where the data is stored. For network filesystems, a good candidate to implement, is the ``scandir`` method which would otherwise call a combination of ``listdir`` and ``getinfo`` for each file.
In the general case, it is a good idea to look at how these methods are implemented in :class:`~fs.base.FS`, and only write a custom version if it would be more efficient than the default.
* :meth:`~fs.base.FS.appendbytes`
* :meth:`~fs.base.FS.appendtext`
* :meth:`~fs.base.FS.close`
* :meth:`~fs.base.FS.copy`
* :meth:`~fs.base.FS.copydir`
* :meth:`~fs.base.FS.create`
* :meth:`~fs.base.FS.desc`
* :meth:`~fs.base.FS.download`
* :meth:`~fs.base.FS.exists`
* :meth:`~fs.base.FS.filterdir`
* :meth:`~fs.base.FS.getmeta`
* :meth:`~fs.base.FS.getospath`
* :meth:`~fs.base.FS.getsize`
* :meth:`~fs.base.FS.getsyspath`
* :meth:`~fs.base.FS.gettype`
* :meth:`~fs.base.FS.geturl`
* :meth:`~fs.base.FS.hassyspath`
* :meth:`~fs.base.FS.hasurl`
* :meth:`~fs.base.FS.isclosed`
* :meth:`~fs.base.FS.isempty`
* :meth:`~fs.base.FS.isdir`
* :meth:`~fs.base.FS.isfile`
* :meth:`~fs.base.FS.islink`
* :meth:`~fs.base.FS.lock`
* :meth:`~fs.base.FS.makedirs`
* :meth:`~fs.base.FS.move`
* :meth:`~fs.base.FS.movedir`
* :meth:`~fs.base.FS.open`
* :meth:`~fs.base.FS.opendir`
* :meth:`~fs.base.FS.readbytes`
* :meth:`~fs.base.FS.readtext`
* :meth:`~fs.base.FS.removetree`
* :meth:`~fs.base.FS.scandir`
* :meth:`~fs.base.FS.settimes`
* :meth:`~fs.base.FS.touch`
* :meth:`~fs.base.FS.upload`
* :meth:`~fs.base.FS.validatepath`
* :meth:`~fs.base.FS.writebytes`
* :meth:`~fs.base.FS.writefile`
* :meth:`~fs.base.FS.writetext`
.. _helper-methods:
Helper Methods
--------------
These methods SHOULD NOT be implemented.
Implementing these is highly unlikely to be worthwhile.
* :meth:`~fs.base.FS.check`
* :meth:`~fs.base.FS.getbasic`
* :meth:`~fs.base.FS.getdetails`
* :meth:`~fs.base.FS.hash`
* :meth:`~fs.base.FS.match`
* :meth:`~fs.base.FS.tree`
| {
"content_hash": "03184c3fe57c18384baed9ea246ed549",
"timestamp": "",
"source": "github",
"line_count": 149,
"max_line_length": 311,
"avg_line_length": 37.51006711409396,
"alnum_prop": 0.706566469851494,
"repo_name": "PyFilesystem/pyfilesystem2",
"id": "bb055d6904577ec514387d1be5b39430d1ad02a9",
"size": "5589",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "docs/source/implementers.rst",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "692"
},
{
"name": "Python",
"bytes": "729117"
}
],
"symlink_target": ""
} |
<?php
/**
* Created by PhpStorm.
* User: dwendlandt
* Date: 17/06/14
* Time: 19:02
*/
namespace Elastification\Client\Tests\Unit\Request\V1x;
use Elastification\Client\Exception\RequestException;
use Elastification\Client\Request\RequestMethods;
use Elastification\Client\Request\V1x\DeleteDocumentRequest;
/**
 * Unit tests for the V1.x DeleteDocumentRequest: construction, accessors,
 * id handling (the id becomes the request "action"), and response creation.
 */
class DeleteDocumentRequestTest extends \PHPUnit_Framework_TestCase
{
    const INDEX = 'test-index';
    const TYPE = 'test-type';
    const RESPONSE_CLASS = 'Elastification\Client\Response\V1x\DeleteDocumentResponse';
    /**
     * @var \PHPUnit_Framework_MockObject_MockObject
     */
    private $serializer;
    /**
     * @var DeleteDocumentRequest
     */
    private $request;
    /**
     * Creates a fresh request around a mocked serializer before each test.
     */
    protected function setUp()
    {
        parent::setUp();
        $this->serializer = $this->getMockBuilder('Elastification\Client\Serializer\SerializerInterface')
            ->disableOriginalConstructor()
            ->getMock();
        $this->request = new DeleteDocumentRequest(self::INDEX, self::TYPE, $this->serializer);
    }
    /**
     * Releases the fixtures so state never leaks between tests.
     */
    protected function tearDown()
    {
        $this->serializer = null;
        $this->request = null;
        parent::tearDown();
    }
    /**
     * The request must satisfy the generic and delete-specific hierarchy.
     */
    public function testInstance()
    {
        $this->assertInstanceOf(
            'Elastification\Client\Request\RequestInterface',
            $this->request
        );
        $this->assertInstanceOf(
            'Elastification\Client\Request\Shared\AbstractDeleteDocumentRequest',
            $this->request
        );
        $this->assertInstanceOf(
            'Elastification\Client\Request\V1x\DeleteDocumentRequest',
            $this->request
        );
    }
    /**
     * Index passed to the constructor is exposed unchanged.
     */
    public function testGetIndex()
    {
        $this->assertSame(self::INDEX, $this->request->getIndex());
    }
    /**
     * Type passed to the constructor is exposed unchanged.
     */
    public function testGetType()
    {
        $this->assertSame(self::TYPE, $this->request->getType());
    }
    /**
     * A delete-document request must use the HTTP DELETE verb.
     */
    public function testGetMethod()
    {
        $this->assertSame(RequestMethods::DELETE, $this->request->getMethod());
    }
    /**
     * The document id doubles as the request action (URL path segment).
     */
    public function testSetIdGetAction()
    {
        $id = 'my document id';
        $this->request->setId($id);
        $this->assertSame($id, $this->request->getAction());
    }
    /**
     * Reading the action before an id was set must raise a RequestException.
     */
    public function testGetActionException()
    {
        try {
            $this->request->getAction();
        } catch (RequestException $exception) {
            $this->assertSame('id can not be empty for this request', $exception->getMessage());
            return;
        }
        $this->fail();
    }
    /**
     * Setting an empty id must raise a RequestException.
     */
    public function testSetIdException()
    {
        $id = '';
        try {
            $this->request->setId($id);
        } catch (RequestException $exception) {
            $this->assertSame('Id can not be empty', $exception->getMessage());
            return;
        }
        $this->fail();
    }
    /**
     * The serializer handed to the constructor is exposed unchanged.
     */
    public function testGetSerializer()
    {
        $this->assertSame($this->serializer, $this->request->getSerializer());
    }
    /**
     * Serializer params default to an empty array.
     */
    public function testGetSerializerParams()
    {
        $this->assertTrue(is_array($this->request->getSerializerParams()));
        $this->assertEmpty($this->request->getSerializerParams());
    }
    /**
     * Delete requests carry no payload: setBody() is intentionally ignored.
     */
    public function testSetGetBody()
    {
        $body = 'my test body';
        $this->request->setBody($body);
        $this->assertNull($this->request->getBody());
    }
    /**
     * The request advertises its matching V1.x response class.
     */
    public function testGetSupportedClass()
    {
        $this->assertSame(self::RESPONSE_CLASS, $this->request->getSupportedClass());
    }
    /**
     * createResponse() must wrap raw data in the advertised response class.
     */
    public function testCreateResponse()
    {
        $rawData = 'raw data for testing';
        $response = $this->request->createResponse($rawData, $this->serializer);
        $this->assertInstanceOf(self::RESPONSE_CLASS, $response);
    }
}
| {
"content_hash": "5103f1b7e40d117775065f0ef584cc53",
"timestamp": "",
"source": "github",
"line_count": 149,
"max_line_length": 105,
"avg_line_length": 25.02013422818792,
"alnum_prop": 0.6048819742489271,
"repo_name": "elastification/php-client",
"id": "7713fcf2fe6f25586cb3e73d680c26912a8ccd1b",
"size": "3728",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/Unit/Request/V1x/DeleteDocumentRequestTest.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "PHP",
"bytes": "1878381"
}
],
"symlink_target": ""
} |
include ../Makefile.variables
include Makefile.inc

all: syscalls libc shellCodeModule shellDataModule

shellCodeModule:
	cd ShellModule; make

# Build the sample data module: write the payload, then append a single NUL
# terminator byte. Fix: the first redirect was '>>', so every `make` run
# appended another copy of the payload and the .bin grew without bound;
# '>' recreates the file each build.
shellDataModule:
	printf "This is sample data." > shellDataModule.bin && dd if=/dev/zero bs=1 count=1 >> shellDataModule.bin

libc:
	cd libc; make

clean:
	cd ShellModule; make clean
	cd syscalls; make clean
	cd libc; make clean
	rm -rf *.bin

syscalls:
	cd syscalls; make

# shellDataModule added: it never creates a file named "shellDataModule",
# so it must be marked phony like its siblings.
.PHONY: syscalls libc shellCodeModule shellDataModule all clean
| {
"content_hash": "a8b0ff155b95055b81a97626f2c709c8",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 108,
"avg_line_length": 19.458333333333332,
"alnum_prop": 0.7623126338329764,
"repo_name": "sebikul/UnicOS",
"id": "10aeb1f572cf39417c74b9a56f5799c87bf1ea4f",
"size": "467",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "Userland/Makefile",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Assembly",
"bytes": "78997"
},
{
"name": "C",
"bytes": "3956343"
},
{
"name": "C++",
"bytes": "234275"
},
{
"name": "Groff",
"bytes": "2314281"
},
{
"name": "Logos",
"bytes": "31729"
},
{
"name": "Makefile",
"bytes": "5110"
},
{
"name": "Objective-C",
"bytes": "240592"
},
{
"name": "Python",
"bytes": "63355"
},
{
"name": "Shell",
"bytes": "21495"
},
{
"name": "XC",
"bytes": "30784"
},
{
"name": "XS",
"bytes": "29823"
}
],
"symlink_target": ""
} |
require File.dirname(__FILE__) + "/../../test_helper.rb"
# Functional tests for the Ubiquo user-profile controller. Both tests log in
# as the :josep fixture and verify the profile actions operate on that user.
class Ubiquo::UbiquoUserProfilesControllerTest < ActionController::TestCase
  # GET /edit renders successfully and loads the logged-in user's record.
  def test_should_get_edit
    login(:josep)
    get :edit
    assert_response :success
    assert_equal ubiquo_users(:josep).id, assigns(:ubiquo_user).id
  end
  # PUT /update with a full attribute set redirects back to the edit page
  # and still targets the logged-in user's own record.
  def test_should_update_ubiquo_user
    login(:josep)
    put :update, :ubiquo_user => { :name => "name", :surname => "surname", :email => "[email protected]", :password => 'newpass', :password_confirmation => 'newpass'}
    assert_redirected_to ubiquo.edit_ubiquo_user_profile_path
    assert_equal ubiquo_users(:josep).id, assigns(:ubiquo_user).id
  end
end
| {
"content_hash": "b2d48ba8972e2507dec350e58c038e61",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 162,
"avg_line_length": 38.411764705882355,
"alnum_prop": 0.6906584992343032,
"repo_name": "gnuine/ubiquo",
"id": "28e710151cadc2e658c98562e3c77b717e8a8d26",
"size": "653",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "ubiquo_authentication/test/functional/ubiquo/ubiquo_user_profiles_controller_test.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "5956"
},
{
"name": "JavaScript",
"bytes": "236340"
},
{
"name": "Ruby",
"bytes": "1478854"
}
],
"symlink_target": ""
} |
// Keyword lookup table for the tokenizer: a sorted, NULL-terminated array
// searched with binary search. A single file-local instance (`keywords`,
// declared at the closing brace) is shared by Parser::next().
static class Keywords
{
    struct kwinfo { const char* kw; Token token; };
    static kwinfo keywords[];   // sorted, NULL-terminated (defined below)
    int count;                  // entry count; relies on zero-init of static storage
    // Standard binary search over the sorted table. Returns true with the
    // match index in `idx`, or false with the would-be insertion point.
    bool bsearch(const char* key, int& idx)
    {
        idx = 0;
        int low = 0;
        int high = count - 1;
        while (low <= high)
        {
            idx = (low + high) / 2;
            int comp = strcmp(keywords[idx].kw, key);
            if (comp < 0)
                low = idx + 1;
            else if (comp > 0)
                high = idx - 1;
            else
                return true;
        }
        idx = low;
        return false;
    }
public:
    // Counts the table entries; DEBUG builds also verify strict sort order,
    // aborting via fatal() if the table was edited out of order.
    Keywords()
    {
        for (kwinfo* k = keywords; k->kw != NULL; k++)
        {
#ifdef DEBUG
            if (count > 0)
                if (strcmp(k->kw, (k - 1)->kw) <= 0)
                    fatal(0x4001, "Keyword verification failed");
#endif
            count++;
        }
    }
    // Maps an identifier to its keyword token, or tokUndefined if not a keyword.
    Token find(const char* s)
    {
        int index;
        if (bsearch(s, index))
            return keywords[index].token;
        else
            return tokUndefined;
    }
} keywords;
// Keyword-to-token table consumed by Keywords::find() via binary search;
// the NULL sentinel terminates the constructor's counting loop.
Keywords::kwinfo Keywords::keywords[] =
{
    // NOTE: this list must be kept in sorted order
    {"and", tokAnd},
    {"as", tokAs},
    {"assert", tokAssert},
    {"begin", tokBegin},
    {"break", tokBreak},
    {"case", tokCase},
    {"class", tokClass},
    {"const", tokConst},
    {"continue", tokContinue},
    {"def", tokDef},
    {"default", tokDefault},
    {"del", tokDel},
    {"dump", tokDump},
    {"elif", tokElif},
    {"else", tokElse},
    {"exit", tokExit},
    {"for", tokFor},
    {"if", tokIf},
    {"in", tokIn},
    {"ins", tokIns},
    {"is", tokIs},
    {"not", tokNot},
    {"or", tokOr},
    {"return", tokReturn},
    {"shl", tokShl},
    {"shr", tokShr},
    {"switch", tokSwitch},
    {"this", tokThis},
    {"typeof", tokTypeOf},
    {"var", tokVar},
    {"while", tokWhile},
    {"xor", tokXor},
    {NULL, tokUndefined}
};
// InputRecorder accumulates raw source text as the buffered input advances;
// used by Parser::beginRecording()/endRecording() to capture a source span.
InputRecorder::InputRecorder()
    : buf(NULL), offs(0), prevpos(0)  { }
InputRecorder::~InputRecorder() throw()
    { }
// Buffer-event callback: appends newly consumed bytes when the same buffer
// advanced; on a buffer switch, just re-anchors without copying (the bytes
// of the new buffer are picked up by subsequent events).
void InputRecorder::event(char* newbuf, memint newtail, memint)
{
    if (newbuf == buf && newtail > offs)
    {
        data.append(buf + offs, newtail - offs);
        offs = newtail;
    }
    else {
        buf = newbuf;
        offs = newtail;
    }
}
// Resets the recorder to its pristine (inactive) state.
void InputRecorder::clear()
{
    buf = NULL;
    offs = 0;
    prevpos = 0;
    data.clear();
}
// Parser construction and error reporting helpers.
Parser::Parser(buffifo* inp)
    : input(inp), linenum(1),
      prevIdent(), saveToken(tokUndefined),
      token(tokUndefined), strValue(), intValue(0)  { }
Parser::~Parser()
    { }
// All lexing/parsing errors are raised as emessage exceptions.
void Parser::error(const str& msg)
    { throw emessage(msg); }
void Parser::error(const char* msg)
    { error(str(msg)); }
// Builds a " near '<text>'" suffix from the current token text, for
// embedding in error messages; empty when no token text is available.
str Parser::errorLocation() const
{
    str msg;
    if (!strValue.empty())
        msg += " near '" + to_displayable(to_printable(strValue)) + "'";
    return msg;
}
// Character classes used by the tokenizer.
const charset wsChars = "\t ";                        // intra-line whitespace
const charset identFirst = "A-Za-z_";                 // identifier start
const charset identRest = "0-9A-Za-z_";               // identifier continuation
const charset digits = "0-9";
const charset printableChars = "~20-~7E~81-~FE";      // printable byte ranges
const charset commentChars = printableChars + wsChars;
inline bool is_eol_char(char c)
    { return c == '\n' || c == '\r'; }
void Parser::skipWs()
    { input->skip(wsChars); }
// Consumes an end-of-line sequence and bumps the line counter.
void Parser::skipEol()
{
    assert(input->eol());
    input->skip_eol();
    linenum++;
}
// Scans the body of a single-quoted string literal into strValue, assuming
// the opening quote has already been consumed. Handles \t \r \n, \xHH (one
// or two hex digits), and pass-through of any other escaped character.
// Raises on EOF, EOL, or non-printable characters inside the literal.
void Parser::parseStringLiteral()
{
    static const charset stringChars = printableChars - charset("'\\");
    static const charset hexDigits = "0-9A-Fa-f";
    strValue.clear();
    while (true)
    {
        // Bulk-copy the run of ordinary characters, then inspect the stopper.
        strValue += input->token(stringChars);
        if (input->eof())
            error("Unexpected end of file in string literal");
        char c = input->get();
        if (is_eol_char(c))
            error("Unexpected end of line in string literal");
        if (c == '\'')
            return;     // closing quote: literal complete
        else if (c == '\\')
        {
            switch (c = input->get())
            {
            case 't': strValue += '\t'; break;
            case 'r': strValue += '\r'; break;
            case 'n': strValue += '\n'; break;
            case 'x':
                {
                    // One or two hex digits; the resulting byte is appended.
                    str s;
                    if (hexDigits[input->preview()])
                    {
                        s += input->get();
                        if (hexDigits[input->preview()])
                            s += input->get();
                        bool e, o;
                        ularge value = from_string(s.c_str(), &e, &o, 16);
                        strValue += char(value);
                    }
                    else
                        error("Bad hex sequence");
                }
                break;
            // Unknown escapes pass the character through verbatim (e.g. \' \\).
            default: strValue += c; break;
            }
        }
        else
            error("Illegal character in string literal " + to_printable(c));
    }
}
// Skips a /* ... */ comment body (opening sequence already consumed),
// tracking line numbers and rejecting EOF or non-printable characters.
void Parser::skipMultilineComment()
{
    static const charset skipChars = commentChars - '*';
    while (true)
    {
        input->skip(skipChars);
        if (input->eol())
        {
            if (input->eof())
                error("Unexpected end of file in comments");
            skipEol();
            continue;
        }
        char e = input->get();
        if (e == '*')
        {
            // '*' followed by '/' terminates the comment; a lone '*' is skipped.
            if (input->preview() == '/')
            {
                input->get();
                break;
            }
        }
        else
            error("Illegal character in comments " + to_printable(e));
    }
}
// Skips the rest of a // comment up to (not including) the end of line.
void Parser::skipSinglelineComment()
{
    input->skip(commentChars);
    if (!input->eol())
        error("Illegal character in comments " + to_printable(input->preview()));
}
// Advances to the next token: sets `token`, `strValue` and (for numbers)
// `intValue`, and returns the token. Consecutive blank lines collapse into a
// single tokSep; comments and '\'-continued lines are consumed transparently
// via `goto restart`.
Token Parser::next()
{
    assert(token != tokPrevIdent);
    // While recording, remember where this token starts so endRecording()
    // can trim the one-token lookahead.
    if (recorder.active())
        recorder.prevpos = input->tellg();
restart:
    strValue.clear();
    intValue = 0;
    skipWs();
    int c = input->preview();
    // --- EOF ---
    if (c == -1)
    {
        strValue = "<EOF>";
        return token = tokEof;
    }
    // --- EOL ---
    else if (is_eol_char(c))
    {
        skipEol();
        skipWs();
        // Collapse runs of empty lines into one separator token.
        if (input->eol())
            goto restart;
        strValue = "<EOL>";
        return token = tokSep;
    }
    // --- Identifier or keyword ---
    if (identFirst[c])
    {
        strValue = input->get();
        strValue += input->token(identRest);
        Token tok = keywords.find(strValue.c_str());
        if (tok != tokUndefined)
            return token = tok;
        else
            return token = tokIdent;
    }
    // --- Number ---
    else if (digits[c])
    {
        bool e, o;
        // Read the full identifier-shaped run so "0x1F" comes in one piece.
        strValue = input->token(identRest);
        str s = strValue;
        bool isHex = s.size() > 2 && s[0] == '0' && s[1] == 'x';
        if (isHex)
            s.erase(0, 2);
        ularge v = from_string(s.c_str(), &e, &o, isHex ? 16 : 10);
        if (e)
            error("'" + strValue + "' is not a valid number");
        // INTEGER_MAX + 1 is tolerated so the literal can follow a unary minus.
        if (o || (v > ularge(INTEGER_MAX) + 1))
            error("Numeric overflow (" + strValue + ")");
        intValue = uinteger(v);
        return token = tokIntValue;
    }
    // --- Special chars and sequences ---
    else
    {
        strValue = input->get();
        switch (c)
        {
        case '\\':
            // Line continuation: only whitespace may follow the backslash.
            input->skip(wsChars);
            if (!input->eol())
                error("New line expected after '\\'");
            skipEol();
            goto restart;
        case ',': return token = tokComma;
        case '.':
            if (input->get_if('.'))
            {
                if (input->get_if('.'))
                    return token = tokEllipsis;
                return token = tokRange;
            }
            return token = tokPeriod;
        case '\'': parseStringLiteral(); return token = tokStrValue;
        case ';': return token = tokSemi;
        case ':': return token = tokColon;
        case '+': return token = (input->get_if('=') ? tokAddAssign : tokPlus);
        case '-': return token = (input->get_if('=') ? tokSubAssign : tokMinus);
        case '*': return token = (input->get_if('=') ? tokMulAssign : tokMul);
        case '/':
            // '/' may start a comment; comments vanish and lexing restarts.
            if (input->get_if('/'))
            {
                skipSinglelineComment();
                goto restart;
            }
            else if (input->get_if('*'))
            {
                skipMultilineComment();
                goto restart;
            }
            return token = (input->get_if('=') ? tokDivAssign : tokDiv);
        case '%': return token = (input->get_if('=') ? tokModAssign : tokMod);
        case '[': return token = tokLSquare;
        case ']': return token = tokRSquare;
        case '(': return token = tokLParen;
        case ')': return token = tokRParen;
        case '{': return token = tokLCurly;
        case '}': return token = tokRCurly;
        case '<':
            if (input->get_if('='))
                return token = tokLessEq;
            else if (input->get_if('<'))
                return token = tokPush;
//            else if (input->get_if('>'))
//                return token = tokNotEq;
            else
                return token = tokLAngle;
        case '>':
            if (input->get_if('='))
                return token = tokGreaterEq;
            else if (input->get_if('>'))
                return token = tokPull;
            else
                return token = tokRAngle;
        case '=': return token = (input->get_if('=') ? tokEqual : tokAssign);
        case '|': return token = (input->get_if('=') ? tokCatAssign : tokCat);
        case '^': return token = tokCaret;
        case '@': return token = tokAt;
//        case '#': return token = tokHash;
        case '?': return token = tokQuestion;
        case '!': return token = (input->get_if('=') ? tokNotEq : tokExclam);
        }
    }
    // Falls through only for characters no case above claimed.
    error("Illegal character " + to_printable(c));
    return tokUndefined;
}
// One-identifier pushback: stashes the identifier and current token so the
// grammar can "un-read" an identifier and re-deliver it later via redoIdent().
void Parser::undoIdent(const str& ident)
{
    prevIdent = ident;
    saveToken = token;
    token = tokPrevIdent;
}
// Restores the token saved by undoIdent().
void Parser::redoIdent()
{
    prevIdent.clear();
    token = saveToken;
    saveToken = tokUndefined;
}
// Consumes '{' (with surrounding separators) that opens a multi-statement
// block; `errmsg` names what was expected for the error message.
void Parser::skipMultiBlockBegin(const char* errmsg)
{
    skipWsSeps();
    expect(tokLCurly, errmsg);
    skipWsSeps();
}
// Consumes the matching closing '}' of a multi-statement block.
void Parser::skipMultiBlockEnd()
{
    skipWsSeps();
    expect(tokRCurly, "'}'");
}
// Returns the current identifier's text and advances; errors otherwise.
str Parser::getIdentifier()
{
    if (token != tokIdent)
        error("Identifier expected");
    str s = strValue;
    next();
    return s;
}
// Requires the current token to be `tok` and advances; `errName` is the
// human-readable name used in the "... expected" message.
void Parser::expect(Token tok, const char* errName)
{
    if (token != tok)
        error(str(errName) + " expected");
    next();
}
void Parser::skipLParen()
    { expect(tokLParen, "'('"); }
void Parser::skipRParen()
    { expect(tokRParen, "')'"); }
// True at any end-of-statement boundary: separator, ';', EOF or a closing '}'.
bool Parser::isEos()
{
    return token == tokSep || token == tokSemi || eof() || token == tokRCurly;
}
// Consumes an explicit statement terminator; EOF and '}' end a statement
// implicitly and are left in place for the caller.
void Parser::skipEos()
{
    if (token == tokSep || token == tokSemi)
        next();
    else if (!eof() && token != tokRCurly)
        error("End of statement expected");
}
// Error recovery: discards tokens up to the next statement boundary.
void Parser::skipToEos()
{
    while (!eof() && token != tokSep && token != tokSemi
        && token != tokRCurly)
        next();
}
// Line number of the current token; a separator belongs to the line it ended.
integer Parser::getLineNum() const
{
    if (token == tokSep)
        return linenum - 1;
    else
        return linenum;
}
// Starts capturing raw source text from the current position (see
// InputRecorder); only one recording may be active at a time.
void Parser::beginRecording()
{
    assert(!recorder.active());
    skipWs();
    input->set_bufevent(&recorder);
}
// Stops capturing and returns the recorded span.
str Parser::endRecording()
{
    assert(recorder.active());
    input->set_bufevent(NULL);
    // Because the input stream is always ahead by one token, we need to trim it
    recorder.data.pop(input->tellg() - recorder.prevpos);
    str result = recorder.data;
    recorder.clear();
    return result;
}
// True if `s` is a well-formed identifier: non-empty, first character in
// identFirst, every following character in identRest.
bool isValidIdent(const str& s)
{
    if (s.empty() || !identFirst[s[0]])
        return false;
    // Scan the tail back to front; the predicate is order-independent.
    for (memint i = s.size(); --i > 0; )
        if (!identRest[s[i]])
            return false;
    return true;
}
| {
"content_hash": "c8c259b6f5f1f7f247a98d4696288b48",
"timestamp": "",
"source": "github",
"line_count": 516,
"max_line_length": 81,
"avg_line_length": 23.434108527131784,
"alnum_prop": 0.483460138934833,
"repo_name": "Playermet/shannon",
"id": "0ae6dcf8212826837132bdec06cb6835f0f5a90b",
"size": "12115",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "src/parser.cpp",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "162"
},
{
"name": "C++",
"bytes": "350925"
},
{
"name": "Makefile",
"bytes": "2330"
},
{
"name": "Shell",
"bytes": "921"
}
],
"symlink_target": ""
} |
ACCEPTED
#### According to
The Catalogue of Life, 3rd January 2011
#### Published in
Sp. pl. 1:136. 1753
#### Original name
null
### Remarks
null | {
"content_hash": "3df53ba1829f06288e64c00697e92d5b",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 39,
"avg_line_length": 11.461538461538462,
"alnum_prop": 0.6778523489932886,
"repo_name": "mdoering/backbone",
"id": "689462a7f5926d0009034e3ce8230b11370efe31",
"size": "195",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "life/Plantae/Magnoliophyta/Magnoliopsida/Boraginales/Boraginaceae/Symphytum/Symphytum tuberosum/README.md",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
<?php
namespace Nette\Utils;
use Nette,
RecursiveIteratorIterator;
/**
* Finder allows searching through directory trees using iterator.
*
* <code>
* Finder::findFiles('*.php')
* ->size('> 10kB')
* ->from('.')
* ->exclude('temp');
* </code>
*
* @author David Grudl
*/
class Finder extends Nette\Object implements \IteratorAggregate
{
	/** @var array of base directories to search in */
	private $paths = array();
	/** @var array of filters */
	private $groups;
	/** @var filter for recursive traversing */
	private $exclude = array();
	/** @var int RecursiveIteratorIterator mode (SELF_FIRST or CHILD_FIRST) */
	private $order = RecursiveIteratorIterator::SELF_FIRST;
	/** @var int maximum recursion depth; -1 means unlimited */
	private $maxDepth = -1;
	/** @var array reference to the filter list that filter() currently appends to */
	private $cursor;
	/**
	 * Begins search for files matching mask and all directories.
	 * @param mixed
	 * @return Finder
	 */
	public static function find($mask)
	{
		if (!is_array($mask)) {
			$mask = func_get_args();
		}
		$finder = new static;
		// two groups: every directory, plus files matching the mask
		return $finder->select(array(), 'isDir')->select($mask, 'isFile');
	}
	/**
	 * Begins search for files matching mask.
	 * @param mixed
	 * @return Finder
	 */
	public static function findFiles($mask)
	{
		if (!is_array($mask)) {
			$mask = func_get_args();
		}
		$finder = new static;
		return $finder->select($mask, 'isFile');
	}
	/**
	 * Begins search for directories matching mask.
	 * @param mixed
	 * @return Finder
	 */
	public static function findDirectories($mask)
	{
		if (!is_array($mask)) {
			$mask = func_get_args();
		}
		$finder = new static;
		return $finder->select($mask, 'isDir');
	}
	/**
	 * Creates filtering group by mask & type selector.
	 * @param array
	 * @param string
	 * @return self
	 */
	private function select($masks, $type)
	{
		// append a new (empty) group and point $cursor at it by reference, so
		// that subsequent filter() calls add their callbacks to this group
		$this->cursor = & $this->groups[];
		$pattern = self::buildPattern($masks);
		if ($type || $pattern) {
			$this->filter(function($file) use ($type, $pattern) {
				return !$file->isDot()
					&& (!$type || $file->$type())
					&& (!$pattern || preg_match($pattern, '/' . strtr($file->getSubPathName(), '\\', '/')));
			});
		}
		return $this;
	}
	/**
	 * Searchs in the given folder(s).
	 * @param string|array
	 * @return self
	 */
	public function in($path)
	{
		if (!is_array($path)) {
			$path = func_get_args();
		}
		// non-recursive search: limit traversal to the top level only
		$this->maxDepth = 0;
		return $this->from($path);
	}
	/**
	 * Searchs recursively from the given folder(s).
	 * @param string|array
	 * @return self
	 */
	public function from($path)
	{
		if ($this->paths) {
			throw new Nette\InvalidStateException('Directory to search has already been specified.');
		}
		if (!is_array($path)) {
			$path = func_get_args();
		}
		$this->paths = $path;
		// from now on, filter() feeds the recursive-traversal exclude list
		$this->cursor = & $this->exclude;
		return $this;
	}
	/**
	 * Shows folder content prior to the folder.
	 * @return self
	 */
	public function childFirst()
	{
		$this->order = RecursiveIteratorIterator::CHILD_FIRST;
		return $this;
	}
	/**
	 * Converts Finder pattern to regular expression.
	 * Returns NULL when no restriction is needed (empty masks or '*').
	 * @param array
	 * @return string
	 */
	private static function buildPattern($masks)
	{
		$pattern = array();
		// TODO: accept regexp
		foreach ($masks as $mask) {
			$mask = rtrim(strtr($mask, '\\', '/'), '/');
			$prefix = '';
			if ($mask === '') {
				continue;
			} elseif ($mask === '*') {
				// match-all mask: no pattern required at all
				return NULL;
			} elseif ($mask[0] === '/') { // absolute fixing
				$mask = ltrim($mask, '/');
				$prefix = '(?<=^/)';
			}
			// translate wildcards: ** = anything, * = anything but '/', ? = one char
			$pattern[] = $prefix . strtr(preg_quote($mask, '#'),
				array('\*\*' => '.*', '\*' => '[^/]*', '\?' => '[^/]', '\[\!' => '[^', '\[' => '[', '\]' => ']', '\-' => '-'));
		}
		return $pattern ? '#/(' . implode('|', $pattern) . ')\z#i' : NULL;
	}
	/********************* iterator generator ****************d*g**/
	/**
	 * Returns iterator.
	 * @return \Iterator
	 */
	public function getIterator()
	{
		if (!$this->paths) {
			throw new Nette\InvalidStateException('Call in() or from() to specify directory to search.');
		} elseif (count($this->paths) === 1) {
			return $this->buildIterator($this->paths[0]);
		} else {
			// the dummy first iterator works around PHP AppendIterator bugs;
			// its single element is removed again below
			$iterator = new \AppendIterator();
			$iterator->append($workaround = new \ArrayIterator(array('workaround PHP bugs #49104, #63077')));
			foreach ($this->paths as $path) {
				$iterator->append($this->buildIterator($path));
			}
			unset($workaround[0]);
			return $iterator;
		}
	}
	/**
	 * Returns per-path iterator.
	 * @param string
	 * @return \Iterator
	 */
	private function buildIterator($path)
	{
		if (PHP_VERSION_ID < 50301) {
			$iterator = new Nette\Utils\RecursiveDirectoryIteratorFixed($path);
		} else {
			$iterator = new \RecursiveDirectoryIterator($path, \RecursiveDirectoryIterator::FOLLOW_SYMLINKS);
		}
		if ($this->exclude) {
			$filters = $this->exclude;
			// prune excluded directories before descending into them
			$iterator = new Nette\Iterators\RecursiveFilter($iterator, function($file) use ($filters) {
				if (!$file->isDot() && !$file->isFile()) {
					foreach ($filters as $filter) {
						if (!call_user_func($filter, $file)) {
							return FALSE;
						}
					}
				}
				return TRUE;
			});
		}
		if ($this->maxDepth !== 0) {
			$iterator = new RecursiveIteratorIterator($iterator, $this->order);
			$iterator->setMaxDepth($this->maxDepth);
		}
		if ($this->groups) {
			$groups = $this->groups;
			// an item is accepted when ALL filters of at least ONE group pass
			$iterator = new Nette\Iterators\Filter($iterator, function($file) use ($groups) {
				foreach ($groups as $filters) {
					foreach ($filters as $filter) {
						if (!call_user_func($filter, $file)) {
							continue 2;
						}
					}
					return TRUE;
				}
				return FALSE;
			});
		}
		return $iterator;
	}
	/********************* filtering ****************d*g**/
	/**
	 * Restricts the search using mask.
	 * Excludes directories from recursive traversing.
	 * @param mixed
	 * @return self
	 */
	public function exclude($masks)
	{
		if (!is_array($masks)) {
			$masks = func_get_args();
		}
		$pattern = self::buildPattern($masks);
		if ($pattern) {
			$this->filter(function($file) use ($pattern) {
				return !preg_match($pattern, '/' . strtr($file->getSubPathName(), '\\', '/'));
			});
		}
		return $this;
	}
	/**
	 * Restricts the search using callback.
	 * Appends to whatever $cursor currently references: the newest
	 * select() group, or the exclude list after from()/in().
	 * @param callable
	 * @return self
	 */
	public function filter($callback)
	{
		$this->cursor[] = $callback;
		return $this;
	}
	/**
	 * Limits recursion level.
	 * @param int
	 * @return self
	 */
	public function limitDepth($depth)
	{
		$this->maxDepth = $depth;
		return $this;
	}
	/**
	 * Restricts the search by size.
	 * @param string "[operator] [size] [unit]" example: >=10kB
	 * @param int
	 * @return self
	 */
	public function size($operator, $size = NULL)
	{
		if (func_num_args() === 1) { // in $operator is predicate
			if (!preg_match('#^(?:([=<>!]=?|<>)\s*)?((?:\d*\.)?\d+)\s*(K|M|G|)B?\z#i', $operator, $matches)) {
				throw new Nette\InvalidArgumentException('Invalid size predicate format.');
			}
			list(, $operator, $size, $unit) = $matches;
			// decimal units: kB = 1e3 bytes, MB = 1e6, GB = 1e9
			static $units = array('' => 1, 'k' => 1e3, 'm' => 1e6, 'g' => 1e9);
			$size *= $units[strtolower($unit)];
			$operator = $operator ? $operator : '=';
		}
		return $this->filter(function($file) use ($operator, $size) {
			return Finder::compare($file->getSize(), $operator, $size);
		});
	}
	/**
	 * Restricts the search by modified time.
	 * @param string "[operator] [date]" example: >1978-01-23
	 * @param mixed
	 * @return self
	 */
	public function date($operator, $date = NULL)
	{
		if (func_num_args() === 1) { // in $operator is predicate
			if (!preg_match('#^(?:([=<>!]=?|<>)\s*)?(.+)\z#i', $operator, $matches)) {
				throw new Nette\InvalidArgumentException('Invalid date predicate format.');
			}
			list(, $operator, $date) = $matches;
			$operator = $operator ? $operator : '=';
		}
		// normalize to a Unix timestamp for comparison against getMTime()
		$date = Nette\DateTime::from($date)->format('U');
		return $this->filter(function($file) use ($operator, $date) {
			return Finder::compare($file->getMTime(), $operator, $date);
		});
	}
	/**
	 * Compares two values.
	 * @param mixed
	 * @param mixed
	 * @return bool
	 */
	public static function compare($l, $operator, $r)
	{
		switch ($operator) {
			case '>':
				return $l > $r;
			case '>=':
				return $l >= $r;
			case '<':
				return $l < $r;
			case '<=':
				return $l <= $r;
			case '=':
			case '==':
				return $l == $r;
			case '!':
			case '!=':
			case '<>':
				return $l != $r;
			default:
				throw new Nette\InvalidArgumentException("Unknown operator $operator.");
		}
	}
}
if (PHP_VERSION_ID < 50301) {
	/** @internal
	 * Pre-5.3.1 fallback: forces hasChildren() to allow following
	 * symlinked directories (the $allow_links argument).
	 */
	class RecursiveDirectoryIteratorFixed extends \RecursiveDirectoryIterator
	{
		function hasChildren()
		{
			return parent::hasChildren(TRUE);
		}
	}
}
| {
"content_hash": "2e7b9d8c69d223028ef7e1b1ec734739",
"timestamp": "",
"source": "github",
"line_count": 396,
"max_line_length": 115,
"avg_line_length": 21.517676767676768,
"alnum_prop": 0.5734068771270978,
"repo_name": "johnymachine/moje-predsevzeti",
"id": "ca6769bb132ef82f420fc4c18613398db2990cc3",
"size": "8786",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "libs/nette/nette/Nette/Utils/Finder.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "12126"
},
{
"name": "JavaScript",
"bytes": "28798"
},
{
"name": "PHP",
"bytes": "1096429"
}
],
"symlink_target": ""
} |
<?php
use Illuminate\Support\Str;
use Faker\Generator as Faker;
/*
|--------------------------------------------------------------------------
| Model Factories
|--------------------------------------------------------------------------
|
| This directory should contain each of the model factory definitions for
| your application. Factories provide a convenient way to generate new
| model instances for testing / seeding your application's database.
|
*/
// Default User model state: a verified user with a pre-hashed password
// ("secret") and a fresh remember token.
$factory->define(OpenNotion\Models\User::class, function (Faker $faker) {
    $attributes = [
        'name' => $faker->name,
        'email' => $faker->unique()->safeEmail,
        'email_verified_at' => now(),
        // bcrypt hash of the literal string "secret"
        'password' => '$2y$10$TKh8H1.PfQx37YgCzwiKb.KjNyWgaHb9cbcoQgdIVFlYg7B77UdFm',
        'remember_token' => Str::random(10),
    ];

    return $attributes;
});
| {
"content_hash": "d37f56a9571316d0ad96de63dd253480",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 95,
"avg_line_length": 32.72,
"alnum_prop": 0.5488997555012225,
"repo_name": "halfpetal/OpenNotion",
"id": "b898a16d9b12df151c8aa62f2572c7d01ccb29b3",
"size": "818",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "database/factories/UserFactory.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "249621"
},
{
"name": "PHP",
"bytes": "107471"
},
{
"name": "Vue",
"bytes": "552"
}
],
"symlink_target": ""
} |
# Creates the organization_invitations join table linking the invited user
# to the inviting organization.
class CreateOrganizationInvitations < ActiveRecord::Migration
  def change
    create_table :organization_invitations do |t|
      # Indexed foreign keys so lookups work from either side of the relation.
      [:organization, :user].each do |association|
        t.belongs_to association, index: true
      end

      t.timestamps
    end
  end
end
| {
"content_hash": "2632bed46e6884f6fa06cdd8bb71cd81",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 61,
"avg_line_length": 24.7,
"alnum_prop": 0.708502024291498,
"repo_name": "CucumisSativus/galeopterus",
"id": "8b8bd8b015be7ced1903c8243402f13d1dc20377",
"size": "247",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "db/migrate/20141126202859_create_organization_invitations.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "2699"
},
{
"name": "CoffeeScript",
"bytes": "63"
},
{
"name": "JavaScript",
"bytes": "710"
},
{
"name": "Ruby",
"bytes": "72739"
}
],
"symlink_target": ""
} |
#include <config/config_class.h>
#include <config/config_exporter.h>
#include <config/config_object.h>
// Activates this object by delegating to the underlying instance,
// passing this object as context. Returns the instance's result.
bool
ConfigObject::activate(void) const
{
	return (instance_->activate(this));
}
// Serializes this object through the exporter; the class descriptor
// knows the member layout and drives the marshalling of instance_.
void
ConfigObject::marshall(ConfigExporter *exp) const
{
	class_->marshall(exp, instance_);
}
// Sets member `mname` from the string value `vstr`, forwarded to the class
// descriptor which owns member resolution/parsing. Returns false on
// failure — presumably unknown member or unparsable value; TODO confirm.
bool
ConfigObject::set(const std::string& mname, const std::string& vstr)
{
	return (class_->set(this, mname, vstr));
}
| {
"content_hash": "b585aa0c629d7e0ab9167c5c03395101",
"timestamp": "",
"source": "github",
"line_count": 23,
"max_line_length": 68,
"avg_line_length": 17.52173913043478,
"alnum_prop": 0.7220843672456576,
"repo_name": "diegows/wanproxy",
"id": "4811fc9c206ea301d517142cd8d4f05103547237",
"size": "1750",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "config/config_object.cc",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Assembly",
"bytes": "935775"
},
{
"name": "Awk",
"bytes": "120493"
},
{
"name": "C",
"bytes": "54615824"
},
{
"name": "C++",
"bytes": "1777707"
},
{
"name": "Objective-C",
"bytes": "75702"
},
{
"name": "Shell",
"bytes": "29861"
}
],
"symlink_target": ""
} |
#pragma once
#include <aws/ds/DirectoryService_EXPORTS.h>
#include <aws/ds/DirectoryServiceRequest.h>
#include <aws/core/utils/memory/stl/AWSString.h>
#include <aws/core/utils/memory/stl/AWSVector.h>
#include <utility>
namespace Aws
{
namespace DirectoryService
{
namespace Model
{
  /**
   * Request object for the Directory Service <code>RemoveIpRoutes</code>
   * operation: identifies a directory and the CIDR IP blocks to remove
   * from it.
   */
  class AWS_DIRECTORYSERVICE_API RemoveIpRoutesRequest : public DirectoryServiceRequest
  {
  public:
    RemoveIpRoutesRequest();
    // Service request name is the Operation name which will send this request out,
    // each operation should have a unique request name, so that we can get the operation's name from this request.
    // Note: this is not true for responses, multiple operations may have the same response name,
    // so we can not get an operation's name from its response.
    inline virtual const char* GetServiceRequestName() const override { return "RemoveIpRoutes"; }
    Aws::String SerializePayload() const override;
    Aws::Http::HeaderValueCollection GetRequestSpecificHeaders() const override;
    /**
     * <p>Identifier (ID) of the directory from which you want to remove the IP
     * addresses.</p>
     */
    inline const Aws::String& GetDirectoryId() const{ return m_directoryId; }
    /**
     * <p>Identifier (ID) of the directory from which you want to remove the IP
     * addresses.</p>
     */
    inline bool DirectoryIdHasBeenSet() const { return m_directoryIdHasBeenSet; }
    /**
     * <p>Identifier (ID) of the directory from which you want to remove the IP
     * addresses.</p>
     */
    inline void SetDirectoryId(const Aws::String& value) { m_directoryIdHasBeenSet = true; m_directoryId = value; }
    /**
     * <p>Identifier (ID) of the directory from which you want to remove the IP
     * addresses.</p>
     */
    inline void SetDirectoryId(Aws::String&& value) { m_directoryIdHasBeenSet = true; m_directoryId = std::move(value); }
    /**
     * <p>Identifier (ID) of the directory from which you want to remove the IP
     * addresses.</p>
     */
    inline void SetDirectoryId(const char* value) { m_directoryIdHasBeenSet = true; m_directoryId.assign(value); }
    /**
     * <p>Identifier (ID) of the directory from which you want to remove the IP
     * addresses.</p>
     */
    inline RemoveIpRoutesRequest& WithDirectoryId(const Aws::String& value) { SetDirectoryId(value); return *this;}
    /**
     * <p>Identifier (ID) of the directory from which you want to remove the IP
     * addresses.</p>
     */
    inline RemoveIpRoutesRequest& WithDirectoryId(Aws::String&& value) { SetDirectoryId(std::move(value)); return *this;}
    /**
     * <p>Identifier (ID) of the directory from which you want to remove the IP
     * addresses.</p>
     */
    inline RemoveIpRoutesRequest& WithDirectoryId(const char* value) { SetDirectoryId(value); return *this;}
    /**
     * <p>IP address blocks that you want to remove.</p>
     */
    inline const Aws::Vector<Aws::String>& GetCidrIps() const{ return m_cidrIps; }
    /**
     * <p>IP address blocks that you want to remove.</p>
     */
    inline bool CidrIpsHasBeenSet() const { return m_cidrIpsHasBeenSet; }
    /**
     * <p>IP address blocks that you want to remove.</p>
     */
    inline void SetCidrIps(const Aws::Vector<Aws::String>& value) { m_cidrIpsHasBeenSet = true; m_cidrIps = value; }
    /**
     * <p>IP address blocks that you want to remove.</p>
     */
    inline void SetCidrIps(Aws::Vector<Aws::String>&& value) { m_cidrIpsHasBeenSet = true; m_cidrIps = std::move(value); }
    /**
     * <p>IP address blocks that you want to remove.</p>
     */
    inline RemoveIpRoutesRequest& WithCidrIps(const Aws::Vector<Aws::String>& value) { SetCidrIps(value); return *this;}
    /**
     * <p>IP address blocks that you want to remove.</p>
     */
    inline RemoveIpRoutesRequest& WithCidrIps(Aws::Vector<Aws::String>&& value) { SetCidrIps(std::move(value)); return *this;}
    /**
     * <p>IP address blocks that you want to remove.</p>
     */
    inline RemoveIpRoutesRequest& AddCidrIps(const Aws::String& value) { m_cidrIpsHasBeenSet = true; m_cidrIps.push_back(value); return *this; }
    /**
     * <p>IP address blocks that you want to remove.</p>
     */
    inline RemoveIpRoutesRequest& AddCidrIps(Aws::String&& value) { m_cidrIpsHasBeenSet = true; m_cidrIps.push_back(std::move(value)); return *this; }
    /**
     * <p>IP address blocks that you want to remove.</p>
     */
    inline RemoveIpRoutesRequest& AddCidrIps(const char* value) { m_cidrIpsHasBeenSet = true; m_cidrIps.push_back(value); return *this; }
  private:
    Aws::String m_directoryId;
    // NOTE: the HasBeenSet flags are not initialized here; presumably the
    // out-of-line constructor zeroes them — confirm in the .cpp.
    bool m_directoryIdHasBeenSet;
    Aws::Vector<Aws::String> m_cidrIps;
    bool m_cidrIpsHasBeenSet;
  };
} // namespace Model
} // namespace DirectoryService
} // namespace Aws
| {
"content_hash": "393dac07f6774a48dc8d57d17c66c5ec",
"timestamp": "",
"source": "github",
"line_count": 140,
"max_line_length": 150,
"avg_line_length": 34.26428571428571,
"alnum_prop": 0.6685428392745466,
"repo_name": "awslabs/aws-sdk-cpp",
"id": "a190a950b6c133760398372dccc0bf2b322af0b7",
"size": "4916",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "aws-cpp-sdk-ds/include/aws/ds/model/RemoveIpRoutesRequest.h",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "7596"
},
{
"name": "C++",
"bytes": "61740540"
},
{
"name": "CMake",
"bytes": "337520"
},
{
"name": "Java",
"bytes": "223122"
},
{
"name": "Python",
"bytes": "47357"
}
],
"symlink_target": ""
} |
<?php
namespace Railken\Lem\Tests\App\Repositories;
use Railken\Lem\Repository;
use Railken\Lem\Tests\App\Models\User;
class UserRepository extends Repository
{
/**
* return whatever or not the email is unique.
*
* @param string $email
* @param User $user
*
* @return bool
*/
public function isUniqueEmail($email, User $user)
{
return 0 == $this->getQuery()->where('email', $email)->where('id', '!=', $user->id)->count();
}
}
| {
"content_hash": "44abd2fb1c202d988b89d81d53a5718a",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 101,
"avg_line_length": 22.272727272727273,
"alnum_prop": 0.6081632653061224,
"repo_name": "railken/laravel-manager",
"id": "f4b8ece4409f4dfde813f3cf42615a0733ac8a0c",
"size": "490",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/App/Repositories/UserRepository.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "PHP",
"bytes": "132564"
}
],
"symlink_target": ""
} |
Foundation is the most advanced responsive front-end framework in the world. Quickly go from prototype to production, building sites or apps that work on any kind of device with Foundation. Includes layout constructs, like a fully customizable, responsive grid, commonly used JavaScript plugins, and full A11Y support.
## Usage in Meteor
- [Scss guide](meteor-README.md/#scss-guide)
- [JavaScript guide](meteor-README.md/#javascript-guide)
## Scss Guide
### 1. Add the package
```
meteor add zurb:foundation-sites
```
### 2. In your main .scss file (in your app):
Import foundation:
```
@import '{zurb:foundation-sites}/scss/foundation';
```
Each component has an export mixin which prints out the CSS for that component. If you're cool with having everything, you just need one line of code:
```
@include foundation-everything;
```
Or you can comment out the components you don't need:
```
@import 'foundation';
// Global styles
@include foundation-global-styles;
@include foundation-forms;
@include foundation-typography;
// Grids (choose one)
@include foundation-xy-grid-classes;
// @include foundation-grid;
// @include foundation-flex-grid;
// Generic components
@include foundation-button;
@include foundation-button-group;
@include foundation-close-button;
@include foundation-label;
@include foundation-progress-bar;
@include foundation-slider;
@include foundation-switch;
@include foundation-table;
// Basic components
@include foundation-badge;
@include foundation-breadcrumbs;
@include foundation-callout;
@include foundation-card;
@include foundation-dropdown;
@include foundation-pagination;
@include foundation-tooltip;
// Containers
@include foundation-accordion;
@include foundation-media-object;
@include foundation-orbit;
@include foundation-responsive-embed;
@include foundation-tabs;
@include foundation-thumbnail;
// Menu-based containers
@include foundation-menu;
@include foundation-menu-icon;
@include foundation-accordion-menu;
@include foundation-drilldown-menu;
@include foundation-dropdown-menu;
// Layout components
@include foundation-off-canvas;
@include foundation-reveal;
@include foundation-sticky;
@include foundation-title-bar;
@include foundation-top-bar;
// Helpers
@include foundation-float-classes;
// @include foundation-flex-classes;
@include foundation-visibility-classes;
// @include foundation-prototype-classes;
```
Note: For now, a Motion-UI library (CSS and JS files) is bundled with this package. It is required by some Foundation plugins. It may be moved into a separate package in the future.
### 3. Overwrite Foundation settings
If you want you can copy `_settings.scss` file into your project. You can change settings and import it in your main .scss file (in your app):
```
@import 'settings'; // example when the _settings.scss file is in the same folder as your main .scss file
@import '{zurb:foundation-sites}/scss/foundation';
@include foundation-everything; // or individual ones
```
**Important:** In the _settings.scss (the copied one in your app) you need to replace `@import 'util/util'` with `@import '{zurb:foundation-sites}/scss/util/util'`
## JavaScript Guide
You can use `$(document).foundation()` when you want to initialize some plugins in one Meteor Template. You could do something like:
```
Template.main.onRendered(function () {
$(document).foundation();
});
```
**But in Meteor it is better to have more control over it. So, you could use Foundation plugins API.**
Let's take a look at the example with the Reveal plugin.
#### HTML part
```html
<body>
{{> myReveal}}
</body>
```
```html
<template name="myReveal">
<p><a data-open="myReveal">Click me for a modal</a></p>
<div class="reveal" id="myReveal">
<h1>Awesome. I Have It.</h1>
<p class="lead">Your couch. It is mine.</p>
<p>I'm a cool paragraph that lives inside of an even cooler modal. Wins!</p>
<button class="close-button" data-close aria-label="Close reveal" type="button">
<span aria-hidden="true">×</span>
</button>
</div>
</template>
```
#### JavaScript part
```javascript
Template.myReveal.onRendered(function () {
this.myRevealInstance = new Foundation.Reveal($('#myReveal'));
});
Template.myReveal.onDestroyed(function () {
let reveal = this.myRevealInstance;
if (reveal) {
reveal.destroy();
}
});
```
As you can see, it is better to create small templates for plugins and initialize each plugin separately in the `onRendered` lifecycle hook. You should also remember to destroy the plugin in the `onDestroyed` lifecycle hook of its template.
You will find more info about particular plugins on its docs page here: [https://get.foundation/sites/docs/](https://get.foundation/sites/docs/)
#### Known problems
1. **Conflicts with Meteor events**.
Solution: Try to always wrap Foundation's DOM nodes in additional wrapper nodes in your Meteor templates. This applies only to nodes on which Foundation's JS plugins are initialized and which are the first nodes of Meteor templates that have custom Meteor events attached. For more details read the last comments here: [#7248](https://github.com/foundation/foundation-sites/issues/7248)
| {
"content_hash": "59a6169236a1df3b83e1bb3a79eb9d3e",
"timestamp": "",
"source": "github",
"line_count": 168,
"max_line_length": 375,
"avg_line_length": 30.636904761904763,
"alnum_prop": 0.7482028366038469,
"repo_name": "DaSchTour/foundation-sites",
"id": "822f25689979f5efba317da3073413aace61accd",
"size": "5206",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "meteor-README.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "753369"
},
{
"name": "JavaScript",
"bytes": "195750"
},
{
"name": "SCSS",
"bytes": "385849"
}
],
"symlink_target": ""
} |
#if !defined(BOOST_PP_IS_ITERATING)
// Copyright David Abrahams 2001.
// Distributed under the Boost Software License, Version 1.0. (See
// accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
# ifndef POINTER_HOLDER_DWA20011215_HPP
# define POINTER_HOLDER_DWA20011215_HPP
# include <boost/get_pointer.hpp>
# include <boost/type.hpp>
# include <boost/python/instance_holder.hpp>
# include <boost/python/object/inheritance_query.hpp>
# include <boost/python/object/forward.hpp>
# include <boost/python/pointee.hpp>
# include <boost/python/type_id.hpp>
# include <boost/python/detail/wrapper_base.hpp>
# include <boost/python/detail/force_instantiate.hpp>
# include <boost/python/detail/preprocessor.hpp>
# include <boost/python/detail/type_traits.hpp>
# include <boost/mpl/if.hpp>
# include <boost/mpl/apply.hpp>
# include <boost/preprocessor/comma_if.hpp>
# include <boost/preprocessor/iterate.hpp>
# include <boost/preprocessor/repeat.hpp>
# include <boost/preprocessor/debug/line.hpp>
# include <boost/preprocessor/enum_params.hpp>
# include <boost/preprocessor/repetition/enum_binary_params.hpp>
# include <boost/detail/workaround.hpp>
namespace boost { namespace python {
template <class T> class wrapper;
}}
namespace boost { namespace python { namespace objects {
#define BOOST_PYTHON_UNFORWARD_LOCAL(z, n, _) BOOST_PP_COMMA_IF(n) objects::do_unforward(a##n,0)
// Instance holder that owns the wrapped C++ object through a (smart or raw)
// Pointer. The BOOST_PP_ITERATE include below expands section 1 at the
// bottom of this header once per arity, generating a family of forwarding
// constructors that build the held Value in place.
template <class Pointer, class Value>
struct pointer_holder : instance_holder
{
    typedef Value value_type;
    pointer_holder(Pointer);
    // Forward construction to the held object
# define BOOST_PP_ITERATION_PARAMS_1 (4, (0, BOOST_PYTHON_MAX_ARITY, <boost/python/object/pointer_holder.hpp>, 1))
# include BOOST_PP_ITERATE()
 private: // types
 private: // required holder implementation
    void* holds(type_info, bool null_ptr_only);
    // Overload chosen when the held object derives from wrapper<T>: a
    // request for exactly T can be satisfied with the pointer directly.
    template <class T>
    inline void* holds_wrapped(type_info dst_t, wrapper<T>*,T* p)
    {
        return python::type_id<T>() == dst_t ? p : 0;
    }
    // Fallback overload for non-wrapped held types.
    inline void* holds_wrapped(type_info, ...)
    {
        return 0;
    }
 private: // data members
    Pointer m_p;
};
// Variant of pointer_holder for held types that carry a back-reference to
// their owning Python object (held_type is the pointee of Pointer; the
// generated constructors in section 2 pass the PyObject* through to it).
template <class Pointer, class Value>
struct pointer_holder_back_reference : instance_holder
{
 private:
    typedef typename python::pointee<Pointer>::type held_type;
 public:
    typedef Value value_type;
    // Not sure about this one -- can it work? The source object
    // undoubtedly does not carry the correct back reference pointer.
    pointer_holder_back_reference(Pointer);
    // Forward construction to the held object
# define BOOST_PP_ITERATION_PARAMS_1 (4, (0, BOOST_PYTHON_MAX_ARITY, <boost/python/object/pointer_holder.hpp>, 2))
# include BOOST_PP_ITERATE()
 private: // required holder implementation
    void* holds(type_info, bool null_ptr_only);
 private: // data members
    Pointer m_p;
};
# undef BOOST_PYTHON_UNFORWARD_LOCAL
// Initializing constructor. Moves the pointer under C++11 so move-only
// smart pointers (e.g. std::unique_ptr) are supported.
template <class Pointer, class Value>
inline pointer_holder<Pointer,Value>::pointer_holder(Pointer p)
#if __cplusplus < 201103L
    : m_p(p)
#else
    : m_p(std::move(p))
#endif
{
}
// Initializing constructor; same move-when-available behavior as above.
template <class Pointer, class Value>
inline pointer_holder_back_reference<Pointer,Value>::pointer_holder_back_reference(Pointer p)
#if __cplusplus < 201103L
    : m_p(p)
#else
    : m_p(std::move(p))
#endif
{
}
// Returns a pointer usable for a conversion to dst_t, or 0 when the held
// object cannot satisfy the request. When null_ptr_only is set, the smart
// pointer itself is only handed out if it is currently null.
template <class Pointer, class Value>
void* pointer_holder<Pointer, Value>::holds(type_info dst_t, bool null_ptr_only)
{
    typedef typename boost::python::detail::remove_const< Value >::type non_const_value;
    // Request for the smart pointer itself.
    if (dst_t == python::type_id<Pointer>()
        && !(null_ptr_only && get_pointer(this->m_p))
    )
        return &this->m_p;
    Value* p0
# if BOOST_WORKAROUND(__SUNPRO_CC, BOOST_TESTED_AT(0x590))
        = static_cast<Value*>( get_pointer(this->m_p) )
# else
        = get_pointer(this->m_p)
# endif
        ;
    non_const_value* p = const_cast<non_const_value*>( p0 );
    if (p == 0)
        return 0;
    // Exact match for a wrapper<T>-derived held object, if applicable.
    if (void* wrapped = holds_wrapped(dst_t, p, p))
        return wrapped;
    // Otherwise fall back to the registered dynamic-type lookup.
    type_info src_t = python::type_id<non_const_value>();
    return src_t == dst_t ? p : find_dynamic_type(p, src_t, dst_t);
}
// As above, for the back-reference holder.
template <class Pointer, class Value>
void* pointer_holder_back_reference<Pointer, Value>::holds(type_info dst_t, bool null_ptr_only)
{
    // Request for the smart pointer itself.
    if (dst_t == python::type_id<Pointer>()
        && !(null_ptr_only && get_pointer(this->m_p))
    )
        return &this->m_p;
    if (!get_pointer(this->m_p))
        return 0;
    Value* p = get_pointer(m_p);
    // NOTE(review): a held_type request is answered with the Value* —
    // assumes held_type derives from Value; confirm.
    if (dst_t == python::type_id<held_type>())
        return p;
    type_info src_t = python::type_id<Value>();
    return src_t == dst_t ? p : find_dynamic_type(p, src_t, dst_t);
}
}}} // namespace boost::python::objects
# endif // POINTER_HOLDER_DWA20011215_HPP
/* --------------- pointer_holder --------------- */
// For gcc 4.4 compatability, we must include the
// BOOST_PP_ITERATION_DEPTH test inside an #else clause.
#else // BOOST_PP_IS_ITERATING

#if BOOST_PP_ITERATION_DEPTH() == 1 && BOOST_PP_ITERATION_FLAGS() == 1
# if !(BOOST_WORKAROUND(__MWERKS__, > 0x3100)                      \
        && BOOST_WORKAROUND(__MWERKS__, BOOST_TESTED_AT(0x3201)))
# line BOOST_PP_LINE(__LINE__, pointer_holder.hpp)
# endif
# define N BOOST_PP_ITERATION()
// Generated N-ary constructor for pointer_holder: unforwards each
// argument, heap-constructs the held Value, and wires up the Python
// wrapper back-reference.
# if (N != 0)
    template< BOOST_PP_ENUM_PARAMS_Z(1, N, class A) >
# endif
    pointer_holder(PyObject* self BOOST_PP_COMMA_IF(N) BOOST_PP_ENUM_BINARY_PARAMS_Z(1, N, A, a))
        : m_p(new Value(
                BOOST_PP_REPEAT_1ST(N, BOOST_PYTHON_UNFORWARD_LOCAL, nil)
            ))
    {
        python::detail::initialize_wrapper(self, get_pointer(this->m_p));
    }
# undef N

/* --------------- pointer_holder_back_reference --------------- */
#elif BOOST_PP_ITERATION_DEPTH() == 1 && BOOST_PP_ITERATION_FLAGS() == 2
# if !(BOOST_WORKAROUND(__MWERKS__, > 0x3100)                      \
        && BOOST_WORKAROUND(__MWERKS__, BOOST_TESTED_AT(0x3201)))
# line BOOST_PP_LINE(__LINE__, pointer_holder.hpp(pointer_holder_back_reference))
# endif
# define N BOOST_PP_ITERATION()
// Generated N-ary constructor for pointer_holder_back_reference: forwards
// the owning PyObject* as the held_type's first constructor argument.
# if (N != 0)
    template < BOOST_PP_ENUM_PARAMS_Z(1, N, class A) >
# endif
    pointer_holder_back_reference(
        PyObject* p BOOST_PP_COMMA_IF(N) BOOST_PP_ENUM_BINARY_PARAMS_Z(1, N, A, a))
        : m_p(new held_type(
                p BOOST_PP_COMMA_IF(N) BOOST_PP_REPEAT_1ST(N, BOOST_PYTHON_UNFORWARD_LOCAL, nil)
            ))
    {}
# undef N

#endif // BOOST_PP_ITERATION_DEPTH()
#endif
| {
"content_hash": "cd26276f13e0ee4353fd5d2e1fd2ef3b",
"timestamp": "",
"source": "github",
"line_count": 227,
"max_line_length": 115,
"avg_line_length": 28.629955947136565,
"alnum_prop": 0.6467148792121865,
"repo_name": "UCL-BLIC/legion-buildscripts",
"id": "c5caefe62ce76c5966c3c2bf7a9308219f01ecc6",
"size": "6499",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "cytofpipe/v1.2/Rlibs/BH/include/boost/python/object/pointer_holder.hpp",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "12560"
},
{
"name": "C",
"bytes": "3308085"
},
{
"name": "C++",
"bytes": "260929670"
},
{
"name": "CSS",
"bytes": "809845"
},
{
"name": "HTML",
"bytes": "41994240"
},
{
"name": "JavaScript",
"bytes": "1545980"
},
{
"name": "Jupyter Notebook",
"bytes": "3274374"
},
{
"name": "Lua",
"bytes": "38898"
},
{
"name": "M4",
"bytes": "374"
},
{
"name": "MATLAB",
"bytes": "454761"
},
{
"name": "Makefile",
"bytes": "5624"
},
{
"name": "Perl",
"bytes": "270283"
},
{
"name": "Python",
"bytes": "598"
},
{
"name": "R",
"bytes": "5472830"
},
{
"name": "Rebol",
"bytes": "6622"
},
{
"name": "Shell",
"bytes": "139698"
},
{
"name": "Tcl",
"bytes": "87998"
},
{
"name": "TeX",
"bytes": "450043"
}
],
"symlink_target": ""
} |
"""Support for UV data from openuv.io."""
import asyncio
from pyopenuv import Client
from pyopenuv.errors import OpenUvError
from homeassistant.const import (
ATTR_ATTRIBUTION,
CONF_API_KEY,
CONF_BINARY_SENSORS,
CONF_ELEVATION,
CONF_LATITUDE,
CONF_LONGITUDE,
CONF_SENSORS,
)
from homeassistant.core import callback
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers import aiohttp_client
from homeassistant.helpers.dispatcher import (
async_dispatcher_connect,
async_dispatcher_send,
)
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.service import verify_domain_control
from .const import (
DATA_CLIENT,
DATA_LISTENER,
DATA_PROTECTION_WINDOW,
DATA_UV,
DOMAIN,
LOGGER,
)
DEFAULT_ATTRIBUTION = "Data provided by OpenUV"
NOTIFICATION_ID = "openuv_notification"
NOTIFICATION_TITLE = "OpenUV Component Setup"
TOPIC_UPDATE = f"{DOMAIN}_data_update"
PLATFORMS = ["binary_sensor", "sensor"]
async def async_setup(hass, config):
    """Initialize shared storage for the OpenUV integration."""
    domain_data = {DATA_CLIENT: {}}
    domain_data[DATA_LISTENER] = {}
    hass.data[DOMAIN] = domain_data
    return True
async def async_setup_entry(hass, config_entry):
    """Set up OpenUV as config entry.

    Creates the API client, performs an initial refresh (raising
    ConfigEntryNotReady on API errors so Home Assistant retries setup later),
    forwards setup to the entity platforms, and registers the
    domain-protected refresh services.
    """
    _verify_domain_control = verify_domain_control(hass, DOMAIN)
    try:
        websession = aiohttp_client.async_get_clientsession(hass)
        # Fall back to the Home Assistant instance's configured location and
        # elevation when the config entry does not provide its own values.
        openuv = OpenUV(
            Client(
                config_entry.data[CONF_API_KEY],
                config_entry.data.get(CONF_LATITUDE, hass.config.latitude),
                config_entry.data.get(CONF_LONGITUDE, hass.config.longitude),
                websession,
                altitude=config_entry.data.get(CONF_ELEVATION, hass.config.elevation),
            )
        )
        # Initial data fetch; an OpenUvError here aborts the entry setup.
        await openuv.async_update()
        hass.data[DOMAIN][DATA_CLIENT][config_entry.entry_id] = openuv
    except OpenUvError as err:
        LOGGER.error("Config entry failed: %s", err)
        # ConfigEntryNotReady tells Home Assistant to retry this setup later.
        raise ConfigEntryNotReady from err
    hass.config_entries.async_setup_platforms(config_entry, PLATFORMS)
    # Service handlers below close over `openuv`; each refreshes a slice of
    # the data and then notifies listening entities via the dispatcher.
    @_verify_domain_control
    async def update_data(service):
        """Refresh all OpenUV data."""
        LOGGER.debug("Refreshing all OpenUV data")
        await openuv.async_update()
        async_dispatcher_send(hass, TOPIC_UPDATE)
    @_verify_domain_control
    async def update_uv_index_data(service):
        """Refresh OpenUV UV index data."""
        LOGGER.debug("Refreshing OpenUV UV index data")
        await openuv.async_update_uv_index_data()
        async_dispatcher_send(hass, TOPIC_UPDATE)
    @_verify_domain_control
    async def update_protection_data(service):
        """Refresh OpenUV protection window data."""
        LOGGER.debug("Refreshing OpenUV protection window data")
        await openuv.async_update_protection_data()
        async_dispatcher_send(hass, TOPIC_UPDATE)
    # Expose the handlers above as services of this domain.
    for service, method in [
        ("update_data", update_data),
        ("update_uv_index_data", update_uv_index_data),
        ("update_protection_data", update_protection_data),
    ]:
        hass.services.async_register(DOMAIN, service, method)
    return True
async def async_unload_entry(hass, config_entry):
    """Unload an OpenUV config entry."""
    platforms_unloaded = await hass.config_entries.async_unload_platforms(
        config_entry, PLATFORMS
    )
    if not platforms_unloaded:
        return False
    # Drop the cached client for this entry once its platforms are gone.
    hass.data[DOMAIN][DATA_CLIENT].pop(config_entry.entry_id)
    return True
async def async_migrate_entry(hass, config_entry):
    """Migrate the config entry upon new versions."""
    current_version = config_entry.version
    migrated_data = {**config_entry.data}
    LOGGER.debug("Migrating from version %s", current_version)
    # 1 -> 2: Remove unused condition data:
    if current_version == 1:
        migrated_data.pop(CONF_BINARY_SENSORS, None)
        migrated_data.pop(CONF_SENSORS, None)
        config_entry.version = 2
        current_version = 2
        hass.config_entries.async_update_entry(config_entry, data=migrated_data)
        LOGGER.debug("Migration to version %s successful", current_version)
    return True
class OpenUV:
    """Define a generic OpenUV object.

    Thin wrapper around the pyopenuv client that caches the most recent
    responses in ``self.data``.
    """

    def __init__(self, client):
        """Initialize."""
        self.client = client
        self.data = {}

    async def async_update_protection_data(self):
        """Update binary sensor (protection window) data."""
        try:
            resp = await self.client.uv_protection_window()
        except OpenUvError as err:
            LOGGER.error("Error during protection data update: %s", err)
            self.data[DATA_PROTECTION_WINDOW] = {}
        else:
            self.data[DATA_PROTECTION_WINDOW] = resp["result"]

    async def async_update_uv_index_data(self):
        """Update sensor (uv index, etc) data."""
        try:
            self.data[DATA_UV] = await self.client.uv_index()
        except OpenUvError as err:
            LOGGER.error("Error during uv index data update: %s", err)
            self.data[DATA_UV] = {}

    async def async_update(self):
        """Update sensor/binary sensor data concurrently."""
        await asyncio.gather(
            self.async_update_protection_data(),
            self.async_update_uv_index_data(),
        )
class OpenUvEntity(Entity):
    """Define a generic OpenUV entity."""
    def __init__(self, openuv):
        """Initialize."""
        # Shared attribution attribute exposed on every OpenUV entity.
        self._attrs = {ATTR_ATTRIBUTION: DEFAULT_ATTRIBUTION}
        self._available = True
        self._name = None
        self.openuv = openuv
    @property
    def available(self) -> bool:
        """Return True if entity is available."""
        return self._available
    @property
    def extra_state_attributes(self):
        """Return the state attributes."""
        return self._attrs
    @property
    def name(self):
        """Return the name of the entity."""
        return self._name
    async def async_added_to_hass(self):
        """Register callbacks."""
        @callback
        def update():
            """Update the state."""
            self.update_from_latest_data()
            self.async_write_ha_state()
        # Re-render whenever new data is dispatched on TOPIC_UPDATE; the
        # unsubscribe callable is cleaned up when the entity is removed.
        self.async_on_remove(async_dispatcher_connect(self.hass, TOPIC_UPDATE, update))
        # Populate state from whatever data is already cached.
        self.update_from_latest_data()
    def update_from_latest_data(self):
        """Update the sensor using the latest data.

        Subclasses must override this; the base implementation raises.
        """
        raise NotImplementedError
| {
"content_hash": "8219555dbbd60c1dee06a0652104cf62",
"timestamp": "",
"source": "github",
"line_count": 206,
"max_line_length": 88,
"avg_line_length": 30.631067961165048,
"alnum_prop": 0.6419968304278922,
"repo_name": "kennedyshead/home-assistant",
"id": "e1af166a3c2964cb8ed4a7eff5a6c8051cb09117",
"size": "6310",
"binary": false,
"copies": "1",
"ref": "refs/heads/dev",
"path": "homeassistant/components/openuv/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "1795"
},
{
"name": "Python",
"bytes": "33970989"
},
{
"name": "Shell",
"bytes": "4900"
}
],
"symlink_target": ""
} |
/**
* @file
*/
#include "src/statement/Assert.hpp"
#include "src/visitor/all.hpp"
/**
 * Constructor.
 *
 * @param cond Condition expression checked by the assertion.
 * @param loc Source location of the statement, for diagnostics.
 */
birch::Assert::Assert(Expression* cond, Location* loc) :
    Statement(loc),
    Conditioned(cond) {
  //
}
/**
 * Accept a visitor (visitor pattern): dispatches to Visitor::visit for this
 * node.
 */
void birch::Assert::accept(Visitor* visitor) const {
  visitor->visit(this);
}
| {
"content_hash": "99d830ee138effc7a9965b1d171f1cf8",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 56,
"avg_line_length": 17.125,
"alnum_prop": 0.656934306569343,
"repo_name": "lawmurray/Birch",
"id": "80c8665ba77714887d9f1450b230c5b84fc3f0da",
"size": "274",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "birch/src/statement/Assert.cpp",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C++",
"bytes": "757466"
},
{
"name": "CSS",
"bytes": "444"
},
{
"name": "Dockerfile",
"bytes": "3285"
},
{
"name": "HTML",
"bytes": "3657"
},
{
"name": "M4",
"bytes": "118062"
},
{
"name": "Makefile",
"bytes": "15786"
},
{
"name": "Shell",
"bytes": "3405"
}
],
"symlink_target": ""
} |
namespace llvm {
class Module;
class Function;
class BasicBlock;
// ProfileInfoLoader - Reads profiling data files and exposes the raw
// counters to consumers (function/block/edge counts and BB traces).
class ProfileInfoLoader {
  // Path of the loaded profile data file. Held by reference, so the
  // referenced string must outlive this object — NOTE(review): confirm
  // callers guarantee this.
  const std::string &Filename;
  // Module the profile information applies to.
  Module &M;
  // One entry per recorded execution: the command line of that run.
  std::vector<std::string> CommandLines;
  // Raw counters as read from the file, indexed by profiling slot.
  std::vector<unsigned> FunctionCounts;
  std::vector<unsigned> BlockCounts;
  std::vector<unsigned> EdgeCounts;
  std::vector<unsigned> OptimalEdgeCounts;
  // Basic-block trace data, if present in the file.
  std::vector<unsigned> BBTrace;
  bool Warned; // set after the first warning, to avoid repeats
public:
  // ProfileInfoLoader ctor - Read the specified profiling data file, exiting
  // the program if the file is invalid or broken.
  ProfileInfoLoader(const char *ToolName, const std::string &Filename,
                    Module &M);
  // Sentinel value meaning "no count recorded" for an entity.
  static const unsigned Uncounted;
  // Number of recorded executions contained in the profile file.
  unsigned getNumExecutions() const { return CommandLines.size(); }
  // Command line of execution i (0-based; i must be < getNumExecutions()).
  const std::string &getExecution(unsigned i) const { return CommandLines[i]; }
  const std::string &getFileName() const { return Filename; }
  // getRawFunctionCounts - This method is used by consumers of function
  // counting information.
  //
  const std::vector<unsigned> &getRawFunctionCounts() const {
    return FunctionCounts;
  }
  // getRawBlockCounts - This method is used by consumers of block counting
  // information.
  //
  const std::vector<unsigned> &getRawBlockCounts() const {
    return BlockCounts;
  }
  // getEdgeCounts - This method is used by consumers of edge counting
  // information.
  //
  const std::vector<unsigned> &getRawEdgeCounts() const {
    return EdgeCounts;
  }
  // getEdgeOptimalCounts - This method is used by consumers of optimal edge
  // counting information.
  //
  const std::vector<unsigned> &getRawOptimalEdgeCounts() const {
    return OptimalEdgeCounts;
  }
};
} // End llvm namespace
#endif
| {
"content_hash": "767d5df4cf8d5222b49bbbe383fd7740",
"timestamp": "",
"source": "github",
"line_count": 62,
"max_line_length": 79,
"avg_line_length": 27.387096774193548,
"alnum_prop": 0.7102473498233216,
"repo_name": "ssaroha/node-webrtc",
"id": "9e0c393c428fbc873bc2e8991557bb9881abe172",
"size": "2451",
"binary": false,
"copies": "34",
"ref": "refs/heads/develop",
"path": "third_party/webrtc/include/chromium/src/third_party/swiftshader/third_party/LLVM/include/llvm/Analysis/ProfileInfoLoader.h",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Batchfile",
"bytes": "6179"
},
{
"name": "C",
"bytes": "2679"
},
{
"name": "C++",
"bytes": "54327"
},
{
"name": "HTML",
"bytes": "434"
},
{
"name": "JavaScript",
"bytes": "42707"
},
{
"name": "Python",
"bytes": "3835"
}
],
"symlink_target": ""
} |
<!DOCTYPE HTML>
<!-- NewPage -->
<html lang="en">
<head>
<!-- Generated by javadoc (9-ea) on Sun Oct 30 18:56:31 UTC 2016 -->
<title>EXTABGR (LWJGL 3.1.0 - OpenGL)</title>
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
<meta name="dc.created" content="2016-10-30">
<link rel="stylesheet" type="text/css" href="../../../javadoc.css" title="Style">
<link rel="stylesheet" type="text/css" href="../../../jquery/jquery-ui.css" title="Style">
<script type="text/javascript" src="../../../script.js"></script>
<script type="text/javascript" src="../../../jquery/jszip/dist/jszip.min.js"></script>
<script type="text/javascript" src="../../../jquery/jszip-utils/dist/jszip-utils.min.js"></script>
<!--[if IE]>
<script type="text/javascript" src="../../../jquery/jszip-utils/dist/jszip-utils-ie.min.js"></script>
<![endif]-->
<script type="text/javascript" src="../../../jquery/jquery-1.10.2.js"></script>
<script type="text/javascript" src="../../../jquery/jquery-ui.js"></script>
</head>
<body>
<script type="text/javascript"><!--
try {
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="EXTABGR (LWJGL 3.1.0 - OpenGL)";
}
}
catch(err) {
}
//-->
var pathtoroot = "../../../";loadScripts(document, 'script');</script>
<noscript>
<div>JavaScript is disabled on your browser.</div>
</noscript>
<header role="banner">
<nav role="navigation">
<div class="fixedNav">
<!-- ========= START OF TOP NAVBAR ======= -->
<div class="topNav"><a id="navbar.top">
<!-- -->
</a>
<div class="skipNav"><a href="#skip.navbar.top" title="Skip navigation links">Skip navigation links</a></div>
<a id="navbar.top.firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../org/lwjgl/opengl/package-summary.html">Package</a></li>
<li class="navBarCell1Rev">Class</li>
<li><a href="../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../index-all.html">Index</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li><a href="../../../org/lwjgl/opengl/EXT422Pixels.html" title="class in org.lwjgl.opengl"><span class="typeNameLink">Prev Class</span></a></li>
<li><a href="../../../org/lwjgl/opengl/EXTBGRA.html" title="class in org.lwjgl.opengl"><span class="typeNameLink">Next Class</span></a></li>
</ul>
<ul class="navList">
<li><a href="../../../index.html?org/lwjgl/opengl/EXTABGR.html" target="_top">Frames</a></li>
<li><a href="EXTABGR.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_top">
<li><a href="../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<ul class="navListSearch">
<li><span>SEARCH: </span>
<input type="text" id="search" value=" " disabled="disabled">
<input type="reset" id="reset" value=" " disabled="disabled">
</li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_top");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
<noscript>
<div>JavaScript is disabled on your browser.</div>
</noscript>
</div>
<div>
<ul class="subNavList">
<li>Summary: </li>
<li>Nested | </li>
<li><a href="#field.summary">Field</a> | </li>
<li>Constr | </li>
<li><a href="#methods.inherited.from.class.java.lang.Object">Method</a></li>
</ul>
<ul class="subNavList">
<li>Detail: </li>
<li><a href="#field.detail">Field</a> | </li>
<li>Constr | </li>
<li>Method</li>
</ul>
</div>
<a id="skip.navbar.top">
<!-- -->
</a></div>
<!-- ========= END OF TOP NAVBAR ========= -->
</div>
<div class="navPadding"> </div>
</nav>
</header>
<!-- ======== START OF CLASS DATA ======== -->
<main role="main">
<div class="header">
<div class="subTitle"><span class="packageLabelInClass">Package</span> <a href="../../../org/lwjgl/opengl/package-summary.html" target="classFrame">org.lwjgl.opengl</a></div>
<h2 title="Class EXTABGR" class="title">Class EXTABGR</h2>
</div>
<div class="contentContainer">
<ul class="inheritance">
<li>java.lang.Object</li>
<li>
<ul class="inheritance">
<li>org.lwjgl.opengl.EXTABGR</li>
</ul>
</li>
</ul>
<div class="description">
<ul class="blockList">
<li class="blockList">
<hr>
<br>
<pre>public final class <span class="typeNameLabel">EXTABGR</span>
extends java.lang.Object</pre>
<div class="block">Native bindings to the <a href="http://www.opengl.org/registry/specs/EXT/abgr.txt">EXT_abgr</a> extension.
<p>EXT_abgr extends the list of host-memory color formats. Specifically, it provides a reverse-order alternative to image format RGBA. The ABGR component
order matches the cpack Iris GL format on big-endian machines.</p></div>
</li>
</ul>
</div>
<div class="summary">
<ul class="blockList">
<li class="blockList">
<!-- =========== FIELD SUMMARY =========== -->
<section role="region">
<ul class="blockList">
<li class="blockList"><a id="field.summary">
<!-- -->
</a>
<h3>Field Summary</h3>
<table class="memberSummary">
<caption><span>Fields</span><span class="tabEnd"> </span></caption>
<tr>
<th class="colFirst" scope="col">Modifier and Type</th>
<th class="colLast" scope="col">Field and Description</th>
</tr>
<tr class="altColor">
<td class="colFirst"><code>static int</code></td>
<td class="colLast"><code><span class="memberNameLink"><a href="../../../org/lwjgl/opengl/EXTABGR.html#GL_ABGR_EXT">GL_ABGR_EXT</a></span></code>
<div class="block">Accepted by the <code>format</code> parameter of DrawPixels, GetTexImage, ReadPixels, TexImage1D, and TexImage2D.</div>
</td>
</tr>
</table>
</li>
</ul>
</section>
<!-- ========== METHOD SUMMARY =========== -->
<section role="region">
<ul class="blockList">
<li class="blockList"><a id="method.summary">
<!-- -->
</a>
<h3>Method Summary</h3>
<ul class="blockList">
<li class="blockList"><a id="methods.inherited.from.class.java.lang.Object">
<!-- -->
</a>
<h3>Methods inherited from class java.lang.Object</h3>
<code>equals, getClass, hashCode, notify, notifyAll, toString, wait, wait, wait</code></li>
</ul>
</li>
</ul>
</section>
</li>
</ul>
</div>
<div class="details">
<ul class="blockList">
<li class="blockList">
<!-- ============ FIELD DETAIL =========== -->
<section role="region">
<ul class="blockList">
<li class="blockList"><a id="field.detail">
<!-- -->
</a>
<h3>Field Detail</h3>
<a id="GL_ABGR_EXT">
<!-- -->
</a>
<ul class="blockListLast">
<li class="blockList">
<h4>GL_ABGR_EXT</h4>
<pre>public static final int GL_ABGR_EXT</pre>
<div class="block">Accepted by the <code>format</code> parameter of DrawPixels, GetTexImage, ReadPixels, TexImage1D, and TexImage2D.</div>
<dl>
<dt><span class="seeLabel">See Also:</span></dt>
<dd><a href="../../../constant-values.html#org.lwjgl.opengl.EXTABGR.GL_ABGR_EXT">Constant Field Values</a></dd>
</dl>
</li>
</ul>
</li>
</ul>
</section>
</li>
</ul>
</div>
</div>
</main>
<!-- ========= END OF CLASS DATA ========= -->
<footer role="contentinfo">
<nav role="navigation">
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<div class="bottomNav"><a id="navbar.bottom">
<!-- -->
</a>
<div class="skipNav"><a href="#skip.navbar.bottom" title="Skip navigation links">Skip navigation links</a></div>
<a id="navbar.bottom.firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../org/lwjgl/opengl/package-summary.html">Package</a></li>
<li class="navBarCell1Rev">Class</li>
<li><a href="../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../index-all.html">Index</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li><a href="../../../org/lwjgl/opengl/EXT422Pixels.html" title="class in org.lwjgl.opengl"><span class="typeNameLink">Prev Class</span></a></li>
<li><a href="../../../org/lwjgl/opengl/EXTBGRA.html" title="class in org.lwjgl.opengl"><span class="typeNameLink">Next Class</span></a></li>
</ul>
<ul class="navList">
<li><a href="../../../index.html?org/lwjgl/opengl/EXTABGR.html" target="_top">Frames</a></li>
<li><a href="EXTABGR.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_bottom">
<li><a href="../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_bottom");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
<noscript>
<div>JavaScript is disabled on your browser.</div>
</noscript>
</div>
<div>
<ul class="subNavList">
<li>Summary: </li>
<li>Nested | </li>
<li><a href="#field.summary">Field</a> | </li>
<li>Constr | </li>
<li><a href="#methods.inherited.from.class.java.lang.Object">Method</a></li>
</ul>
<ul class="subNavList">
<li>Detail: </li>
<li><a href="#field.detail">Field</a> | </li>
<li>Constr | </li>
<li>Method</li>
</ul>
</div>
<a id="skip.navbar.bottom">
<!-- -->
</a></div>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
</nav>
<p class="legalCopy"><small><i>Copyright LWJGL. All Rights Reserved. <a href="https://www.lwjgl.org/license">License terms</a>.</i></small></p>
</footer>
</body>
</html>
| {
"content_hash": "bdc97fdaed35ab448c15233e20101782",
"timestamp": "",
"source": "github",
"line_count": 283,
"max_line_length": 179,
"avg_line_length": 32.840989399293285,
"alnum_prop": 0.6370776845276522,
"repo_name": "VirtualGamer/SnowEngine",
"id": "bccbe1f3197323f03f9978c8be5f1c0bf579a244",
"size": "9294",
"binary": false,
"copies": "1",
"ref": "refs/heads/Development",
"path": "Dependencies/opengl/docs/javadoc/org/lwjgl/opengl/EXTABGR.html",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "GLSL",
"bytes": "3506"
},
{
"name": "Java",
"bytes": "229846"
}
],
"symlink_target": ""
} |
package org.keycloak.testsuite.events;
import org.junit.After;
import org.junit.Assert;
import org.junit.Test;
import org.keycloak.events.admin.OperationType;
import org.keycloak.representations.idm.AdminEventRepresentation;
import org.keycloak.representations.idm.AuthDetailsRepresentation;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
/**
* @author <a href="mailto:[email protected]">Giriraj Sharma</a>
* @author Stan Silvert [email protected] (C) 2016 Red Hat Inc.
*/
public class AdminEventStoreProviderTest extends AbstractEventsTest {
@After
public void after() {
testing().clearAdminEventStore();
}
@Test
public void save() {
testing().onAdminEvent(create("realmId", OperationType.CREATE, "realmId", "clientId", "userId", "127.0.0.1", "/admin/realms/master", "error"), false);
}
@Test
public void query() {
long oldest = System.currentTimeMillis() - 30000;
long newest = System.currentTimeMillis() + 30000;
testing().onAdminEvent(create("realmId", OperationType.CREATE, "realmId", "clientId", "userId", "127.0.0.1", "/admin/realms/master", "error"), false);
testing().onAdminEvent(create(newest, "realmId", OperationType.ACTION, "realmId", "clientId", "userId", "127.0.0.1", "/admin/realms/master", "error"), false);
testing().onAdminEvent(create(newest, "realmId", OperationType.ACTION, "realmId", "clientId", "userId2", "127.0.0.1", "/admin/realms/master", "error"), false);
testing().onAdminEvent(create("realmId2", OperationType.CREATE, "realmId2", "clientId", "userId", "127.0.0.1", "/admin/realms/master", "error"), false);
testing().onAdminEvent(create(oldest, "realmId", OperationType.CREATE, "realmId", "clientId2", "userId", "127.0.0.1", "/admin/realms/master", "error"), false);
testing().onAdminEvent(create("realmId", OperationType.CREATE, "realmId", "clientId", "userId2", "127.0.0.1", "/admin/realms/master", "error"), false);
Assert.assertEquals(5, testing().getAdminEvents(null, null, null, "clientId", null, null, null, null, null, null, null).size());
Assert.assertEquals(5, testing().getAdminEvents(null, null, "realmId", null, null, null, null, null, null, null, null).size());
Assert.assertEquals(4, testing().getAdminEvents(null, toList(OperationType.CREATE), null, null, null, null, null, null, null, null, null).size());
Assert.assertEquals(6, testing().getAdminEvents(null, toList(OperationType.CREATE, OperationType.ACTION), null, null, null, null, null, null, null, null, null).size());
Assert.assertEquals(4, testing().getAdminEvents(null, null, null, null, "userId", null, null, null, null, null, null).size());
Assert.assertEquals(1, testing().getAdminEvents(null, toList(OperationType.ACTION), null, null, "userId", null, null, null, null, null, null).size());
Assert.assertEquals(2, testing().getAdminEvents(null, null, null, null, null, null, null, null, null, null, 2).size());
Assert.assertEquals(1, testing().getAdminEvents(null, null, null, null, null, null, null, null, null, 5, null).size());
Assert.assertEquals(newest, testing().getAdminEvents(null, null, null, null, null, null, null, null, null, null, 1).get(0).getTime());
Assert.assertEquals(oldest, testing().getAdminEvents(null, null, null, null, null, null, null, null, null, 5, 1).get(0).getTime());
testing().clearAdminEventStore("realmId");
testing().clearAdminEventStore("realmId2");
Assert.assertEquals(0, testing().getAdminEvents(null, null, null, null, null, null, null, null, null, null, null).size());
String d1 = new String("2015-03-04");
String d2 = new String("2015-03-05");
String d3 = new String("2015-03-06");
String d4 = new String("2015-03-07");
String d5 = new String("2015-03-01");
String d6 = new String("2015-03-03");
String d7 = new String("2015-03-08");
String d8 = new String("2015-03-10");
SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd");
Date date1 = null, date2 = null, date3 = null, date4 = null;
try {
date1 = formatter.parse(d1);
date2 = formatter.parse(d2);
date3 = formatter.parse(d3);
date4 = formatter.parse(d4);
} catch (ParseException e) {
e.printStackTrace();
}
testing().onAdminEvent(create(date1, "realmId", OperationType.CREATE, "realmId", "clientId", "userId", "127.0.0.1", "/admin/realms/master", "error"), false);
testing().onAdminEvent(create(date1, "realmId", OperationType.CREATE, "realmId", "clientId", "userId", "127.0.0.1", "/admin/realms/master", "error"), false);
testing().onAdminEvent(create(date2, "realmId", OperationType.ACTION, "realmId", "clientId", "userId", "127.0.0.1", "/admin/realms/master", "error"), false);
testing().onAdminEvent(create(date2, "realmId", OperationType.ACTION, "realmId", "clientId", "userId", "127.0.0.1", "/admin/realms/master", "error"), false);
testing().onAdminEvent(create(date3, "realmId", OperationType.UPDATE, "realmId", "clientId", "userId2", "127.0.0.1", "/admin/realms/master", "error"), false);
testing().onAdminEvent(create(date3, "realmId", OperationType.DELETE, "realmId", "clientId", "userId2", "127.0.0.1", "/admin/realms/master", "error"), false);
testing().onAdminEvent(create(date4, "realmId2", OperationType.CREATE, "realmId2", "clientId2", "userId2", "127.0.0.1", "/admin/realms/master", "error"), false);
testing().onAdminEvent(create(date4, "realmId2", OperationType.CREATE, "realmId2", "clientId2", "userId2", "127.0.0.1", "/admin/realms/master", "error"), false);
Assert.assertEquals(6, testing().getAdminEvents(null, null, null, "clientId", null, null, null, null, null, null, null).size());
Assert.assertEquals(2, testing().getAdminEvents(null, null, null, "clientId2", null, null, null, null, null, null, null).size());
Assert.assertEquals(6, testing().getAdminEvents(null, null, "realmId", null, null, null, null, null, null, null, null).size());
Assert.assertEquals(2, testing().getAdminEvents(null, null, "realmId2", null, null, null, null, null, null, null, null).size());
Assert.assertEquals(4, testing().getAdminEvents(null, null, null, null, "userId", null, null, null, null, null, null).size());
Assert.assertEquals(4, testing().getAdminEvents(null, null, null, null, "userId2", null, null, null, null, null, null).size());
Assert.assertEquals(2, testing().getAdminEvents(null, toList(OperationType.ACTION), null, null, null, null, null, null, null, null, null).size());
Assert.assertEquals(6, testing().getAdminEvents(null, toList(OperationType.CREATE, OperationType.ACTION), null, null, null, null, null, null, null, null, null).size());
Assert.assertEquals(1, testing().getAdminEvents(null, toList(OperationType.UPDATE), null, null, null, null, null, null, null, null, null).size());
Assert.assertEquals(1, testing().getAdminEvents(null, toList(OperationType.DELETE), null, null, null, null, null, null, null, null, null).size());
Assert.assertEquals(4, testing().getAdminEvents(null, toList(OperationType.CREATE), null, null, null, null, null, null, null, null, null).size());
Assert.assertEquals(8, testing().getAdminEvents(null, null, null, null, null, null, null, d1, null, null, null).size());
Assert.assertEquals(8, testing().getAdminEvents(null, null, null, null, null, null, null, null, d4, null, null).size());
Assert.assertEquals(4, testing().getAdminEvents(null, null, null, null, null, null, null, d3, null, null, null).size());
Assert.assertEquals(4, testing().getAdminEvents(null, null, null, null, null, null, null, null, d2, null, null).size());
Assert.assertEquals(0, testing().getAdminEvents(null, null, null, null, null, null, null, d7, null, null, null).size());
Assert.assertEquals(0, testing().getAdminEvents(null, null, null, null, null, null, null, null, d6, null, null).size());
Assert.assertEquals(8, testing().getAdminEvents(null, null, null, null, null, null, null, d1, d4, null, null).size());
Assert.assertEquals(6, testing().getAdminEvents(null, null, null, null, null, null, null, d2, d4, null, null).size());
Assert.assertEquals(4, testing().getAdminEvents(null, null, null, null, null, null, null, d1, d2, null, null).size());
Assert.assertEquals(4, testing().getAdminEvents(null, null, null, null, null, null, null, d3, d4, null, null).size());
Assert.assertEquals(0, testing().getAdminEvents(null, null, null, null, null, null, null, d5, d6, null, null).size());
Assert.assertEquals(0, testing().getAdminEvents(null, null, null, null, null, null, null, d7, d8, null, null).size());
}
@Test
public void queryResourcePath() {
long oldest = System.currentTimeMillis() - 30000;
long newest = System.currentTimeMillis() + 30000;
testing().onAdminEvent(create("realmId", OperationType.CREATE, "realmId", "clientId", "userId", "127.0.0.1", "/admin/realms/master", "error"), false);
testing().onAdminEvent(create(newest, "realmId", OperationType.ACTION, "realmId", "clientId", "userId", "127.0.0.1", "/admin/realms/master", "error"), false);
testing().onAdminEvent(create(newest, "realmId", OperationType.ACTION, "realmId", "clientId", "userId2", "127.0.0.1", "/admin/realms/master", "error"), false);
testing().onAdminEvent(create("realmId2", OperationType.CREATE, "realmId2", "clientId", "userId", "127.0.0.1", "/admin/realms/master", "error"), false);
testing().onAdminEvent(create(oldest, "realmId", OperationType.CREATE, "realmId", "clientId2", "userId", "127.0.0.1", "/admin/realms/master", "error"), false);
testing().onAdminEvent(create("realmId", OperationType.CREATE, "realmId", "clientId", "userId2", "127.0.0.1", "/admin/realms/master", "error"), false);
Assert.assertEquals(6, testing().getAdminEvents(null, null, null, null, null, null, "/admin/*", null, null, null, null).size());
Assert.assertEquals(6, testing().getAdminEvents(null, null, null, null, null, null, "*/realms/*", null, null, null, null).size());
Assert.assertEquals(6, testing().getAdminEvents(null, null, null, null, null, null, "*/master", null, null, null, null).size());
Assert.assertEquals(6, testing().getAdminEvents(null, null, null, null, null, null, "/admin/realms/*", null, null, null, null).size());
Assert.assertEquals(6, testing().getAdminEvents(null, null, null, null, null, null, "*/realms/master", null, null, null, null).size());
Assert.assertEquals(6, testing().getAdminEvents(null, null, null, null, null, null, "/admin/*/master", null, null, null, null).size());
Assert.assertEquals(6, testing().getAdminEvents(null, null, null, null, null, null, "/ad*/*/master", null, null, null, null).size());
}
@Test
public void clear() {
testing().onAdminEvent(create(System.currentTimeMillis() - 30000, "realmId", OperationType.CREATE, "realmId", "clientId", "userId", "127.0.0.1", "/admin/realms/master", "error"), false);
testing().onAdminEvent(create(System.currentTimeMillis() - 20000, "realmId", OperationType.CREATE, "realmId", "clientId", "userId", "127.0.0.1", "/admin/realms/master", "error"), false);
testing().onAdminEvent(create(System.currentTimeMillis(), "realmId", OperationType.CREATE, "realmId", "clientId", "userId", "127.0.0.1", "/admin/realms/master", "error"), false);
testing().onAdminEvent(create(System.currentTimeMillis(), "realmId", OperationType.CREATE, "realmId", "clientId", "userId", "127.0.0.1", "/admin/realms/master", "error"), false);
testing().onAdminEvent(create(System.currentTimeMillis() - 30000, "realmId2", OperationType.CREATE, "realmId2", "clientId", "userId", "127.0.0.1", "/admin/realms/master", "error"), false);
testing().clearAdminEventStore("realmId");
Assert.assertEquals(1, testing().getAdminEvents(null, null, null, null, null, null, null, null, null, null, null).size());
}
@Test
public void clearOld() {
testing().onAdminEvent(create(System.currentTimeMillis() - 30000, "realmId", OperationType.CREATE, "realmId", "clientId", "userId", "127.0.0.1", "/admin/realms/master", "error"), false);
testing().onAdminEvent(create(System.currentTimeMillis() - 20000, "realmId", OperationType.CREATE, "realmId", "clientId", "userId", "127.0.0.1", "/admin/realms/master", "error"), false);
testing().onAdminEvent(create(System.currentTimeMillis(), "realmId", OperationType.CREATE, "realmId", "clientId", "userId", "127.0.0.1", "/admin/realms/master", "error"), false);
testing().onAdminEvent(create(System.currentTimeMillis(), "realmId", OperationType.CREATE, "realmId", "clientId", "userId", "127.0.0.1", "/admin/realms/master", "error"), false);
testing().onAdminEvent(create(System.currentTimeMillis() - 30000, "realmId", OperationType.CREATE, "realmId", "clientId", "userId", "127.0.0.1", "/admin/realms/master", "error"), false);
testing().clearAdminEventStore("realmId", System.currentTimeMillis() - 10000);
Assert.assertEquals(2, testing().getAdminEvents(null, null, null, null, null, null, null, null, null, null, null).size());
}
private AdminEventRepresentation create(String realmId, OperationType operation, String authRealmId, String authClientId, String authUserId, String authIpAddress, String resourcePath, String error) {
return create(System.currentTimeMillis(), realmId, operation, authRealmId, authClientId, authUserId, authIpAddress, resourcePath, error);
}
private AdminEventRepresentation create(Date date, String realmId, OperationType operation, String authRealmId, String authClientId, String authUserId, String authIpAddress, String resourcePath, String error) {
return create(date.getTime(), realmId, operation, authRealmId, authClientId, authUserId, authIpAddress, resourcePath, error);
}
/**
 * Builds a fully-populated admin event representation for the store tests.
 *
 * @param time event timestamp in epoch milliseconds
 */
private AdminEventRepresentation create(long time, String realmId, OperationType operation, String authRealmId, String authClientId, String authUserId, String authIpAddress, String resourcePath, String error) {
    // Auth context first, then the event that carries it.
    AuthDetailsRepresentation details = new AuthDetailsRepresentation();
    details.setRealmId(authRealmId);
    details.setClientId(authClientId);
    details.setUserId(authUserId);
    details.setIpAddress(authIpAddress);

    AdminEventRepresentation event = new AdminEventRepresentation();
    event.setTime(time);
    event.setRealmId(realmId);
    event.setOperationType(operation.toString());
    event.setAuthDetails(details);
    event.setResourcePath(resourcePath);
    event.setError(error);
    return event;
}
}
| {
"content_hash": "5902b74f105a2533046fcb0bc869f383",
"timestamp": "",
"source": "github",
"line_count": 200,
"max_line_length": 214,
"avg_line_length": 75.235,
"alnum_prop": 0.6745530670565562,
"repo_name": "agolPL/keycloak",
"id": "0db44be9582e231e79619c1988afd1e2d9d7bdbc",
"size": "15721",
"binary": false,
"copies": "22",
"ref": "refs/heads/master",
"path": "testsuite/integration-arquillian/tests/base/src/test/java/org/keycloak/testsuite/events/AdminEventStoreProviderTest.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "AMPL",
"bytes": "3104"
},
{
"name": "ApacheConf",
"bytes": "22819"
},
{
"name": "Batchfile",
"bytes": "2114"
},
{
"name": "CSS",
"bytes": "345395"
},
{
"name": "FreeMarker",
"bytes": "63757"
},
{
"name": "HTML",
"bytes": "452064"
},
{
"name": "Java",
"bytes": "10983060"
},
{
"name": "JavaScript",
"bytes": "737973"
},
{
"name": "Shell",
"bytes": "11085"
},
{
"name": "XSLT",
"bytes": "116830"
}
],
"symlink_target": ""
} |
using System.Net.Sockets;
using Kyru.Core;
using ProtoBuf;
namespace Kyru.Network.TcpMessages.ServerState
{
internal sealed class GetObjectState : IServerState
{
	private readonly NetworkStream stream;
	private readonly KyruApplication app;
	private readonly GetObjectRequest getObjectRequest;

	internal GetObjectState(NetworkStream stream, KyruApplication app, GetObjectRequest getObjectRequest)
	{
		this.stream = stream;
		this.app = app;
		this.getObjectRequest = getObjectRequest;
	}

	/// <summary>
	/// Looks up the requested object in local storage, writes a
	/// length-prefixed response header to the stream, and — when the object
	/// was found — follows it with the raw object bytes.
	/// </summary>
	/// <returns>Always null; both outcomes end the state sequence here.</returns>
	public IServerState Process()
	{
		var bytes = app.LocalObjectStorage.GetBytes(getObjectRequest.ObjectId);
		var found = bytes != null;

		var response = new GetObjectResponse();
		response.Error = found ? Error.Success : Error.NotFound;
		if (found)
			response.Length = (uint) bytes.Length;

		Serializer.SerializeWithLengthPrefix(stream, response, PrefixStyle.Base128);
		if (found)
			stream.Write(bytes, 0, bytes.Length);
		return null;
	}
}
} | {
"content_hash": "f93d641f2e853182659e5b0115754928",
"timestamp": "",
"source": "github",
"line_count": 42,
"max_line_length": 103,
"avg_line_length": 26,
"alnum_prop": 0.7243589743589743,
"repo_name": "zr40/kyru-dotnet",
"id": "073ec880dca94a21cabc1dc021c17f5efd481d0e",
"size": "1092",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Kyru/Network/TcpMessages/ServerState/GetObjectState.cs",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C#",
"bytes": "143850"
},
{
"name": "JavaScript",
"bytes": "6632"
}
],
"symlink_target": ""
} |
namespace $ {
	/**
	 * Type guard that narrows away `null` and `undefined`.
	 */
	export function $mol_guard_defined<T>(value: T): value is NonNullable<T> {
		// Loose inequality with null is true for both null and undefined.
		return value != null
	}
}
| {
"content_hash": "64df2bd41ba626fcd812eecb1ac38bea",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 75,
"avg_line_length": 28.4,
"alnum_prop": 0.6549295774647887,
"repo_name": "eigenmethod/mol",
"id": "38a587e990df59b0070c7045d40cea0391107f31",
"size": "142",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "guard/defined.ts",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "43617"
},
{
"name": "HTML",
"bytes": "11349"
},
{
"name": "JavaScript",
"bytes": "12995"
},
{
"name": "TypeScript",
"bytes": "600477"
}
],
"symlink_target": ""
} |
Some configurations are needed by the SDK and are independent of bot type (i.e.
Hubot, Botkit, Rasa etc).
At the time of this writing only the Hubot adapter is production ready, but more
are in development.
In local development, the following can be set in an `.env` file. In production
they would need to be set on server startup.
`*` Required settings
Environment Variable | Description
:---- | :----
`ROCKETCHAT_URL` * | URL of the Rocket.Chat instance to connect to. Can be specified as `host:port`, `http://host:port` or `https://host:port`.
`ROCKETCHAT_USE_SSL` | Force bot to connect with SSL. If unset, it will try and detect from URL protocol.
`ROCKETCHAT_AUTH` | Default is `password`. Set to 'ldap' to enable LDAP login for bot users.
`ROCKETCHAT_USER` * | The bot's username (account name users will summon the bot with). Must be registered on your Rocket.Chat server and granted `bot` role.
`ROCKETCHAT_PASSWORD` *| The bot user's password.
`LISTEN_ON_ALL_PUBLIC` | Stream callbacks receive messages from all public channels (true/false). Defaults to `false` (only rooms the bot has joined).
`ROCKETCHAT_ROOM` | Stream callbacks receive messages from these channel name/s. Defaults to `GENERAL`. Accepts comma separated list.
`RESPOND_TO_LIVECHAT` | Stream callbacks receive messages from Livechat (true/false). Defaults to `false`.
`RESPOND_TO_DM` | Stream callbacks receive DMs with bot (true/false). Defaults to `false`.
`RESPOND_TO_EDITED` | Stream callbacks receive edited messages (true/false). Defaults to `false`.
**SDK Development** |
`ROOM_CACHE_SIZE` | Size of cache (LRU) for room (ID or name) lookups.
`ROOM_CACHE_MAX_AGE` | Max age of cache for room lookups.
`DM_ROOM_CACHE_SIZE` | Size of cache for Direct Message room lookups.
`DM_ROOM_CACHE_MAX_AGE`| Max age of cache for DM lookups.
`INTEGRATION_ID` | ID applied to message object to integration source. Defaults to `js.SDK`
`ADMIN_USERNAME` | Admin username for API calls used in SDK tests.
`ADMIN_PASS` | Admin user password for API calls used in SDK tests.
**Hubot Specific** |
`HUBOT_NAME` | Name of the bot. Hubot listeners can respond to this.
`HUBOT_ALIAS` | Another name to respond to. If unset, the adapter sets the `ROCKETCHAT_USER` as an alias, to ensure bots respond when addressed using their username.
`EXTERNAL_SCRIPTS` | Hubot scripts to require as NPM modules. Used only in some Docker instances.
`HUBOT_LOG_LEVEL` | `debug`, `info`, `warning` or `error`. Default `info`.
## Common configuration
It is common to set up a bot to listen and respond to direct messages and all
new public channels and private groups. Use the following options:
- `LISTEN_ON_ALL_PUBLIC=true`
- `ROCKETCHAT_ROOM=''`
- `RESPOND_TO_DM=true`
Be aware you *must* add the bot's user as a member of the new private group(s)
before it will respond.
## Important notes
- `ROCKETCHAT_ROOM` should be set to empty (with `ROCKETCHAT_ROOM=''`) when using
`LISTEN_ON_ALL_PUBLIC`. This option also allows the bot to listen and respond to
messages _from all newly created private groups_ where the bot's user has been
added as a member.
| {
"content_hash": "a1ab422d96783e39aa99d84b64569cb8",
"timestamp": "",
"source": "github",
"line_count": 55,
"max_line_length": 174,
"avg_line_length": 58.45454545454545,
"alnum_prop": 0.7237947122861587,
"repo_name": "RocketChat/Rocket.Chat.Docs",
"id": "e539908182921dc13f67cf729c49dade864a3536",
"size": "3247",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "bots/configure-bot-environment/README.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "6066"
},
{
"name": "HTML",
"bytes": "6879"
},
{
"name": "JavaScript",
"bytes": "7173"
},
{
"name": "Ruby",
"bytes": "7462"
},
{
"name": "Shell",
"bytes": "360"
}
],
"symlink_target": ""
} |
#region Copyright
// Copyright (c) 2016 Daniel Alan Hill. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#endregion
using DanielAHill.Reflection;
// ReSharper disable once CheckNamespace
namespace System
{
public static class TypeExtensions
{
    /// <summary>
    /// Retrieves the <see cref="ITypeDetails"/> for <paramref name="type"/>
    /// via <see cref="TypeDetailsFactory"/>.
    /// </summary>
    public static ITypeDetails GetTypeDetails(this Type type)
    {
        var details = TypeDetailsFactory.Get(type);
        return details;
    }
}
public static class ObjectExtensions
{
    /// <summary>
    /// Retrieves the <see cref="ITypeDetails"/> for the runtime type of
    /// <paramref name="item"/>.
    /// </summary>
    /// <exception cref="ArgumentNullException"><paramref name="item"/> is null.</exception>
    public static ITypeDetails GetTypeDetails(this object item)
    {
        if (item == null)
        {
            throw new ArgumentNullException(nameof(item));
        }
        var runtimeType = item.GetType();
        return runtimeType.GetTypeDetails();
    }
}
}
| {
"content_hash": "8f661dff6de1ac46472be4f9a79d25be",
"timestamp": "",
"source": "github",
"line_count": 38,
"max_line_length": 76,
"avg_line_length": 31.657894736842106,
"alnum_prop": 0.6957605985037406,
"repo_name": "DanielAHill/Reflection",
"id": "e35c27dc81dc7eba8e40ec8ac58aff2e2db5ff12",
"size": "1205",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "src/DanielAHill.Reflection/TypeExtensions.cs",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C#",
"bytes": "27296"
},
{
"name": "PowerShell",
"bytes": "2951"
}
],
"symlink_target": ""
} |
<!-- Navigation -->
<nav class="navbar navbar-default navbar-fixed-top" id="georeport_nav">
<div class="navbar-header page-scroll" id="header_logo">
<button type="button" class="navbar-toggle" data-toggle="collapse" data-target="#bs-example-navbar-collapse-1">
<span class="sr-only">Toggle navigation</span>
<span class="icon-bar"></span>
<span class="icon-bar"></span>
<span class="icon-bar"></span>
</button>
<!--<a class="navbar-brand" href="#page-top">{{ site.title }}</a>
<a href="http://www.state.gov"><img src="img/doslogo5.png" alt="" height="65" width="65"></a>-->
<a href="http://www.state.gov"><img src="{{site.baseurl}}/img/dos_eagle.png" alt="Department of State Eagle logo" height="55" width="55"></a>
<a href="{{ site.baseurl }}/"><img src="{{site.baseurl}}/img/logos/2C_LogoWhite2.png" alt="Secondary Cities logo" height="45" width="180px" style="margin-left: 10px;"></a>
</div>
<div class="navbar-header page-scroll" id="city_title">
<div class="col12 clearfix " style="padding-bottom: 5px">
<div class="caption">
<div class="caption-content">
<!--<i class="fa fa-search-plus fa-3x"></i>-->
<i class="" ><h1 class="story-title">{{ page.title }}</h1></i>
</div>
</div>
</div>
<div class="tab-group col12 clearfix" style="margin-top: 8px;padding-bottom: 7px;text-transform:none">
<div class="slider tabs col10 pad0x margin1 mobile-cols space-bottom1 clearfix">
<!--<a id="id-edit" class="col3 page-fade-link" href="//osm.moabi.org/edit?editor=id#map=5/-2.855/22.830">Edit</a>-->
</div>
</div>
</div>
<div class="container" style="padding-left: 0px;height: 74px;">
<!-- Brand and toggle get grouped for better mobile display -->
<!-- Collect the nav links, forms, and other content for toggling -->
<div class="collapse navbar-collapse" id="bs-example-navbar-collapse-1">
<ul class="nav navbar-nav navbar-right" style="font-size:11.5px;padding-top: 15px">
<li class="hidden">
<a href="#page-top"></a>
</li>
<li class="page-scroll">
<a href="{{ site.baseurl }}/#about">About</a>
</li>
<li class="page-scroll">
<a href="{{ site.baseurl }}/faq/">FAQ</a>
</li>
<li class="page-scroll">
<a href="{{ site.baseurl }}/#2Cstory">Story</a>
</li>
<!--
<li class="page-scroll">
<a href="#portfolio">Cities</a>
</li>
-->
<li class="page-scroll">
<a href="{{ site.baseurl }}/#partners">Partners</a>
</li>
<li class="page-scroll">
<a href="{{ site.baseurl }}/#data">Data</a>
</li>
<li class="page-scroll">
<a href="{{site.baseurl}}/events/">Events</a>
</li>
<li class="page-scroll">
<a href="{{site.baseurl}}/resources/">Resources</a>
</li>
<!-- <li class="page-scroll">
<a href="#materials">Materials</a>
</li> -->
</ul>
</div>
<!-- /.navbar-collapse -->
</div>
<!-- /.container-fluid -->
</nav>
| {
"content_hash": "dcdc63a188c6d0253b620baf6a8fc8fb",
"timestamp": "",
"source": "github",
"line_count": 87,
"max_line_length": 183,
"avg_line_length": 43.275862068965516,
"alnum_prop": 0.46879150066401065,
"repo_name": "state-hiu/SecondaryCities2",
"id": "362afb7584fa92f788bab22fbcba1da67fd557d4",
"size": "3765",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "_includes/navbar_story.html",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "147553"
},
{
"name": "HTML",
"bytes": "420964"
},
{
"name": "JavaScript",
"bytes": "142897"
},
{
"name": "Less",
"bytes": "283280"
},
{
"name": "Ruby",
"bytes": "61"
},
{
"name": "SCSS",
"bytes": "171264"
}
],
"symlink_target": ""
} |
#region Copyright & License
// Copyright © 2012 - 2013 François Chabot, Yves Dierick
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#endregion
using System;
using NUnit.Framework.Constraints;
namespace Be.Stateless.BizTalk.Unit.Constraints
{
public static class ConstraintExtensions
{
	/// <summary>
	/// NUnit's <c>After</c> takes milliseconds as <c>int</c>; this overload
	/// accepts a <see cref="TimeSpan"/> delay instead.
	/// </summary>
	public static DelayedConstraint After(this Constraint constraint, TimeSpan delay)
	{
		var delayMilliseconds = Convert.ToInt32(delay.TotalMilliseconds);
		return constraint.After(delayMilliseconds);
	}

	/// <summary>
	/// <see cref="TimeSpan"/>-based overload of NUnit's <c>After</c> taking
	/// both a delay and a polling interval.
	/// </summary>
	public static DelayedConstraint After(this Constraint constraint, TimeSpan delay, TimeSpan pollingInterval)
	{
		var delayMilliseconds = Convert.ToInt32(delay.TotalMilliseconds);
		var pollingMilliseconds = Convert.ToInt32(pollingInterval.TotalMilliseconds);
		return constraint.After(delayMilliseconds, pollingMilliseconds);
	}
}
}
| {
"content_hash": "51c1007ab527673003b6c091550c7824",
"timestamp": "",
"source": "github",
"line_count": 36,
"max_line_length": 121,
"avg_line_length": 34.611111111111114,
"alnum_prop": 0.7495987158908507,
"repo_name": "icraftsoftware/BizTalk.Factory",
"id": "3daf2b255a9e15eac4d40401d1cf43d3d043a369",
"size": "1250",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/BizTalk.Unit/Unit/Constraints/ConstraintExtensions.cs",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ASP",
"bytes": "132"
},
{
"name": "C#",
"bytes": "4313312"
},
{
"name": "CSS",
"bytes": "10963"
},
{
"name": "HTML",
"bytes": "17209"
},
{
"name": "JavaScript",
"bytes": "55621"
},
{
"name": "PLSQL",
"bytes": "15448"
},
{
"name": "PowerShell",
"bytes": "20992"
},
{
"name": "TSQL",
"bytes": "102246"
},
{
"name": "XSLT",
"bytes": "49602"
}
],
"symlink_target": ""
} |
Home={init:function(){console.log("Objeto Literal Iniciado")}},Home.init(); | {
"content_hash": "902677e8be94a178e6a74bcd20117ffe",
"timestamp": "",
"source": "github",
"line_count": 1,
"max_line_length": 75,
"avg_line_length": 75,
"alnum_prop": 0.7333333333333333,
"repo_name": "stehleao/dctb-utfpr",
"id": "ac6b602b66416ff4d50e1d6b8011f0397eeb1a82",
"size": "75",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "programacao-web-2/tasks/stephanie-leao/gulp/js/script.min.js",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "170691"
},
{
"name": "CSS",
"bytes": "250512"
},
{
"name": "HTML",
"bytes": "364395"
},
{
"name": "JavaScript",
"bytes": "347246"
},
{
"name": "PHP",
"bytes": "283381"
},
{
"name": "Vue",
"bytes": "2902"
}
],
"symlink_target": ""
} |
"""
Simple utility for splitting user input. This is used by both inputsplitter and
prefilter.
Authors:
* Brian Granger
* Fernando Perez
"""
#-----------------------------------------------------------------------------
# Copyright (C) 2008-2011 The IPython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
import re
import sys
from IPython.utils import py3compat
from IPython.utils.encoding import get_stream_enc
#-----------------------------------------------------------------------------
# Main function
#-----------------------------------------------------------------------------
# RegExp for splitting line contents into pre-char//first word-method//rest.
# For clarity, each group in on one line.
# WARNING: update the regexp if the escapes in interactiveshell are changed, as
# they are hardwired in.
# Although it's not solely driven by the regex, note that:
# ,;/% only trigger if they are the first character on the line
# ! and !! trigger if they are first char(s) *or* follow an indent
# ? triggers as first or last char.
# Use a raw string: the pattern contains backslash escapes such as \s and \?,
# which are invalid string-literal escapes in a plain string (DeprecationWarning
# today, a SyntaxError in future Python versions).  re.VERBOSE lets the layout
# and inline comments stay.
line_split = re.compile(r"""
             ^(\s*)               # any leading space
             ([,;/%]|!!?|\?\??)?  # escape character or characters
             \s*(%{0,2}[\w\.\*]*) # function/method, possibly with leading %
                                  # to correctly treat things like '?%magic'
             (.*?$|$)             # rest of line
             """, re.VERBOSE)
def split_user_input(line, pattern=None):
    """Split user input into initial whitespace, escape character, function part
    and the rest.

    Parameters
    ----------
    line : str
        The raw input line.
    pattern : compiled regex, optional
        Alternative splitting pattern; defaults to the module-level
        ``line_split`` regex.

    Returns
    -------
    tuple of str
        ``(pre, esc, ifun, the_rest)``: leading whitespace, escape
        character(s) (or ''), the stripped function part, and the
        left-stripped remainder of the line.
    """
    # We need to ensure that the rest of this routine deals only with unicode
    encoding = get_stream_enc(sys.stdin, 'utf-8')
    line = py3compat.cast_unicode(line, encoding)

    if pattern is None:
        pattern = line_split
    match = pattern.match(line)
    if not match:
        # The regex did not match: fall back to a plain whitespace split.
        try:
            ifun, the_rest = line.split(None, 1)
        except ValueError:
            # Single token (or empty line): everything is the function part.
            ifun, the_rest = line, u''
        # Raw string fixes the invalid '\s' escape in the original pattern.
        pre = re.match(r'^(\s*)(.*)', line).groups()[0]
        esc = ""
    else:
        pre, esc, ifun, the_rest = match.groups()

    return pre, esc or '', ifun.strip(), the_rest.lstrip()
class LineInfo(object):
    """A single line of input plus the fields derived from splitting it.

    Attributes
    ----------
    line : str
        The original, raw line.
    continue_prompt : bool
        True when this line is a continuation in multiline input.
    pre : str
        Any leading whitespace.
    esc : str
        The escape character(s) found in ``pre``, or '' if there are none.
        May be '!!' or '??'; otherwise a single character.
    ifun : str
        The 'function part': the maximal initial sequence of valid python
        identifiers and the '.' character.  This is what is checked for
        alias and magic transformations, used for auto-calling, etc.
        Unlike Python identifiers it may start with "%" and contain "*".
    the_rest : str
        Everything else on the line.
    """

    def __init__(self, line, continue_prompt=False):
        self.line = line
        self.continue_prompt = continue_prompt
        parts = split_user_input(line)
        self.pre, self.esc, self.ifun, self.the_rest = parts
        self.pre_char = self.pre.strip()
        # Escape characters may not be preceded by whitespace.
        self.pre_whitespace = '' if self.pre_char else self.pre

    def ofind(self, ip):
        """Do a full, attribute-walking lookup of the ifun in the various
        namespaces for the given IPython InteractiveShell instance.

        Returns a dict with keys: {found, obj, ospace, ismagic}.

        Note: can cause state changes because of calling getattr, so should
        only be run if autocall is on and if the line hasn't matched any
        other, less dangerous handlers.
        """
        return ip._ofind(self.ifun)

    def __str__(self):
        return "LineInfo [%s|%s|%s|%s]" % (self.pre, self.esc, self.ifun, self.the_rest)
| {
"content_hash": "6b2e58fa7c6acf98c33e7b65493d373d",
"timestamp": "",
"source": "github",
"line_count": 138,
"max_line_length": 88,
"avg_line_length": 34.471014492753625,
"alnum_prop": 0.5642211477822157,
"repo_name": "mattvonrocketstein/smash",
"id": "55e76d9e89b340e918e5cf8f3785b6f92f45c033",
"size": "4775",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "smashlib/ipy3x/core/splitinput.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "162188"
},
{
"name": "HTML",
"bytes": "32106"
},
{
"name": "JavaScript",
"bytes": "1615935"
},
{
"name": "Makefile",
"bytes": "550"
},
{
"name": "Python",
"bytes": "4934398"
},
{
"name": "Shell",
"bytes": "2990"
}
],
"symlink_target": ""
} |
import argparse
from paac import train
def _parse_bool(value):
    """Parse a command-line boolean.

    argparse's ``type=bool`` treats any non-empty string — including the
    string 'False' — as True, so an explicit parser is required.
    """
    if isinstance(value, bool):
        return value
    lowered = value.strip().lower()
    if lowered in ('true', 't', 'yes', 'y', '1'):
        return True
    if lowered in ('false', 'f', 'no', 'n', '0'):
        return False
    raise argparse.ArgumentTypeError('expected a boolean, got %r' % value)


# Command-line options for PAAC training.
parser = argparse.ArgumentParser(description='parameters_setting')
parser.add_argument('--lr', type=float, default=0.00025, metavar='LR',
                    help='learning rate (default: 0.00025)')
parser.add_argument('--gamma', type=float, default=0.99, metavar='G',
                    help='discount factor for rewards (default: 0.99)')
parser.add_argument('--num-workers', type=int, default=4, metavar='N',
                    help='number of workers (default: 4)')
parser.add_argument('--num-envs', type=int, default=4, metavar='W',
                    help='number of environments a worker holds (default: 4)')
parser.add_argument('--n-steps', type=int, default=5, metavar='NS',
                    help='number of forward steps in PAAC (default: 5)')
parser.add_argument('--env-name', default='BreakoutDeterministic-v4', metavar='ENV',
                    help='environment to train on (default: BreakoutDeterministic-v4)')
parser.add_argument('--max-train-steps', type=int, default=500000, metavar='MS',
                    help='max training step to train PAAC (default: 500000)')
# Bug fix: the default is a float and fractional norms are legitimate, so the
# type must be float (it was int, which rejected values such as 2.5).
parser.add_argument('--clip-grad-norm', type=float, default=3.0, metavar='CGN',
                    help='globally clip gradient norm (default: 3.0)')
# Bug fix: was type=bool, which parses the string 'False' as True.
parser.add_argument('--record', type=_parse_bool, default=False, metavar='R',
                    help='record scores of every environment (default: False)')

if __name__ == "__main__":
    args = parser.parse_args()
    train(args)
"content_hash": "2cde8199f2678803b0d4e6f5a3c2159d",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 87,
"avg_line_length": 53.642857142857146,
"alnum_prop": 0.6344873501997337,
"repo_name": "pianomania/paac-pytorch",
"id": "295f719c18344f2540ca51349d390e0e35366782",
"size": "1502",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "main.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "11994"
}
],
"symlink_target": ""
} |
/*jshint node:true*/
'use strict';

// Express bootstrap: wire middleware, choose static roots by NODE_ENV,
// then start listening.
var express = require('express');
var bodyParser = require('body-parser');
var favicon = require('serve-favicon');
var logger = require('morgan');
var four0four = require('./utils/404')();

var app = express();
var port = process.env.PORT || 8001;
var environment = process.env.NODE_ENV;

// Common middleware — registration order matters: parsers before routes.
app.use(favicon(__dirname + '/favicon.ico'));
app.use(bodyParser.urlencoded({
    extended: true
}));
app.use(bodyParser.json());
app.use(logger('dev'));

// All API endpoints are mounted under /api.
app.use('/api', require('./routes'));

console.log('About to crank up node');
console.log('PORT=' + port);
console.log('NODE_ENV=' + environment);

// Invalid templateUrl requests under app/* get a 404.
var sendNotFound = function (req, res, next) {
    four0four.send404(req, res);
};

if (environment === 'build') {
    console.log('** BUILD **');
    app.use(express.static('./build/'));
    app.use('/app/*', sendNotFound);
    // Any deep link calls should return index.html
    app.use('/*', express.static('./build/index.html'));
} else {
    console.log('** DEV **');
    app.use(express.static('./src/client/'));
    app.use(express.static('./'));
    app.use(express.static('./tmp'));
    app.use('/app/*', sendNotFound);
    // Any deep link calls should return index.html
    app.use('/*', express.static('./src/client/index.html'));
}

app.listen(port, function () {
    console.log('Express server listening on port ' + port);
    console.log('env = ' + app.get('env') +
        '\n__dirname = ' + __dirname +
        '\nprocess.cwd = ' + process.cwd());
});
| {
"content_hash": "3aa24ba7fb0edc34f12208652390ecff",
"timestamp": "",
"source": "github",
"line_count": 57,
"max_line_length": 79,
"avg_line_length": 30.140350877192983,
"alnum_prop": 0.6216530849825378,
"repo_name": "fplgusmao/semanticCitiesAnalytics",
"id": "8eedd8d99fe11cf5c5a73485b2d5ca8bfe57536e",
"size": "1718",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/server/app.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "41493"
},
{
"name": "HTML",
"bytes": "69637"
},
{
"name": "JavaScript",
"bytes": "355447"
},
{
"name": "PHP",
"bytes": "1069"
}
],
"symlink_target": ""
} |
<?php
use Illuminate\Foundation\Testing\WithoutMiddleware;
use Illuminate\Foundation\Testing\DatabaseMigrations;
use Illuminate\Foundation\Testing\DatabaseTransactions;
/**
 * Browser-level acceptance tests for the project CRUD pages.
 *
 * Each test signs in through /login with a seeded account, checks the
 * dashboard chrome, then drives a single project page and asserts on the
 * text it renders.  Failures are reported by echoing the exception message.
 */
class ProjectPageTest extends TestCase
{
    /**
     * The project index page shows its headings and actions.
     */
    public function testProjectIndex()
    {
        try
        {
            $this->visit('/login')
            # Test auth/login page
            ->type('[email protected]', 'email')
            ->type('helloworld', 'password')
            ->press('Login')
            # Test dashboard
            ->see('All Projects')
            ->see('Create Project')
            ->see('Search Projects')
            ->dontSee('Welcome to StormSafe')
            # Test index page
            ->visit('/projects')
            ->See('All Projects')
            ->see('Create project')
            ->see('Search projects')
            ->dontSee('Welcome to StormSafe')
            ;
        }
        catch(Exception $e)
        {
            echo 'Message: '.$e->getMessage();
        }
    }

    /**
     * Submitting the create-project form with every field populated
     * reports success.
     */
    public function testProjectCreate()
    {
        try
        {
            $this->visit('/login')
            # Test auth/login page
            ->type('[email protected]', 'email')
            ->type('helloworld', 'password')
            ->press('Login')
            # Test dashboard
            ->see('All Projects')
            ->see('Create Project')
            ->see('Search Projects')
            ->dontSee('Welcome to StormSafe')
            # Test create project
            ->visit('/project/create')
            ->type('Oakland Airport', 'name')
            ->type('Runway Renovation', 'description')
            ->type('1 Airport Dr', 'address')
            ->type('Oakland', 'city')
            ->type('CA', 'state')
            ->type('94621', 'zipcode')
            ->type('37.7125689','latitude') # 37.7125689,
            ->type('-122.2197428','longitude') # -122.2197428,
            ->check('active') # true,
            ->type('Lorem Ipsum','tracking_number') # str_random(10),
            ->type('Lorem Ipsum','cost_center') # str_random(10),
            ->type('Lorem Ipsum','project_phase') # str_random(10),
            ->type('Lorem Ipsum','wdid_number') # str_random(10),
            ->type('Lorem Ipsum','cgp_number') # str_random(10),
            ->type('2','risk_level') # 2,
            ->type('Lorem Ipsum','owner_company_name') # str_random(10),
            ->type('Lorem Ipsum','owner_company_description') # str_random(10),
            ->type('94621','owner_company_zipcode') # 94621,
            ->type('Lorem Ipsum','owner_company_address') # str_random(10),
            ->type('Lorem Ipsum','owner_company_city') # str_random(10),
            ->type('CA','owner_company_state') # 'CA',
            ->type('Lorem Ipsum','owner_representative') # str_random(10),
            ->type('Lorem Ipsum','owner_title') # str_random(10),
            ->type('Lorem Ipsum','owner_phone') # str_random(10),
            ->type('Lorem Ipsum','owner_email') # str_random(10),
            ->type('Lorem Ipsum','contractor_company_name') # str_random(10),
            ->type('Lorem Ipsum','contractor_company_description') # str_random(10),
            ->type('94621','contractor_company_zipcode') # 94621,
            ->type('Lorem Ipsum','contractor_company_address') # str_random(10),
            ->type('Lorem Ipsum','contractor_company_city') # str_random(10),
            ->type('CA','contractor_company_state') # 'CA',
            ->type('Lorem Ipsum','contractor_representative') # str_random(10),
            ->type('Lorem Ipsum','contractor_title') # str_random(10),
            ->type('Lorem Ipsum','contractor_phone') # str_random(10),
            ->type('Lorem Ipsum','contractor_email') # str_random(10),
            ->type('Lorem Ipsum','wpcm_company_name') # str_random(10),
            ->type('Lorem Ipsum','wpcm_company_description') # str_random(10),
            ->type('94621','wpcm_company_zipcode') # 94621,
            ->type('Lorem Ipsum','wpcm_company_address') # str_random(10),
            ->type('Lorem Ipsum','wpcm_company_city') # str_random(10),
            ->type('CA','wpcm_company_state') # 'CA',
            ->type('Lorem Ipsum','wpcm_representative') # str_random(10),
            ->type('Lorem Ipsum','wpcm_title') # str_random(10),
            ->type('Lorem Ipsum','wpcm_phone') # str_random(10),
            ->type('Lorem Ipsum','wpcm_email') # str_random(10),
            ->type('Lorem Ipsum','qsp_company_name') # str_random(10),
            ->type('Lorem Ipsum','qsp_company_description') # str_random(10),
            ->type('94621','qsp_company_zipcode') # 94621,
            ->type('Lorem Ipsum','qsp_company_address') # str_random(10),
            ->type('Lorem Ipsum','qsp_company_city') # str_random(10),
            ->type('CA','qsp_company_state') # 'CA',
            ->type('Lorem Ipsum','qsp_representative') # str_random(10),
            ->type('Lorem Ipsum','qsp_title') # str_random(10),
            ->type('Lorem Ipsum','qsp_phone') # str_random(10),
            ->type('Lorem Ipsum','qsp_email') # Lorem Ipsum
            ->press('Create Project')
            ->see('Project created successfully!')
            /*
            */
            ;
        }
        catch(Exception $e)
        {
            echo 'Message: '.$e->getMessage();
        }
    }

    /**
     * The show page for project 1 renders.
     */
    public function testProjectShow()
    {
        try
        {
            $this->visit('/login')
            # Test auth/login page
            ->type('[email protected]', 'email')
            ->type('helloworld', 'password')
            ->press('Login')
            # Test dashboard
            ->see('All Projects')
            ->see('Create Project')
            ->see('Search Projects')
            ->dontSee('Welcome to StormSafe')
            # Test show project page
            ->visit('/project/show/1')
            ->see('Show Project')
            ;
        }
        catch(Exception $e)
        {
            echo 'Message: '.$e->getMessage();
        }
    }

    /**
     * Editing project 1 and saving reports success.
     */
    public function testProjectEdit()
    {
        try
        {
            $this->visit('/login')
            # Test auth/login page
            ->type('[email protected]', 'email')
            ->type('helloworld', 'password')
            ->press('Login')
            # Test dashboard
            ->see('All Projects')
            ->see('Create Project')
            ->see('Search Projects')
            ->dontSee('Welcome to StormSafe')
            # Test edit project page
            ->visit('/project/edit/1')
            ->see('Edit Project')
            ->type('Oakland Airport', 'name')
            ->type('Runway Renovation', 'description')
            ->type('1 Airport Dr', 'address')
            ->type('Oakland', 'city')
            ->type('CA', 'state')
            ->type(94621, 'zipcode')
            ->press('Save Changes')
            ->see('Project updated successfully!')
            ;
        }
        catch(Exception $e)
        {
            echo 'Message: '.$e->getMessage();
        }
    }

    /**
     * The delete-confirmation page for project 1 renders.
     */
    public function testProjectDelete()
    {
        try
        {
            $this->visit('/login')
            # Test auth/login page
            ->type('[email protected]', 'email')
            ->type('helloworld', 'password')
            ->press('Login')
            # Test dashboard
            ->see('All Projects')
            ->see('Create Project')
            ->see('Search Projects')
            ->dontSee('Welcome to StormSafe')
            # Test confirm-delete project page
            ->visit('/project/confirm-delete/1')
            ->see('Delete Project?')
            ;
        }
        catch(Exception $e)
        {
            echo 'Message: '.$e->getMessage();
        }
    }

    /**
     * The project search page renders.
     */
    public function testProjectSearch()
    {
        try
        {
            $this->visit('/login')
            # Test auth/login page
            ->type('[email protected]', 'email')
            ->type('helloworld', 'password')
            ->press('Login')
            # Test dashboard
            ->see('All Projects')
            ->see('Create Project')
            ->see('Search Projects')
            ->dontSee('Welcome to StormSafe')
            # Test search project page
            ->visit('/project/search')
            ->see('Search Projects')
            ;
        }
        catch(Exception $e)
        {
            echo 'Message: '.$e->getMessage();
        }
    }

    /**
     * The project show page includes the weather forecast widget.
     */
    public function testProjectForecast()
    {
        try
        {
            $this->visit('/login')
            # Test auth/login page
            ->type('[email protected]', 'email')
            ->type('helloworld', 'password')
            ->press('Login')
            # Test dashboard
            ->see('All Projects')
            ->see('Create Project')
            ->see('Search Projects')
            ->dontSee('Welcome to StormSafe')
            # Test search project page
            ->visit('/project/show/1')
            ->see('10-Day Forecast')
            ->see('Wunderground API')
            ->see('Forecast')
            ->see('Chance of Rain')
            ;
        }
        catch(Exception $e)
        {
            echo 'Message: '.$e->getMessage();
        }
    }
}
| {
"content_hash": "9ae6c85b074b65e15b800856c1994ff8",
"timestamp": "",
"source": "github",
"line_count": 271,
"max_line_length": 88,
"avg_line_length": 36.535055350553506,
"alnum_prop": 0.4551055448944551,
"repo_name": "walteryu/dwa15-p4",
"id": "25a561afc28319ea653c9b72a7ef7841255fddf8",
"size": "9901",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/ProjectPageTest.php",
"mode": "33188",
"license": "mit",
"language": [],
"symlink_target": ""
} |
package com.haulmont.cuba.core.sys.connectionpoolinfo;
import javax.management.*;
import java.util.regex.Pattern;
/**
 * Read-only descriptor of a JDBC connection pool as exposed over JMX:
 * the pool's name, its registered MBean, and the names of the MBean
 * attributes that report connection counts.
 */
public interface ConnectionPoolInfo {

    /**
     * @return connection pool name
     */
    String getPoolName();

    /**
     * @return registered MBean name for the connection pool
     */
    ObjectName getRegisteredMBeanName();

    /**
     * @return regex pattern used to locate the connection pool's ObjectName
     */
    Pattern getRegexPattern();

    /**
     * @return name of the active-connections attribute
     */
    String getActiveConnectionsAttrName();

    /**
     * @return name of the idle-connections attribute
     */
    String getIdleConnectionsAttrName();

    /**
     * @return name of the total-connections (active and idle) attribute
     */
    String getTotalConnectionsAttrName();
}
| {
"content_hash": "d0c4a7bcf96f5ddc8a937d00c8f6c245",
"timestamp": "",
"source": "github",
"line_count": 38,
"max_line_length": 78,
"avg_line_length": 21.763157894736842,
"alnum_prop": 0.6638452237001209,
"repo_name": "cuba-platform/cuba",
"id": "29ae46abeebe3c2b6e1f6bbb9fede98488763ddc",
"size": "1428",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "modules/core/src/com/haulmont/cuba/core/sys/connectionpoolinfo/ConnectionPoolInfo.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "77"
},
{
"name": "CSS",
"bytes": "68"
},
{
"name": "FreeMarker",
"bytes": "3996"
},
{
"name": "GAP",
"bytes": "20634"
},
{
"name": "Groovy",
"bytes": "897992"
},
{
"name": "HTML",
"bytes": "6464"
},
{
"name": "Java",
"bytes": "20605191"
},
{
"name": "PLSQL",
"bytes": "30678"
},
{
"name": "PLpgSQL",
"bytes": "1333"
},
{
"name": "SCSS",
"bytes": "306671"
},
{
"name": "Shell",
"bytes": "88"
},
{
"name": "XSLT",
"bytes": "63258"
}
],
"symlink_target": ""
} |
@echo off
REM Reset the local OpenFin installation: force-kill running OpenFin and
REM Node processes, then purge cached files, downloaded apps, runtimes and
REM logs under %localappdata%\OpenFin so the next launch starts clean.
taskkill/F /IM openfin.exe
taskkill /IM node.exe /F
REM For each folder: "del /q" removes loose files, then the FOR /D loop
REM recursively removes every subdirectory.
del /q "%localappdata%\OpenFin\cache\*"
FOR /D %%p IN ("%localappdata%\OpenFin\cache\*.*") DO rmdir "%%p" /s /q
del /q "%localappdata%\OpenFin\apps\*"
FOR /D %%p IN ("%localappdata%\OpenFin\apps\*.*") DO rmdir "%%p" /s /q
del /q "%localappdata%\OpenFin\runtime\*"
FOR /D %%p IN ("%localappdata%\OpenFin\runtime\*.*") DO rmdir "%%p" /s /q
del /q "%localappdata%\OpenFin\logs\*"
FOR /D %%p IN ("%localappdata%\OpenFin\logs\*.*") DO rmdir "%%p" /s /q
REM Finally remove the top-level cache file and the RVM executable itself.
del /q "%localappdata%\OpenFin\cache.dat"
del /q "%localappdata%\OpenFin\OpenFinRVM.exe"
| {
"content_hash": "eb54f74bd1c5dae63140a78b6d6a8710",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 73,
"avg_line_length": 30.3,
"alnum_prop": 0.641914191419142,
"repo_name": "owennw/OpenFinD3FC",
"id": "f98484b9293bfb6fd361271fbfff45796033c68f",
"size": "606",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "kill.bat",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "21484"
},
{
"name": "HTML",
"bytes": "12028"
},
{
"name": "JavaScript",
"bytes": "100543"
}
],
"symlink_target": ""
} |
// ----------------------------------------------------------------------------------
//
// Copyright Microsoft Corporation
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// ----------------------------------------------------------------------------------
using System;
using System.Reflection;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
// General Information about an assembly is controlled through the following
// set of attributes. Change these attribute values to modify the information
// associated with an assembly.
[assembly: AssemblyTitle("Microsoft Azure Powershell - Key Vault")]
[assembly: AssemblyCompany(Microsoft.WindowsAzure.Commands.Common.AzurePowerShell.AssemblyCompany)]
[assembly: AssemblyProduct(Microsoft.WindowsAzure.Commands.Common.AzurePowerShell.AssemblyProduct)]
[assembly: AssemblyCopyright(Microsoft.WindowsAzure.Commands.Common.AzurePowerShell.AssemblyCopyright)]

// Types are not exposed to COM, and the assembly is not CLS-compliant.
[assembly: ComVisible(false)]
[assembly: CLSCompliant(false)]
[assembly: Guid("2994548F-69B9-4DC2-8D19-52CC0C0C85BC")]
[assembly: AssemblyVersion("2.4.0")]
[assembly: AssemblyFileVersion("2.4.0")]

// Grant the unit-test assembly access to internals. For signed builds the
// friend assembly must be identified by its full public key.
#if SIGN
[assembly: InternalsVisibleTo("Microsoft.Azure.Commands.KeyVault.Test, PublicKey=0024000004800000940000000602000000240000525341310004000001000100b5fc90e7027f67871e773a8fde8938c81dd402ba65b9201d60593e96c492651e889cc13f1415ebb53fac1131ae0bd333c5ee6021672d9718ea31a8aebd0da0072f25d87dba6fc90ffd598ed4da35e44c398c454307e8e33b8426143daec9f596836f97c8f74750e5975c64e2189f45def46b2a2b1247adc3652bf5c308055da9")]
#else
[assembly: InternalsVisibleTo("Microsoft.Azure.Commands.KeyVault.Test")]
#endif
| {
"content_hash": "496d5a34499d3d3de2f244d29040e89b",
"timestamp": "",
"source": "github",
"line_count": 38,
"max_line_length": 404,
"avg_line_length": 56.13157894736842,
"alnum_prop": 0.763244256915143,
"repo_name": "alfantp/azure-powershell",
"id": "a7f236f5fb6e36be782c61ae70a16a594112fcd2",
"size": "2135",
"binary": false,
"copies": "1",
"ref": "refs/heads/dev",
"path": "src/ResourceManager/KeyVault/Commands.KeyVault/Properties/AssemblyInfo.cs",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "16509"
},
{
"name": "C#",
"bytes": "31784205"
},
{
"name": "HTML",
"bytes": "209"
},
{
"name": "JavaScript",
"bytes": "4979"
},
{
"name": "PHP",
"bytes": "41"
},
{
"name": "PowerShell",
"bytes": "3567759"
},
{
"name": "Shell",
"bytes": "50"
},
{
"name": "XSLT",
"bytes": "6114"
}
],
"symlink_target": ""
} |
<?php
/* WebProfilerBundle:Profiler:table.html.twig */
/*
 * NOTE(review): auto-generated Twig template cache for
 * "WebProfilerBundle:Profiler:table.html.twig". Do not edit by hand;
 * regenerate by recompiling templates (e.g. clearing the Symfony cache).
 * Renders a two-column Key/Value table from the "data" context variable,
 * with an optional CSS class from the "class" context variable.
 */
class __TwigTemplate_5399e58eed2fce5aee4cccde610840c83b423806050c8e4f2805f4de18548c6d extends Twig_Template
{
    public function __construct(Twig_Environment $env)
    {
        parent::__construct($env);

        // This template extends nothing and defines no named blocks.
        $this->parent = false;

        $this->blocks = array(
        );
    }

    protected function doDisplay(array $context, array $blocks = array())
    {
        // line 1
        echo "<table ";
        // Emit an HTML-escaped class attribute only when "class" was passed in.
        if (array_key_exists("class", $context)) {
            echo "class='";
            echo twig_escape_filter($this->env, (isset($context["class"]) ? $context["class"] : $this->getContext($context, "class")), "html", null, true);
            echo "'";
        }
        echo " >
    <thead>
        <tr>
            <th scope=\"col\" style=\"width: 25%\">Key</th>
            <th scope=\"col\" style=\"width: 75%\">Value</th>
        </tr>
    </thead>
    <tbody>
    ";
        // line 9
        // Iterate the keys of "data" in sorted order; one table row per key.
        $context['_parent'] = (array) $context;
        $context['_seq'] = twig_ensure_traversable(twig_sort_filter(twig_get_array_keys_filter((isset($context["data"]) ? $context["data"] : $this->getContext($context, "data")))));
        foreach ($context['_seq'] as $context["_key"] => $context["key"]) {
            // line 10
            echo "        <tr>
            <th>";
            // line 11
            echo twig_escape_filter($this->env, $context["key"], "html", null, true);
            echo "</th>
            <td><pre>";
            // line 12
            // Values are pretty-printed by the profiler extension's dumper.
            echo twig_escape_filter($this->env, $this->env->getExtension('profiler')->dumpValue($this->getAttribute((isset($context["data"]) ? $context["data"] : $this->getContext($context, "data")), $context["key"], array(), "array")), "html", null, true);
            echo "</pre></td>
        </tr>
    ";
        }
        $_parent = $context['_parent'];
        unset($context['_seq'], $context['_iterated'], $context['_key'], $context['key'], $context['_parent'], $context['loop']);
        $context = array_intersect_key($context, $_parent) + $_parent;
        // line 15
        echo "    </tbody>
</table>
";
    }

    public function getTemplateName()
    {
        return "WebProfilerBundle:Profiler:table.html.twig";
    }

    public function isTraitable()
    {
        return false;
    }

    public function getDebugInfo()
    {
        // Maps generated-PHP line numbers back to template line numbers.
        return array ( 55 => 15, 46 => 12, 42 => 11, 39 => 10, 35 => 9, 19 => 1,);
    }
}
| {
"content_hash": "b575d4fad149202c6529f6fdee73a6bb",
"timestamp": "",
"source": "github",
"line_count": 74,
"max_line_length": 257,
"avg_line_length": 33.62162162162162,
"alnum_prop": 0.5160771704180064,
"repo_name": "quepasso/advert",
"id": "f3424c80e5b8f87b0dee24c8f980ffbe002ebb3d",
"size": "2488",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/cache/dev/twig/53/99/e58eed2fce5aee4cccde610840c83b423806050c8e4f2805f4de18548c6d.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "58376"
},
{
"name": "JavaScript",
"bytes": "31605"
},
{
"name": "PHP",
"bytes": "114420"
}
],
"symlink_target": ""
} |
package org.apache.camel.component.avro;
import org.apache.avro.Protocol;
import org.apache.avro.Schema;
import org.apache.camel.AsyncEndpoint;
import org.apache.camel.Component;
import org.apache.camel.Consumer;
import org.apache.camel.Exchange;
import org.apache.camel.ExchangePattern;
import org.apache.camel.Processor;
import org.apache.camel.impl.DefaultEndpoint;
import org.apache.camel.spi.UriEndpoint;
import org.apache.camel.spi.UriParam;
/**
 * Working with Apache Avro for data serialization.
 */
@UriEndpoint(firstVersion = "2.10.0", scheme = "avro", title = "Avro", syntax = "avro:transport:host:port/messageName", consumerClass = AvroConsumer.class, label = "messaging,transformation")
public abstract class AvroEndpoint extends DefaultEndpoint implements AsyncEndpoint {

    @UriParam
    private AvroConfiguration configuration;

    /**
     * Constructs a fully-initialized DefaultEndpoint instance. This is the
     * preferred method of constructing an object from Java code (as opposed to
     * Spring beans, etc.).
     *
     * @param endpointUri the full URI used to create this endpoint
     * @param component the component that created this endpoint
     * @param configuration the Avro configuration for this endpoint
     */
    public AvroEndpoint(String endpointUri, Component component, AvroConfiguration configuration) {
        super(endpointUri, component);
        this.configuration = configuration;
    }

    /**
     * Creates an exchange carrying an incoming Avro RPC request.
     *
     * @param message the Avro protocol message being invoked
     * @param request the request payload, stored as the in-message body
     * @return a new exchange: InOnly when the message declares a null response
     *         schema (one-way message), InOut otherwise
     */
    public Exchange createExchange(Protocol.Message message, Object request) {
        ExchangePattern pattern = ExchangePattern.InOut;
        // Fix: getResponse() returns a Schema, so comparing it directly to the
        // Schema.Type enum constant was always false and one-way messages were
        // incorrectly treated as request/reply. Compare the schema's type.
        if (message.getResponse().getType().equals(Schema.Type.NULL)) {
            pattern = ExchangePattern.InOnly;
        }

        Exchange exchange = createExchange(pattern);
        exchange.getIn().setBody(request);
        exchange.getIn().setHeader(AvroConstants.AVRO_MESSAGE_NAME, message.getName());
        return exchange;
    }

    @Override
    public boolean isSingleton() {
        // A single endpoint instance may be shared by multiple consumers/producers.
        return true;
    }

    /**
     * Creates a new <a
     * href="http://camel.apache.org/event-driven-consumer.html">Event
     * Driven Consumer</a> which consumes messages from the endpoint using the
     * given processor
     *
     * @param processor the given processor
     * @return a newly created consumer
     * @throws Exception can be thrown
     */
    @Override
    public Consumer createConsumer(Processor processor) throws Exception {
        return new AvroConsumer(this, processor);
    }

    /**
     * @return the endpoint's Avro configuration
     */
    public AvroConfiguration getConfiguration() {
        return configuration;
    }
}
| {
"content_hash": "b38dc8d655383d2c47ed71ff41445ab7",
"timestamp": "",
"source": "github",
"line_count": 73,
"max_line_length": 191,
"avg_line_length": 34.397260273972606,
"alnum_prop": 0.7100756670649144,
"repo_name": "onders86/camel",
"id": "9dfd2a8523eb1ab14e9086fa39717f5c90141a42",
"size": "3314",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "components/camel-avro/src/main/java/org/apache/camel/component/avro/AvroEndpoint.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Apex",
"bytes": "6519"
},
{
"name": "Batchfile",
"bytes": "6512"
},
{
"name": "CSS",
"bytes": "30373"
},
{
"name": "Elm",
"bytes": "10852"
},
{
"name": "FreeMarker",
"bytes": "11410"
},
{
"name": "Groovy",
"bytes": "54390"
},
{
"name": "HTML",
"bytes": "190929"
},
{
"name": "Java",
"bytes": "69972191"
},
{
"name": "JavaScript",
"bytes": "90399"
},
{
"name": "Makefile",
"bytes": "513"
},
{
"name": "Python",
"bytes": "36"
},
{
"name": "Ruby",
"bytes": "4802"
},
{
"name": "Scala",
"bytes": "323702"
},
{
"name": "Shell",
"bytes": "23616"
},
{
"name": "Tcl",
"bytes": "4974"
},
{
"name": "Thrift",
"bytes": "6979"
},
{
"name": "XQuery",
"bytes": "546"
},
{
"name": "XSLT",
"bytes": "285105"
}
],
"symlink_target": ""
} |
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!-- NewPage -->
<html lang="it">
<head>
<!-- Generated by javadoc (version 1.7.0_03) on Fri Mar 28 15:35:32 CET 2014 -->
<title>Uses of Class jade.domain.FIPAAgentManagement.NotRegistered (JADE v4.3.2 API)</title>
<meta name="date" content="2014-03-28">
<link rel="stylesheet" type="text/css" href="../../../../stylesheet.css" title="Style">
</head>
<body>
<script type="text/javascript"><!--
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="Uses of Class jade.domain.FIPAAgentManagement.NotRegistered (JADE v4.3.2 API)";
}
//-->
</script>
<noscript>
<div>JavaScript is disabled on your browser.</div>
</noscript>
<!-- ========= START OF TOP NAVBAR ======= -->
<div class="topNav"><a name="navbar_top">
<!-- -->
</a><a href="#skip-navbar_top" title="Skip navigation links"></a><a name="navbar_top_firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../overview-summary.html">Overview</a></li>
<li><a href="../package-summary.html">Package</a></li>
<li><a href="../../../../jade/domain/FIPAAgentManagement/NotRegistered.html" title="class in jade.domain.FIPAAgentManagement">Class</a></li>
<li class="navBarCell1Rev">Use</li>
<li><a href="../package-tree.html">Tree</a></li>
<li><a href="../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../index-all.html">Index</a></li>
<li><a href="../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li>Prev</li>
<li>Next</li>
</ul>
<ul class="navList">
<li><a href="../../../../index.html?jade/domain/FIPAAgentManagement/class-use/NotRegistered.html" target="_top">Frames</a></li>
<li><a href="NotRegistered.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_top">
<li><a href="../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_top");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip-navbar_top">
<!-- -->
</a></div>
<!-- ========= END OF TOP NAVBAR ========= -->
<div class="header">
<h2 title="Uses of Class jade.domain.FIPAAgentManagement.NotRegistered" class="title">Uses of Class<br>jade.domain.FIPAAgentManagement.NotRegistered</h2>
</div>
<div class="classUseContainer">No usage of jade.domain.FIPAAgentManagement.NotRegistered</div>
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<div class="bottomNav"><a name="navbar_bottom">
<!-- -->
</a><a href="#skip-navbar_bottom" title="Skip navigation links"></a><a name="navbar_bottom_firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../overview-summary.html">Overview</a></li>
<li><a href="../package-summary.html">Package</a></li>
<li><a href="../../../../jade/domain/FIPAAgentManagement/NotRegistered.html" title="class in jade.domain.FIPAAgentManagement">Class</a></li>
<li class="navBarCell1Rev">Use</li>
<li><a href="../package-tree.html">Tree</a></li>
<li><a href="../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../index-all.html">Index</a></li>
<li><a href="../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li>Prev</li>
<li>Next</li>
</ul>
<ul class="navList">
<li><a href="../../../../index.html?jade/domain/FIPAAgentManagement/class-use/NotRegistered.html" target="_top">Frames</a></li>
<li><a href="NotRegistered.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_bottom">
<li><a href="../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_bottom");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip-navbar_bottom">
<!-- -->
</a></div>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
<p class="legalCopy"><small><center>These are the official <i><a href=http://jade.tilab.com target=top>JADE</a></i> API. For these API backward compatibility is guaranteed accross JADE versions</center></small></p>
</body>
</html>
| {
"content_hash": "6d3ca366a3e95e1646afa3d73dfcfd29",
"timestamp": "",
"source": "github",
"line_count": 116,
"max_line_length": 214,
"avg_line_length": 39.1551724137931,
"alnum_prop": 0.6184500220167327,
"repo_name": "tomkren/pikater",
"id": "c306aa781a3c9a1d92962046436ce9d226c72ecf",
"size": "4542",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/jade-4.3.2/doc/api/jade/domain/FIPAAgentManagement/class-use/NotRegistered.html",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "2720"
},
{
"name": "CLIPS",
"bytes": "8093"
},
{
"name": "CSS",
"bytes": "17962"
},
{
"name": "HTML",
"bytes": "14268583"
},
{
"name": "Haskell",
"bytes": "1632"
},
{
"name": "Java",
"bytes": "17530800"
},
{
"name": "Makefile",
"bytes": "3240"
},
{
"name": "Shell",
"bytes": "1665"
}
],
"symlink_target": ""
} |
"""The frontend for the Mojo bindings system."""
import os
import sys
from optparse import OptionParser
from parser import mojo_parser
from parser import mojo_translate
from generators import mojom_data
from generators import mojom_cpp_generator
def Main():
  """Command-line entry point: parse options, then run each .mojom file
  through the parser -> translator -> data-model -> C++ generator chain."""
  option_parser = OptionParser(usage="usage: %prog [options] filename1 [filename2...]")
  option_parser.add_option("-i", "--include_dir", dest="include_dir", default=".",
                           help="specify directory for #includes")
  option_parser.add_option("-o", "--output_dir", dest="output_dir", default=".",
                           help="specify output directory")
  options, filenames = option_parser.parse_args()

  # At least one input file is required.
  if not filenames:
    option_parser.print_help()
    sys.exit(1)

  for path in filenames:
    module_name = os.path.splitext(os.path.basename(path))[0]
    # TODO(darin): There's clearly too many layers of translation here! We can
    # at least avoid generating the serialized Mojom IR.
    parse_tree = mojo_parser.Parse(path)
    mojom_ir = mojo_translate.Translate(parse_tree, module_name)
    module = mojom_data.ModuleFromData(mojom_ir)
    generator = mojom_cpp_generator.CPPGenerator(
        module, options.include_dir, options.output_dir)
    generator.GenerateFiles()


if __name__ == '__main__':
  Main()
| {
"content_hash": "a270df33bd8e5b6aac09e756de6284df",
"timestamp": "",
"source": "github",
"line_count": 38,
"max_line_length": 80,
"avg_line_length": 32.23684210526316,
"alnum_prop": 0.676734693877551,
"repo_name": "cvsuser-chromium/chromium",
"id": "567adf7113c414e78c71ad893e54d5e3df427124",
"size": "1410",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "mojo/public/bindings/mojo_idl.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "AppleScript",
"bytes": "6973"
},
{
"name": "Assembly",
"bytes": "36421"
},
{
"name": "C",
"bytes": "6924841"
},
{
"name": "C++",
"bytes": "179649999"
},
{
"name": "CSS",
"bytes": "812951"
},
{
"name": "Java",
"bytes": "3768838"
},
{
"name": "JavaScript",
"bytes": "8338074"
},
{
"name": "Makefile",
"bytes": "52980"
},
{
"name": "Objective-C",
"bytes": "819293"
},
{
"name": "Objective-C++",
"bytes": "6453781"
},
{
"name": "PHP",
"bytes": "61320"
},
{
"name": "Perl",
"bytes": "17897"
},
{
"name": "Python",
"bytes": "5640877"
},
{
"name": "Rebol",
"bytes": "262"
},
{
"name": "Shell",
"bytes": "648699"
},
{
"name": "XSLT",
"bytes": "418"
},
{
"name": "nesC",
"bytes": "15926"
}
],
"symlink_target": ""
} |
using System;
using System.Collections.Generic;
using System.Text;
namespace HockeyApp.Common.Enums
{
    /// <summary>
    /// Fields by which tournament announcements can be sorted.
    /// NOTE(review): "Announsement" is a misspelling of "Announcement";
    /// kept as-is because renaming the public type would break callers.
    /// </summary>
    public enum TournamentAnnounsementSorting
    {
        /// <summary>Sort by announcement name.</summary>
        Name,
        /// <summary>Sort by creation date.</summary>
        CreateDate
    }
}
| {
"content_hash": "af80b793ecec9e0e9f51d859f12803cc",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 45,
"avg_line_length": 16.416666666666668,
"alnum_prop": 0.6954314720812182,
"repo_name": "ekonor/hokeyapp",
"id": "9705a4f35b9bd17eecb3bd52f052a6de5f515ba1",
"size": "199",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "core/HockeyApp.Common/Enums/TournamentAnnounsementSorting.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C#",
"bytes": "2289108"
},
{
"name": "HTML",
"bytes": "608"
},
{
"name": "PLpgSQL",
"bytes": "3320"
},
{
"name": "SQLPL",
"bytes": "4726"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import
import itertools
import json
import os
import shutil
import tempfile
import time
import unittest
import caffe_pb2
import math
# Find the best implementation available
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
from bs4 import BeautifulSoup
from digits.config import config_value
import digits.dataset.images.classification.test_views
import digits.test_views
from digits import test_utils
import digits.webapp
from digits.frameworks import CaffeFramework
from google.protobuf import text_format
# May be too short on a slow system
TIMEOUT_DATASET = 45
TIMEOUT_MODEL = 60
################################################################################
# Base classes (they don't start with "Test" so nose won't run them)
################################################################################
class BaseViewsTest(digits.test_views.BaseViewsTest):
    """
    Provides some functions

    Holds one minimal network definition per supported framework plus
    classmethod helpers that wrap the generic job helpers for 'models' jobs.
    """
    # Minimal Caffe network: a single InnerProduct layer, with loss/accuracy/
    # softmax heads gated to the train/val/deploy stages respectively.
    CAFFE_NETWORK = \
        """
layer {
  name: "hidden"
  type: 'InnerProduct'
  bottom: "data"
  top: "output"
}
layer {
  name: "loss"
  type: "SoftmaxWithLoss"
  bottom: "output"
  bottom: "label"
  top: "loss"
  exclude { stage: "deploy" }
}
layer {
  name: "accuracy"
  type: "Accuracy"
  bottom: "output"
  bottom: "label"
  top: "accuracy"
  include { stage: "val" }
}
layer {
  name: "softmax"
  type: "Softmax"
  bottom: "output"
  top: "softmax"
  include { stage: "deploy" }
}
"""

    # Equivalent minimal Torch model: flatten the input, one zero-initialised
    # Linear layer, log-softmax output.
    TORCH_NETWORK = \
        """
return function(p)
    -- adjust to number of classes
    local nclasses = p.nclasses or 1
    -- model should adjust to any 3D input
    local nDim = 1
    if p.inputShape then p.inputShape:apply(function(x) nDim=nDim*x end) end
    local model = nn.Sequential()
    model:add(nn.View(-1):setNumInputDims(3)) -- c*h*w -> chw (flattened)
    -- set all weights and biases to zero as this speeds learning up
    -- for the type of problem we're trying to solve in this test
    local linearLayer = nn.Linear(nDim, nclasses)
    linearLayer.weight:fill(0)
    linearLayer.bias:fill(0)
    model:add(linearLayer) -- chw -> nclasses
    model:add(nn.LogSoftMax())
    return {
        model = model
    }
end
"""

    # Equivalent minimal TensorFlow model: flatten the input, one
    # zero-initialised affine layer.
    TENSORFLOW_NETWORK = \
        """
class UserModel(Tower):

    @model_property
    def inference(self):
        ninputs = self.input_shape[0] * self.input_shape[1] * self.input_shape[2]
        W = tf.get_variable('W', [ninputs, self.nclasses], initializer=tf.constant_initializer(0.0))
        b = tf.get_variable('b', [self.nclasses], initializer=tf.constant_initializer(0.0)),
        model = tf.reshape(self.x, shape=[-1, ninputs])
        model = tf.add(tf.matmul(model, W), b)
        return model

    @model_property
    def loss(self):
        loss = digits.classification_loss(self.inference, self.y)
        return loss
"""

    @classmethod
    def model_exists(cls, job_id):
        # True if a model job with this id exists
        return cls.job_exists(job_id, 'models')

    @classmethod
    def model_status(cls, job_id):
        # Status string of a model job
        return cls.job_status(job_id, 'models')

    @classmethod
    def model_info(cls, job_id):
        # Full info of a model job
        return cls.job_info(job_id, 'models')

    @classmethod
    def abort_model(cls, job_id):
        # Abort a running model job (tests compare the result to 200)
        return cls.abort_job(job_id, job_type='models')

    @classmethod
    def model_wait_completion(cls, job_id, **kwargs):
        # Block until the model job finishes, defaulting the timeout to
        # TIMEOUT_MODEL when the caller did not supply one.
        kwargs['job_type'] = 'models'
        if 'timeout' not in kwargs:
            kwargs['timeout'] = TIMEOUT_MODEL
        return cls.job_wait_completion(job_id, **kwargs)

    @classmethod
    def delete_model(cls, job_id):
        # Delete a model job (tests compare the result to 200)
        return cls.delete_job(job_id, job_type='models')

    @classmethod
    def network(cls):
        # Return the network definition matching cls.FRAMEWORK, which is
        # expected to be set by the concrete test subclass.
        if cls.FRAMEWORK == 'torch':
            return cls.TORCH_NETWORK
        elif cls.FRAMEWORK == 'caffe':
            return cls.CAFFE_NETWORK
        elif cls.FRAMEWORK == 'tensorflow':
            return cls.TENSORFLOW_NETWORK
        else:
            raise Exception('Unknown cls.FRAMEWORK "%s"' % cls.FRAMEWORK)
class BaseViewsTestWithDataset(BaseViewsTest,
                               digits.dataset.images.classification.test_views.BaseViewsTestWithDataset):
    """
    Provides a dataset

    Tracks every model created through create_model() so tearDownClass can
    delete them.
    """

    # Inherited classes may want to override these attributes
    CROP_SIZE = None
    TRAIN_EPOCHS = 1
    SHUFFLE = False
    LR_POLICY = None
    LR_MULTISTEP_VALUES = None
    LEARNING_RATE = None
    # Data-augmentation settings; None means "omit from the POST data"
    AUG_FLIP = None
    AUG_QUAD_ROT = None
    AUG_ROT = None
    AUG_SCALE = None
    AUG_NOISE = None
    AUG_CONTRAST = None
    AUG_WHITENING = None
    AUG_HSV_USE = None
    AUG_HSV_H = None
    AUG_HSV_S = None
    AUG_HSV_V = None
    OPTIMIZER = None

    @classmethod
    def setUpClass(cls):
        super(BaseViewsTestWithDataset, cls).setUpClass()
        # ids of models created during the tests, cleaned up in tearDownClass
        cls.created_models = []

    @classmethod
    def tearDownClass(cls):
        # delete any created datasets
        for job_id in cls.created_models:
            cls.delete_model(job_id)
        super(BaseViewsTestWithDataset, cls).tearDownClass()

    @classmethod
    def create_model(cls, network=None, **kwargs):
        """
        Create a model
        Returns the job_id (or a list of job ids if the JSON response
        contains a 'jobs' array)
        Raise RuntimeError if job fails to create

        Keyword arguments:
        network -- network definition; defaults to cls.network()
        **kwargs -- data to be sent with POST request; the special key
                    'json' requests a JSON response instead of a redirect
        """
        if network is None:
            network = cls.network()
        # Baseline form fields; class-level attributes fill in the defaults
        data = {
            'model_name': 'test_model',
            'group_name': 'test_group',
            'dataset': cls.dataset_id,
            'method': 'custom',
            'custom_network': network,
            'batch_size': 10,
            'train_epochs': cls.TRAIN_EPOCHS,
            'framework': cls.FRAMEWORK,
            'random_seed': 0xCAFEBABE,
            'shuffle': 'true' if cls.SHUFFLE else 'false'
        }
        # Optional fields: only sent when the test class defines them
        if cls.CROP_SIZE is not None:
            data['crop_size'] = cls.CROP_SIZE
        if cls.LR_POLICY is not None:
            data['lr_policy'] = cls.LR_POLICY
        if cls.LEARNING_RATE is not None:
            data['learning_rate'] = cls.LEARNING_RATE
        if cls.LR_MULTISTEP_VALUES is not None:
            data['lr_multistep_values'] = cls.LR_MULTISTEP_VALUES
        if cls.AUG_FLIP is not None:
            data['aug_flip'] = cls.AUG_FLIP
        if cls.AUG_QUAD_ROT is not None:
            data['aug_quad_rot'] = cls.AUG_QUAD_ROT
        if cls.AUG_ROT is not None:
            data['aug_rot'] = cls.AUG_ROT
        if cls.AUG_SCALE is not None:
            data['aug_scale'] = cls.AUG_SCALE
        if cls.AUG_NOISE is not None:
            data['aug_noise'] = cls.AUG_NOISE
        if cls.AUG_CONTRAST is not None:
            data['aug_contrast'] = cls.AUG_CONTRAST
        if cls.AUG_WHITENING is not None:
            data['aug_whitening'] = cls.AUG_WHITENING
        if cls.AUG_HSV_USE is not None:
            data['aug_hsv_use'] = cls.AUG_HSV_USE
        if cls.AUG_HSV_H is not None:
            data['aug_hsv_h'] = cls.AUG_HSV_H
        if cls.AUG_HSV_S is not None:
            data['aug_hsv_s'] = cls.AUG_HSV_S
        if cls.AUG_HSV_V is not None:
            data['aug_hsv_v'] = cls.AUG_HSV_V
        if cls.OPTIMIZER is not None:
            data['solver_type'] = cls.OPTIMIZER
        # Caller-supplied fields override everything above
        data.update(kwargs)

        request_json = data.pop('json', False)
        url = '/models/images/classification'
        if request_json:
            url += '.json'

        rv = cls.app.post(url, data=data)

        if request_json:
            if rv.status_code != 200:
                print json.loads(rv.data)
                raise RuntimeError('Model creation failed with %s' % rv.status_code)
            data = json.loads(rv.data)
            # A multi-GPU/grid request may return several jobs at once
            if 'jobs' in data.keys():
                return [j['id'] for j in data['jobs']]
            else:
                return data['id']

        # expect a redirect
        if not 300 <= rv.status_code <= 310:
            print 'Status code:', rv.status_code
            s = BeautifulSoup(rv.data, 'html.parser')
            div = s.select('div.alert-danger')
            if div:
                print div[0]
            else:
                print rv.data
            # NOTE(review): message says "dataset" but this is model creation
            raise RuntimeError('Failed to create dataset - status %s' % rv.status_code)

        job_id = cls.job_id_from_response(rv)
        assert cls.model_exists(job_id), 'model not found after successful creation'
        cls.created_models.append(job_id)
        return job_id
class BaseViewsTestWithModel(BaseViewsTestWithDataset):
    """
    Provides a model

    Trains one model up-front in setUpClass so the class's tests can share it
    via cls.model_id.
    """
    @classmethod
    def setUpClass(cls):
        super(BaseViewsTestWithModel, cls).setUpClass()
        cls.model_id = cls.create_model(json=True)
        assert cls.model_wait_completion(cls.model_id) == 'Done', 'create failed'
class BaseTestViews(BaseViewsTest):
    """
    Tests which don't require a dataset or a model
    """

    def test_page_model_new(self):
        # The "new model" form page should render
        rv = self.app.get('/models/images/classification/new')
        assert rv.status_code == 200, 'page load failed with %s' % rv.status_code
        assert 'New Image Classification Model' in rv.data, 'unexpected page format'

    def test_nonexistent_model(self):
        assert not self.model_exists('foo'), "model shouldn't exist"

    def test_visualize_network(self):
        # POST the custom network definition and expect a rendered <img> graph
        rv = self.app.post('/models/visualize-network?framework=' + self.FRAMEWORK,
                           data={'custom_network': self.network()}
                           )
        s = BeautifulSoup(rv.data, 'html.parser')
        if rv.status_code != 200:
            body = s.select('body')[0]
            # GraphViz is an optional dependency; skip rather than fail
            if 'InvocationException' in str(body):
                raise unittest.SkipTest('GraphViz not installed')
            raise AssertionError('POST failed with %s\n\n%s' % (rv.status_code, body))
        image = s.select('img')
        assert image is not None, "didn't return an image"

    def test_customize(self):
        # Customizing the built-in "lenet" network should succeed
        rv = self.app.post('/models/customize?network=lenet&framework=' + self.FRAMEWORK)
        s = BeautifulSoup(rv.data, 'html.parser')
        body = s.select('body')
        assert rv.status_code == 200, 'POST failed with %s\n\n%s' % (rv.status_code, body)
class BaseTestCreation(BaseViewsTestWithDataset):
"""
Model creation tests
"""
def test_create_json(self):
job_id = self.create_model(json=True)
self.abort_model(job_id)
def test_create_delete(self):
job_id = self.create_model()
assert self.delete_model(job_id) == 200, 'delete failed'
assert not self.model_exists(job_id), 'model exists after delete'
def test_create_wait_delete(self):
job_id = self.create_model()
assert self.model_wait_completion(job_id) == 'Done', 'create failed'
assert self.delete_model(job_id) == 200, 'delete failed'
assert not self.model_exists(job_id), 'model exists after delete'
def test_create_abort_delete(self):
job_id = self.create_model()
assert self.abort_model(job_id) == 200, 'abort failed'
assert self.delete_model(job_id) == 200, 'delete failed'
assert not self.model_exists(job_id), 'model exists after delete'
def test_snapshot_interval_2(self):
job_id = self.create_model(snapshot_interval=0.5)
assert self.model_wait_completion(job_id) == 'Done', 'create failed'
rv = self.app.get('/models/%s.json' % job_id)
assert rv.status_code == 200, 'json load failed with %s' % rv.status_code
content = json.loads(rv.data)
assert len(content['snapshots']) > 1, 'should take >1 snapshot'
def test_snapshot_interval_0_5(self):
job_id = self.create_model(train_epochs=4, snapshot_interval=2)
assert self.model_wait_completion(job_id) == 'Done', 'create failed'
rv = self.app.get('/models/%s.json' % job_id)
assert rv.status_code == 200, 'json load failed with %s' % rv.status_code
content = json.loads(rv.data)
assert len(content['snapshots']) == 2, 'should take 2 snapshots'
@unittest.skipIf(
not config_value('gpu_list'),
'no GPUs selected')
@unittest.skipIf(
not config_value('caffe')['cuda_enabled'],
'CUDA disabled')
@unittest.skipIf(
config_value('caffe')['multi_gpu'],
'multi-GPU enabled')
def test_select_gpu(self):
for index in config_value('gpu_list').split(','):
yield self.check_select_gpu, index
def check_select_gpu(self, gpu_index):
job_id = self.create_model(select_gpu=gpu_index)
assert self.model_wait_completion(job_id) == 'Done', 'create failed'
@unittest.skipIf(
not config_value('gpu_list'),
'no GPUs selected')
@unittest.skipIf(
not config_value('caffe')['cuda_enabled'],
'CUDA disabled')
@unittest.skipIf(
not config_value('caffe')['multi_gpu'],
'multi-GPU disabled')
def test_select_gpus(self):
# test all possible combinations
gpu_list = config_value('gpu_list').split(',')
for i in xrange(len(gpu_list)):
for combination in itertools.combinations(gpu_list, i + 1):
yield self.check_select_gpus, combination
def check_select_gpus(self, gpu_list):
job_id = self.create_model(select_gpus_list=','.join(gpu_list), batch_size=len(gpu_list))
assert self.model_wait_completion(job_id) == 'Done', 'create failed'
def classify_one_for_job(self, job_id, test_misclassification=True):
# carry out one inference test per category in dataset
for category in self.imageset_paths.keys():
image_path = self.imageset_paths[category][0]
image_path = os.path.join(self.imageset_folder, image_path)
with open(image_path, 'rb') as infile:
# StringIO wrapping is needed to simulate POST file upload.
image_upload = (StringIO(infile.read()), 'image.png')
rv = self.app.post(
'/models/images/classification/classify_one?job_id=%s' % job_id,
data={
'image_file': image_upload,
}
)
s = BeautifulSoup(rv.data, 'html.parser')
body = s.select('body')
assert rv.status_code == 200, 'POST failed with %s\n\n%s' % (rv.status_code, body)
# gets an array of arrays [[confidence, label],...]
predictions = [p.get_text().split() for p in s.select('ul.list-group li')]
if test_misclassification:
assert predictions[0][1] == category, 'image misclassified'
def test_classify_one_mean_image(self):
    # Train with mean-image subtraction, then run single-image inference.
    # test the creation
    job_id = self.create_model(use_mean='image')
    assert self.model_wait_completion(job_id) == 'Done', 'job failed'
    self.classify_one_for_job(job_id)

def test_classify_one_mean_pixel(self):
    # Train with mean-pixel subtraction, then run single-image inference.
    # test the creation
    job_id = self.create_model(use_mean='pixel')
    assert self.model_wait_completion(job_id) == 'Done', 'job failed'
    self.classify_one_for_job(job_id)

def test_classify_one_mean_none(self):
    # Train without mean subtraction; accuracy may suffer, so skip the
    # misclassification assertion (False flag).
    # test the creation
    job_id = self.create_model(use_mean='none')
    assert self.model_wait_completion(job_id) == 'Done', 'job failed'
    self.classify_one_for_job(job_id, False)
def test_retrain(self):
    """Train a model, then start a second job that fine-tunes from the
    first job's most recent snapshot."""
    job1_id = self.create_model()
    assert self.model_wait_completion(job1_id) == 'Done', 'first job failed'
    rv = self.app.get('/models/%s.json' % job1_id)
    assert rv.status_code == 200, 'json load failed with %s' % rv.status_code
    content = json.loads(rv.data)
    assert len(content['snapshots']), 'should have at least snapshot'

    # 'previous' method + per-job snapshot field selects the pretrained
    # weights for the new job.
    options = {
        'method': 'previous',
        'previous_networks': job1_id,
    }
    options['%s-snapshot' % job1_id] = content['snapshots'][-1]

    job2_id = self.create_model(**options)
    assert self.model_wait_completion(job2_id) == 'Done', 'second job failed'
def test_retrain_twice(self):
    """Fine-tune a model from a job which was itself fine-tuned from a
    pretrained model (two levels of retraining)."""
    # retrain from a job which already had a pretrained model
    job1_id = self.create_model()
    assert self.model_wait_completion(job1_id) == 'Done', 'first job failed'
    rv = self.app.get('/models/%s.json' % job1_id)
    assert rv.status_code == 200, 'json load failed with %s' % rv.status_code
    content = json.loads(rv.data)
    assert len(content['snapshots']), 'should have at least snapshot'

    options_2 = {
        'method': 'previous',
        'previous_networks': job1_id,
    }
    options_2['%s-snapshot' % job1_id] = content['snapshots'][-1]

    job2_id = self.create_model(**options_2)
    assert self.model_wait_completion(job2_id) == 'Done', 'second job failed'

    options_3 = {
        'method': 'previous',
        'previous_networks': job2_id,
    }
    # -1 means "use the last snapshot" without querying the job's JSON.
    options_3['%s-snapshot' % job2_id] = -1

    job3_id = self.create_model(**options_3)
    assert self.model_wait_completion(job3_id) == 'Done', 'third job failed'
def test_bad_network_definition(self):
    """Create a model from an intentionally broken network definition and
    verify the job fails with the offending token ('BogusCode') reported
    on the job page."""
    if self.FRAMEWORK == 'caffe':
        bogus_net = """
layer {
name: "hidden"
type: 'BogusCode'
bottom: "data"
top: "output"
}
layer {
name: "loss"
type: "SoftmaxWithLoss"
bottom: "output"
bottom: "label"
top: "loss"
exclude { stage: "deploy" }
}
layer {
name: "softmax"
type: "Softmax"
bottom: "output"
top: "softmax"
include { stage: "deploy" }
}
"""
    elif self.FRAMEWORK == 'torch':
        bogus_net = """
local model = BogusCode(0)
return function(params)
return {
model = model
}
end
"""
    elif self.FRAMEWORK == 'tensorflow':
        # Indentation inside this string is significant: it is executed as
        # Python by the framework and must fail on BogusCode, not on a
        # SyntaxError that would hide the expected error message.
        bogus_net = """
class UserModel(Tower):

    @model_property
    def inference(self):
        model = BogusCode(0)
        return model

    @model_property
    def loss(y):
        return BogusCode(0)
"""
    else:
        # BUG FIX: previously an unrecognized framework fell through and
        # raised a confusing NameError on the undefined `bogus_net`.
        raise ValueError('unknown framework: %s' % self.FRAMEWORK)
    job_id = self.create_model(json=True, network=bogus_net)
    assert self.model_wait_completion(job_id) == 'Error', 'job should have failed'
    job_info = self.job_info_html(job_id=job_id, job_type='models')
    assert 'BogusCode' in job_info, "job_info: \n%s" % str(job_info)
def test_clone(self):
    """Clone a model job and verify the clone's JSON description and
    submitted form data match the original (modulo per-job fields)."""
    # Use non-default values for every option so the comparison below
    # catches any field that fails to survive cloning.
    options_1 = {
        'shuffle': True,
        'snapshot_interval': 2.0,
        'lr_step_size': 33.0,
        'lr_inv_power': 0.5,
        'lr_inv_gamma': 0.1,
        'lr_poly_power': 3.0,
        'lr_exp_gamma': 0.9,
        'use_mean': 'image',
        'lr_multistep_gamma': 0.5,
        'lr_policy': 'exp',
        'val_interval': 3.0,
        'random_seed': 123,
        'learning_rate': 0.0125,
        'lr_step_gamma': 0.1,
        'lr_sigmoid_step': 50.0,
        'lr_sigmoid_gamma': 0.1,
        'lr_multistep_values': '50,85',
    }

    job1_id = self.create_model(**options_1)
    assert self.model_wait_completion(job1_id) == 'Done', 'first job failed'
    rv = self.app.get('/models/%s.json' % job1_id)
    assert rv.status_code == 200, 'json load failed with %s' % rv.status_code
    content1 = json.loads(rv.data)

    # Clone job1 as job2
    options_2 = {
        'clone': job1_id,
    }

    job2_id = self.create_model(**options_2)
    assert self.model_wait_completion(job2_id) == 'Done', 'second job failed'
    rv = self.app.get('/models/%s.json' % job2_id)
    assert rv.status_code == 200, 'json load failed with %s' % rv.status_code
    content2 = json.loads(rv.data)

    # These will be different
    content1.pop('id')
    content2.pop('id')
    content1.pop('directory')
    content2.pop('directory')
    content1.pop('creation time')
    content2.pop('creation time')
    content1.pop('job id')
    content2.pop('job id')

    assert (content1 == content2), 'job content does not match'

    job1 = digits.webapp.scheduler.get_job(job1_id)
    job2 = digits.webapp.scheduler.get_job(job2_id)

    assert (job1.form_data == job2.form_data), 'form content does not match'
class BaseTestCreated(BaseViewsTestWithModel):

    """
    Tests on a model that has already been created
    """

    def test_save(self):
        # Persist the job to disk via the scheduler's job object.
        job = digits.webapp.scheduler.get_job(self.model_id)
        assert job.save(), 'Job failed to save'
    def test_get_snapshot(self):
        # -1 selects the most recent snapshot of the training task.
        job = digits.webapp.scheduler.get_job(self.model_id)
        task = job.train_task()
        f = task.get_snapshot(-1)
        assert f, "Failed to load snapshot"
        filename = task.get_snapshot_filename(-1)
        assert filename, "Failed to get filename"
    def test_download(self):
        # Nose generator test: the model must be downloadable in every
        # supported archive format.
        for extension in ['tar', 'zip', 'tar.gz', 'tar.bz2']:
            yield self.check_download, extension

    def check_download(self, extension):
        url = '/models/%s/download.%s' % (self.model_id, extension)
        rv = self.app.get(url)
        assert rv.status_code == 200, 'download "%s" failed with %s' % (url, rv.status_code)
def test_index_json(self):
rv = self.app.get('/index.json')
assert rv.status_code == 200, 'page load failed with %s' % rv.status_code
content = json.loads(rv.data)
found = False
for m in content['models']:
if m['id'] == self.model_id:
found = True
break
assert found, 'model not found in list'
    def test_model_json(self):
        # The model's own JSON page reports its id, parent dataset and a
        # non-empty snapshot list.
        rv = self.app.get('/models/%s.json' % self.model_id)
        assert rv.status_code == 200, 'page load failed with %s' % rv.status_code
        content = json.loads(rv.data)
        assert content['id'] == self.model_id, 'id %s != %s' % (content['id'], self.model_id)
        assert content['dataset_id'] == self.dataset_id, 'dataset_id %s != %s' % (
            content['dataset_id'], self.dataset_id)
        assert len(content['snapshots']) > 0, 'no snapshots in list'
def test_edit_name(self):
status = self.edit_job(
self.dataset_id,
name='new name'
)
assert status == 200, 'failed with %s' % status
def test_edit_notes(self):
status = self.edit_job(
self.dataset_id,
notes='new notes'
)
assert status == 200, 'failed with %s' % status
    def test_classify_one(self):
        """Classify one image via the HTML endpoint (with visualizations)
        and assert the top-1 label matches the image's category."""
        # test first image in first category
        category = self.imageset_paths.keys()[0]
        image_path = self.imageset_paths[category][0]
        image_path = os.path.join(self.imageset_folder, image_path)
        with open(image_path, 'rb') as infile:
            # StringIO wrapping is needed to simulate POST file upload.
            image_upload = (StringIO(infile.read()), 'image.png')

        rv = self.app.post(
            '/models/images/classification/classify_one?job_id=%s' % self.model_id,
            data={
                'image_file': image_upload,
                'show_visualizations': 'y',
            }
        )
        s = BeautifulSoup(rv.data, 'html.parser')
        body = s.select('body')
        assert rv.status_code == 200, 'POST failed with %s\n\n%s' % (rv.status_code, body)
        # gets an array of arrays [[confidence, label],...]
        predictions = [p.get_text().split() for p in s.select('ul.list-group li')]
        assert predictions[0][1] == category, 'image misclassified'
    def test_classify_one_json(self):
        """Classify one image via the JSON endpoint and assert the top-1
        label matches the image's category."""
        # test last image in last category
        category = self.imageset_paths.keys()[-1]
        image_path = self.imageset_paths[category][-1]
        image_path = os.path.join(self.imageset_folder, image_path)
        with open(image_path, 'rb') as infile:
            # StringIO wrapping is needed to simulate POST file upload.
            image_upload = (StringIO(infile.read()), 'image.png')

        rv = self.app.post(
            '/models/images/classification/classify_one.json?job_id=%s' % self.model_id,
            data={
                'image_file': image_upload,
                'show_visualizations': 'y',
            }
        )
        assert rv.status_code == 200, 'POST failed with %s' % rv.status_code
        # predictions come back as [[label, confidence], ...]
        data = json.loads(rv.data)
        assert data['predictions'][0][0] == category, 'image misclassified'
    def test_classify_many(self):
        """Upload a text file listing every image (absolute paths) and check
        the batch classification page renders successfully."""
        textfile_images = ''
        label_id = 0
        for label, images in self.imageset_paths.iteritems():
            for image in images:
                image_path = image
                image_path = os.path.join(self.imageset_folder, image_path)
                textfile_images += '%s %d\n' % (image_path, label_id)
            label_id += 1

        # StringIO wrapping is needed to simulate POST file upload.
        file_upload = (StringIO(textfile_images), 'images.txt')

        rv = self.app.post(
            '/models/images/classification/classify_many?job_id=%s' % self.model_id,
            data={'image_list': file_upload}
        )
        s = BeautifulSoup(rv.data, 'html.parser')
        body = s.select('body')
        assert rv.status_code == 200, 'POST failed with %s\n\n%s' % (rv.status_code, body)
    def test_classify_many_from_folder(self):
        """Same as test_classify_many, but with relative paths resolved
        server-side via the image_folder form field."""
        textfile_images = ''
        label_id = 0
        for label, images in self.imageset_paths.iteritems():
            for image in images:
                # paths are relative here; the server joins them with image_folder
                image_path = image
                textfile_images += '%s %d\n' % (image_path, label_id)
            label_id += 1

        # StringIO wrapping is needed to simulate POST file upload.
        file_upload = (StringIO(textfile_images), 'images.txt')

        rv = self.app.post(
            '/models/images/classification/classify_many?job_id=%s' % self.model_id,
            data={'image_list': file_upload, 'image_folder': self.imageset_folder}
        )
        s = BeautifulSoup(rv.data, 'html.parser')
        body = s.select('body')
        assert rv.status_code == 200, 'POST failed with %s\n\n%s' % (rv.status_code, body)
    def test_classify_many_invalid_ground_truth(self):
        """Batch classification must tolerate out-of-range ground-truth
        labels (negative and beyond the number of classes)."""
        textfile_images = ''
        label_id = 0
        for label, images in self.imageset_paths.iteritems():
            for image in images:
                image_path = image
                image_path = os.path.join(self.imageset_folder, image_path)
                # test label_id with -1 and >len(labels)
                textfile_images += '%s %s\n' % (image_path, 3 * label_id - 1)
            label_id += 1

        # StringIO wrapping is needed to simulate POST file upload.
        file_upload = (StringIO(textfile_images), 'images.txt')

        rv = self.app.post(
            '/models/images/classification/classify_many?job_id=%s' % self.model_id,
            data={'image_list': file_upload}
        )
        s = BeautifulSoup(rv.data, 'html.parser')
        body = s.select('body')
        assert rv.status_code == 200, 'POST failed with %s\n\n%s' % (rv.status_code, body)
    def test_classify_many_json(self):
        """Batch classification via the JSON endpoint; verify the first
        image of each category is classified correctly."""
        textfile_images = ''
        label_id = 0
        for label, images in self.imageset_paths.iteritems():
            for image in images:
                image_path = image
                image_path = os.path.join(self.imageset_folder, image_path)
                textfile_images += '%s %d\n' % (image_path, label_id)
            label_id += 1

        # StringIO wrapping is needed to simulate POST file upload.
        file_upload = (StringIO(textfile_images), 'images.txt')

        rv = self.app.post(
            '/models/images/classification/classify_many.json?job_id=%s' % self.model_id,
            data={'image_list': file_upload}
        )
        assert rv.status_code == 200, 'POST failed with %s' % rv.status_code
        data = json.loads(rv.data)
        assert 'classifications' in data, 'invalid response'

        # verify classification of first image in each category
        for category in self.imageset_paths.keys():
            image_path = self.imageset_paths[category][0]
            image_path = os.path.join(self.imageset_folder, image_path)
            prediction = data['classifications'][image_path][0][0]
            assert prediction == category, 'image misclassified- predicted %s - expected %s' % (prediction, category)
def test_top_n(self):
textfile_images = ''
label_id = 0
for label, images in self.imageset_paths.iteritems():
for image in images:
image_path = image
image_path = os.path.join(self.imageset_folder, image_path)
textfile_images += '%s %d\n' % (image_path, label_id)
label_id += 1
# StringIO wrapping is needed to simulate POST file upload.
file_upload = (StringIO(textfile_images), 'images.txt')
rv = self.app.post(
'/models/images/classification/top_n?job_id=%s' % self.model_id,
data={'image_list': file_upload}
)
s = BeautifulSoup(rv.data, 'html.parser')
body = s.select('body')
assert rv.status_code == 200, 'POST failed with %s\n\n%s' % (rv.status_code, body)
keys = self.imageset_paths.keys()
for key in keys:
assert key in rv.data, '"%s" not found in the response'
def test_top_n_from_folder(self):
textfile_images = ''
label_id = 0
for label, images in self.imageset_paths.iteritems():
for image in images:
image_path = image
textfile_images += '%s %d\n' % (image_path, label_id)
label_id += 1
# StringIO wrapping is needed to simulate POST file upload.
file_upload = (StringIO(textfile_images), 'images.txt')
rv = self.app.post(
'/models/images/classification/top_n?job_id=%s' % self.model_id,
data={'image_list': file_upload, 'image_folder': self.imageset_folder}
)
s = BeautifulSoup(rv.data, 'html.parser')
body = s.select('body')
assert rv.status_code == 200, 'POST failed with %s\n\n%s' % (rv.status_code, body)
keys = self.imageset_paths.keys()
for key in keys:
assert key in rv.data, '"%s" not found in the response'
    def test_inference_while_training(self):
        """Start a long training job occupying all GPUs, then verify a
        classify_one request still succeeds while it runs."""
        # make sure we can do inference while all GPUs are in use for training
        # if no GPUs, just test inference during a normal training job

        # get number of GPUs
        if self.FRAMEWORK == 'tensorflow':
            raise unittest.SkipTest('Tensorflow CPU inference during training not supported')
        gpu_count = 1
        if (config_value('gpu_list') and
                config_value('caffe')['cuda_enabled'] and
                config_value('caffe')['multi_gpu']):
            gpu_count = len(config_value('gpu_list').split(','))

        # grab an image for testing
        category = self.imageset_paths.keys()[-1]
        image_path = self.imageset_paths[category][-1]
        image_path = os.path.join(self.imageset_folder, image_path)
        with open(image_path, 'rb') as infile:
            # StringIO wrapping is needed to simulate POST file upload.
            image_upload = (StringIO(infile.read()), 'image.png')

        # create a long-running training job
        job2_id = self.create_model(
            select_gpu_count=gpu_count,
            batch_size=10 * gpu_count,
            train_epochs=1000,
        )
        try:
            # Wait until the hogging job is actually Running before testing
            # inference on the pre-existing model.
            while True:
                status = self.model_status(job2_id)
                if status in ['Initialized', 'Waiting']:
                    time.sleep(0.01)
                elif status == 'Running':
                    break
                else:
                    raise RuntimeError('job status is %s' % status)

            rv = self.app.post(
                '/models/images/classification/classify_one.json?job_id=%s' % self.model_id,
                data={'image_file': image_upload}
            )
            # response must be valid JSON and a 200
            json.loads(rv.data)
            assert rv.status_code == 200, 'POST failed with %s' % rv.status_code
        finally:
            # Always tear down the 1000-epoch job.
            self.delete_model(job2_id)
class BaseTestDatasetModelInteractions(BaseViewsTestWithDataset):

    """
    Test the interactions between datasets and models
    """

    # If you try to create a model using a deleted dataset, it should fail
    def test_create_model_deleted_dataset(self):
        dataset_id = self.create_dataset()
        assert self.delete_dataset(dataset_id) == 200, 'delete failed'
        assert not self.dataset_exists(dataset_id), 'dataset exists after delete'

        try:
            self.create_model(dataset=dataset_id)
        except RuntimeError:
            # expected: creation must be rejected
            return
        assert False, 'Should have failed'

    # If you try to create a model using a running dataset,
    # it should wait to start until the dataset is completed
    def test_create_model_running_dataset(self):
        dataset_id = self.create_dataset()
        model_id = self.create_model(dataset=dataset_id)

        # Model should be in WAIT status while dataset is running
        # Copying functionality from job_wait_completion ...
        start_time = time.time()
        timeout = TIMEOUT_DATASET

        dataset_status = self.dataset_status(dataset_id)
        while dataset_status != 'Done':
            model_status = self.model_status(model_id)
            if model_status == 'Initialized':
                # give it some time ...
                pass
            elif model_status == 'Waiting':
                # That's what we were waiting for
                break
            else:
                raise Exception('Model not waiting - "%s"' % model_status)
            assert (time.time() - start_time) < timeout, 'Job took more than %s seconds' % timeout
            time.sleep(0.5)
            dataset_status = self.dataset_status(dataset_id)

        # Model should switch to RUN status after dataset is DONE
        assert self.dataset_wait_completion(dataset_id) == 'Done', 'dataset creation failed'
        time.sleep(1)
        assert self.model_status(model_id) in ['Running', 'Done'], "model didn't start"
        self.abort_model(model_id)

    # If you try to delete a completed dataset with a dependent model, it should fail
    def test_delete_dataset_dependent_model(self):
        dataset_id = self.create_dataset()
        model_id = self.create_model(dataset=dataset_id)
        assert self.dataset_wait_completion(dataset_id) == 'Done', 'dataset creation failed'
        assert self.delete_dataset(dataset_id) == 403, 'dataset deletion should not have succeeded'
        self.abort_model(model_id)

    # If you try to delete a running dataset with a dependent model, it should fail
    def test_delete_running_dataset_dependent_model(self):
        dataset_id = self.create_dataset()
        model_id = self.create_model(dataset=dataset_id)
        assert self.delete_dataset(dataset_id) == 403, 'dataset deletion should not have succeeded'
        self.abort_dataset(dataset_id)
        self.abort_model(model_id)
class BaseTestCreatedWide(BaseTestCreated):
    # Non-square input: wider than the default.
    IMAGE_WIDTH = 20


class BaseTestCreatedTall(BaseTestCreated):
    # Non-square input: taller than the default.
    IMAGE_HEIGHT = 20


class BaseTestCreatedCropInForm(BaseTestCreated):
    # Crop size supplied through the model-creation form field
    # (cf. BaseTestCreatedCropInNetwork below).
    CROP_SIZE = 8


class BaseTestCreatedDataAug(BaseTestCreatedTall):
    # Enable all the data-augmentation options supported by the form.
    AUG_FLIP = 'fliplrud'
    AUG_QUAD_ROT = 'rotall'
    AUG_ROT = 45
    AUG_SCALE = 0.07
    AUG_NOISE = 0.03
    AUG_HSV_USE = True
    AUG_HSV_H = 0.02
    AUG_HSV_S = 0.04
    AUG_HSV_V = 0.06
class BaseTestCreatedCropInNetwork(BaseTestCreated):
    # Crop size specified inside the network definitions themselves,
    # rather than through the form field.
    CAFFE_NETWORK = \
        """
layer {
name: "data"
type: "Data"
top: "data"
top: "label"
include {
phase: TRAIN
}
transform_param {
crop_size: 8
}
}
layer {
name: "data"
type: "Data"
top: "data"
top: "label"
include {
phase: TEST
}
transform_param {
crop_size: 8
}
}
layer {
name: "hidden"
type: 'InnerProduct'
bottom: "data"
top: "output"
}
layer {
name: "loss"
type: "SoftmaxWithLoss"
bottom: "output"
bottom: "label"
top: "loss"
exclude { stage: "deploy" }
}
layer {
name: "accuracy"
type: "Accuracy"
bottom: "output"
bottom: "label"
top: "accuracy"
include { stage: "val" }
}
layer {
name: "softmax"
type: "Softmax"
bottom: "output"
top: "softmax"
include { stage: "deploy" }
}
"""

    # Torch: croplen is returned alongside the model so the framework
    # applies the crop.
    TORCH_NETWORK = \
        """
return function(p)
local nclasses = p.nclasses or 1
local croplen = 8, channels
if p.inputShape then channels=p.inputShape[1] else channels=1 end
local model = nn.Sequential()
model:add(nn.View(-1):setNumInputDims(3)) -- flatten
local linLayer = nn.Linear(channels*croplen*croplen, nclasses)
linLayer.weight:fill(0)
linLayer.bias:fill(0)
model:add(linLayer) -- chw -> nclasses
model:add(nn.LogSoftMax())
return {
model = model,
croplen = croplen
}
end
"""
    # Not implemented for tensorflow yet.
    TENSORFLOW_NETWORK = \
        """
@TODO(tzaman)
"""
################################################################################
# Test classes
################################################################################


# Concrete test classes below bind the generic Base* suites to a specific
# framework via the test_utils.*Mixin classes.

class TestCaffeViews(BaseTestViews, test_utils.CaffeMixin):
    pass


class TestCaffeCreation(BaseTestCreation, test_utils.CaffeMixin):
    pass
class TestCaffeCreatedWideMoreNumOutput(BaseTestCreatedWide, test_utils.CaffeMixin):
    # Same simple network, but with a much larger hidden layer (1000 units).
    CAFFE_NETWORK = \
        """
layer {
name: "hidden"
type: 'InnerProduct'
bottom: "data"
top: "output"
inner_product_param {
num_output: 1000
}
}
layer {
name: "loss"
type: "SoftmaxWithLoss"
bottom: "output"
bottom: "label"
top: "loss"
exclude { stage: "deploy" }
}
layer {
name: "accuracy"
type: "Accuracy"
bottom: "output"
bottom: "label"
top: "accuracy"
include { stage: "val" }
}
layer {
name: "softmax"
type: "Softmax"
bottom: "output"
top: "softmax"
include { stage: "deploy" }
}
"""
class TestCaffeDatasetModelInteractions(BaseTestDatasetModelInteractions, test_utils.CaffeMixin):
    pass


class TestCaffeCreatedCropInForm(BaseTestCreatedCropInForm, test_utils.CaffeMixin):
    pass


class TestCaffeCreatedCropInNetwork(BaseTestCreatedCropInNetwork, test_utils.CaffeMixin):
    pass
@unittest.skipIf(
    not CaffeFramework().can_accumulate_gradients(),
    'This version of Caffe cannot accumulate gradients')
class TestBatchAccumulationCaffe(BaseViewsTestWithDataset, test_utils.CaffeMixin):
    TRAIN_EPOCHS = 1
    IMAGE_COUNT = 10  # per class

    def test_batch_accumulation_calculations(self):
        """Check the generated solver's iter_size and max_iter reflect the
        requested batch accumulation."""
        batch_size = 10
        batch_accumulation = 2

        job_id = self.create_model(
            batch_size=batch_size,
            batch_accumulation=batch_accumulation,
        )
        assert self.model_wait_completion(job_id) == 'Done', 'create failed'
        info = self.model_info(job_id)
        solver = caffe_pb2.SolverParameter()
        with open(os.path.join(info['directory'], info['solver file']), 'r') as infile:
            text_format.Merge(infile.read(), solver)
        assert solver.iter_size == batch_accumulation, \
            'iter_size is %d instead of %d' % (solver.iter_size, batch_accumulation)
        # The '* 3' presumably accounts for the number of classes in the
        # test dataset — TODO confirm against BaseViewsTestWithDataset.
        max_iter = int(math.ceil(
            float(self.TRAIN_EPOCHS * self.IMAGE_COUNT * 3) /
            (batch_size * batch_accumulation)
        ))
        assert solver.max_iter == max_iter,\
            'max_iter is %d instead of %d' % (solver.max_iter, max_iter)
class TestCaffeCreatedTallMultiStepLR(BaseTestCreatedTall, test_utils.CaffeMixin):
    # Exercise the multistep learning-rate policy.
    LR_POLICY = 'multistep'
    LR_MULTISTEP_VALUES = '50,75,90'


class TestTorchViews(BaseTestViews, test_utils.TorchMixin):
    pass


class TestTorchCreation(BaseTestCreation, test_utils.TorchMixin):
    pass


class TestTorchCreatedUnencodedShuffle(BaseTestCreated, test_utils.TorchMixin):
    ENCODING = 'none'
    SHUFFLE = True


class TestTorchCreatedHdf5(BaseTestCreated, test_utils.TorchMixin):
    BACKEND = 'hdf5'


class TestTorchCreatedTallHdf5Shuffle(BaseTestCreatedTall, test_utils.TorchMixin):
    BACKEND = 'hdf5'
    SHUFFLE = True


class TestTorchDatasetModelInteractions(BaseTestDatasetModelInteractions, test_utils.TorchMixin):
    pass
class TestCaffeLeNet(BaseTestCreated, test_utils.CaffeMixin):
    # Full LeNet run on 28x28 inputs using the bundled standard network.
    IMAGE_WIDTH = 28
    IMAGE_HEIGHT = 28

    # NOTE(review): the file handle from open() is never closed explicitly;
    # harmless at class-definition time under CPython, but worth tidying.
    CAFFE_NETWORK = open(
        os.path.join(
            os.path.dirname(digits.__file__),
            'standard-networks', 'caffe', 'lenet.prototxt')
    ).read()


class TestCaffeLeNetADAMOptimizer(TestCaffeLeNet):
    OPTIMIZER = 'ADAM'


class TestTorchCreatedCropInForm(BaseTestCreatedCropInForm, test_utils.TorchMixin):
    pass


class TestTorchCreatedDataAug(BaseTestCreatedDataAug, test_utils.TorchMixin):
    TRAIN_EPOCHS = 2


class TestTorchCreatedCropInNetwork(BaseTestCreatedCropInNetwork, test_utils.TorchMixin):
    pass


class TestTorchCreatedWideMultiStepLR(BaseTestCreatedWide, test_utils.TorchMixin):
    LR_POLICY = 'multistep'
    LR_MULTISTEP_VALUES = '50,75,90'
class TestTorchLeNet(BaseTestCreated, test_utils.TorchMixin):
    IMAGE_WIDTH = 28
    IMAGE_HEIGHT = 28
    TRAIN_EPOCHS = 20

    # standard lenet model will adjust to color
    # or grayscale images
    TORCH_NETWORK = open(
        os.path.join(
            os.path.dirname(digits.__file__),
            'standard-networks', 'torch', 'lenet.lua')
    ).read()

    def test_inference_while_training(self):
        # override parent method to skip this test as the reference
        # model for LeNet uses CuDNN by default and it difficult to
        # perform inference on a CuDNN-trained model without non-trivial
        # model tweaking
        raise unittest.SkipTest('Torch CPU inference on CuDNN-trained model not supported')


class TestTorchLeNetADAMOptimizer(TestTorchLeNet):
    OPTIMIZER = 'ADAM'


class TestTorchLeNetHdf5Shuffle(TestTorchLeNet):
    BACKEND = 'hdf5'
    SHUFFLE = True
class TestCaffePythonLayer(BaseViewsTestWithDataset, test_utils.CaffeMixin):
    """Verify that a user-supplied Caffe Python layer script is copied into
    the job directory and successfully loaded during training."""

    # Network includes a "Python" layer resolved from module
    # digits_python_layers, which the job-creation process copies from
    # python_layer_server_file into the job dir.
    CAFFE_NETWORK = """\
layer {
name: "hidden"
type: 'InnerProduct'
inner_product_param {
num_output: 500
weight_filler {
type: "xavier"
}
bias_filler {
type: "constant"
}
}
bottom: "data"
top: "output"
}
layer {
name: "loss"
type: "SoftmaxWithLoss"
bottom: "output"
bottom: "label"
top: "loss"
exclude { stage: "deploy" }
}
layer {
name: "accuracy"
type: "Accuracy"
bottom: "output"
bottom: "label"
top: "accuracy"
include { stage: "val" }
}
layer {
name: "py_test"
type: "Python"
bottom: "output"
top: "py_test"
python_param {
module: "digits_python_layers"
layer: "PythonLayer"
}
}
layer {
name: "softmax"
type: "Softmax"
bottom: "output"
top: "softmax"
include { stage: "deploy" }
}
"""

    def write_python_layer_script(self, filename):
        """Write a minimal caffe.Layer implementation to `filename`."""
        # BUG FIX: the embedded script had lost its indentation, so the
        # generated digits_python_layers.py was not valid Python; the
        # class/method bodies must be indented.
        with open(filename, 'w') as f:
            f.write("""\
import caffe
import numpy as np


class PythonLayer(caffe.Layer):

    def setup(self, bottom, top):
        print 'PythonLayer::setup'
        if len(bottom) != 1:
            raise Exception("Need one input.")

    def reshape(self, bottom, top):
        print 'PythonLayer::reshape'
        top[0].reshape(1)

    def forward(self, bottom, top):
        print 'PythonLayer::forward'
        top[0].data[...] = np.sum(bottom[0].data) / 2. / bottom[0].num
""")

    # This test makes a temporary python layer file whose path is set
    # as py_layer_server_file. The job creation process copies that
    # file to the job_dir. The CAFFE_NETWORK above, requires that
    # python script to be in the correct spot. If there is an error
    # in the script or if the script is named incorrectly, or does
    # not exist in the job_dir, then the test will fail.
    def test_python_layer(self):
        tmpdir = tempfile.mkdtemp()
        py_file = tmpdir + '/py_test.py'
        self.write_python_layer_script(py_file)

        job_id = self.create_model(python_layer_server_file=py_file)

        # remove the temporary python script.
        shutil.rmtree(tmpdir)

        assert self.model_wait_completion(job_id) == 'Done', 'first job failed'
        rv = self.app.get('/models/%s.json' % job_id)
        assert rv.status_code == 200, 'json load failed with %s' % rv.status_code
        content = json.loads(rv.data)
        assert len(content['snapshots']), 'should have at least snapshot'
class TestSweepCreation(BaseViewsTestWithDataset, test_utils.CaffeMixin):

    """
    Model creation tests
    """

    def test_sweep(self):
        # A bracketed list per hyperparameter requests a sweep: one job is
        # created for every (learning_rate, batch_size) combination.
        job_ids = self.create_model(json=True, learning_rate='[0.01, 0.02]', batch_size='[8, 10]')
        for job_id in job_ids:
            assert self.model_wait_completion(job_id) == 'Done', 'create failed'
            assert self.delete_model(job_id) == 200, 'delete failed'
            assert not self.model_exists(job_id), 'model exists after delete'
# Tensorflow
class TestTensorflowCreation(BaseTestCreation, test_utils.TensorflowMixin):
    pass


class TestTensorflowCreatedWideUnencodedShuffle(BaseTestCreatedWide, test_utils.TensorflowMixin):
    ENCODING = 'none'
    SHUFFLE = True


class TestTensorflowCreatedHdf5(BaseTestCreated, test_utils.TensorflowMixin):
    BACKEND = 'hdf5'


class TestTensorflowCreatedTallHdf5Shuffle(BaseTestCreatedTall, test_utils.TensorflowMixin):
    BACKEND = 'hdf5'
    SHUFFLE = True


class TestTensorflowDatasetModelInteractions(BaseTestDatasetModelInteractions, test_utils.TensorflowMixin):
    pass


class TestTensorflowCreatedDataAug(BaseTestCreatedDataAug, test_utils.TensorflowMixin):
    # Tensorflow supports a slightly different augmentation set
    # (contrast/whitening instead of rotation/scale).
    AUG_FLIP = 'fliplrud'
    AUG_NOISE = 0.03
    AUG_CONTRAST = 0.1
    AUG_WHITENING = True
    AUG_HSV_USE = True
    AUG_HSV_H = 0.02
    AUG_HSV_S = 0.04
    AUG_HSV_V = 0.06
    TRAIN_EPOCHS = 2


class TestTensorflowCreatedWideMultiStepLR(BaseTestCreatedWide, test_utils.TensorflowMixin):
    LR_POLICY = 'multistep'
    LR_MULTISTEP_VALUES = '50,75,90'
class TestTensorflowLeNet(BaseTestCreated, test_utils.TensorflowMixin):
    IMAGE_WIDTH = 28
    IMAGE_HEIGHT = 28
    TRAIN_EPOCHS = 20

    # standard lenet model will adjust to color
    # or grayscale images
    TENSORFLOW_NETWORK = open(os.path.join(os.path.dirname(digits.__file__),
                                           'standard-networks',
                                           'tensorflow',
                                           'lenet.py')).read()


class TestTensorflowLeNetADAMOptimizer(TestTensorflowLeNet):
    OPTIMIZER = 'ADAM'
| {
"content_hash": "036cf523dcdacfce8c217bee1f41c723",
"timestamp": "",
"source": "github",
"line_count": 1415,
"max_line_length": 117,
"avg_line_length": 33.386572438162545,
"alnum_prop": 0.5912112103636594,
"repo_name": "ethantang95/DIGITS",
"id": "fc037f8527d3808769d4951b8395f3e5bb7d6756",
"size": "47311",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "digits/model/images/classification/test_views.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "4386"
},
{
"name": "HTML",
"bytes": "2638345"
},
{
"name": "JavaScript",
"bytes": "53917"
},
{
"name": "Lua",
"bytes": "110602"
},
{
"name": "Makefile",
"bytes": "113"
},
{
"name": "Protocol Buffer",
"bytes": "1750"
},
{
"name": "Python",
"bytes": "1230584"
},
{
"name": "Shell",
"bytes": "13547"
}
],
"symlink_target": ""
} |
package io.druid.server.http.security;
import com.google.inject.Inject;
import com.sun.jersey.spi.container.ContainerRequest;
import io.druid.server.security.Access;
import io.druid.server.security.AuthorizerMapper;
import io.druid.server.security.AuthorizationUtils;
import io.druid.server.security.ForbiddenException;
import io.druid.server.security.Resource;
import io.druid.server.security.ResourceAction;
import io.druid.server.security.ResourceType;
/**
* Use this ResourceFilter at end points where Druid Cluster State is read or written
* Here are some example paths where this filter is used -
* - druid/broker/v1
* - druid/coordinator/v1
* - druid/historical/v1
* - druid/indexer/v1
* - druid/coordinator/v1/rules
* - druid/coordinator/v1/tiers
* - druid/worker/v1
* - druid/coordinator/v1/servers
* - status
* Note - Currently the resource name for all end points is set to "STATE" however if more fine grained access control
* is required the resource name can be set to specific state properties.
*/
public class StateResourceFilter extends AbstractResourceFilter
{
  @Inject
  public StateResourceFilter(
      AuthorizerMapper authorizerMapper
  )
  {
    super(authorizerMapper);
  }

  @Override
  public ContainerRequest filter(ContainerRequest request)
  {
    // Every state endpoint is guarded by the same coarse-grained "STATE"
    // resource; the action (READ/WRITE) is derived from the HTTP method
    // of the incoming request via getAction().
    final ResourceAction resourceAction = new ResourceAction(
        new Resource("STATE", ResourceType.STATE),
        getAction(request)
    );

    final Access authResult = AuthorizationUtils.authorizeResourceAction(
        getReq(),
        resourceAction,
        getAuthorizerMapper()
    );

    if (!authResult.isAllowed()) {
      // Surface the authorizer's denial reason in the 403 response.
      throw new ForbiddenException(authResult.toString());
    }

    // Request passes through unchanged once authorized.
    return request;
  }

  @Override
  public boolean isApplicable(String requestPath)
  {
    // NOTE(review): the "druid/coordinator/v1/rules", ".../tiers" and
    // ".../servers" checks are redundant — already covered by the
    // "druid/coordinator/v1" prefix above. Harmless, but could be pruned.
    return requestPath.startsWith("druid/broker/v1") ||
           requestPath.startsWith("druid/coordinator/v1") ||
           requestPath.startsWith("druid/historical/v1") ||
           requestPath.startsWith("druid/indexer/v1") ||
           requestPath.startsWith("druid/coordinator/v1/rules") ||
           requestPath.startsWith("druid/coordinator/v1/tiers") ||
           requestPath.startsWith("druid/worker/v1") ||
           requestPath.startsWith("druid/coordinator/v1/servers") ||
           requestPath.startsWith("druid/v2") ||
           requestPath.startsWith("status");
  }
}
| {
"content_hash": "c76d764f413dc22522c00289e0097055",
"timestamp": "",
"source": "github",
"line_count": 75,
"max_line_length": 118,
"avg_line_length": 31.746666666666666,
"alnum_prop": 0.7165056698866022,
"repo_name": "winval/druid",
"id": "f3f8a16343a7f8460a47b8265d74ac3d4af55146",
"size": "3186",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "server/src/main/java/io/druid/server/http/security/StateResourceFilter.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ANTLR",
"bytes": "1406"
},
{
"name": "CSS",
"bytes": "11623"
},
{
"name": "HTML",
"bytes": "26739"
},
{
"name": "Java",
"bytes": "18262421"
},
{
"name": "JavaScript",
"bytes": "295150"
},
{
"name": "Makefile",
"bytes": "659"
},
{
"name": "PostScript",
"bytes": "5"
},
{
"name": "R",
"bytes": "17002"
},
{
"name": "Roff",
"bytes": "3617"
},
{
"name": "Shell",
"bytes": "6103"
},
{
"name": "TeX",
"bytes": "399444"
},
{
"name": "Thrift",
"bytes": "199"
}
],
"symlink_target": ""
} |
"""
oauthlib.oauth1.rfc5849
~~~~~~~~~~~~~~
This module is an implementation of various logic needed
for signing and checking OAuth 1.0 RFC 5849 requests.
"""
from __future__ import absolute_import, unicode_literals
import base64
import hashlib
import logging
log = logging.getLogger(__name__)
import sys
try:
import urlparse
except ImportError:
import urllib.parse as urlparse
if sys.version_info[0] == 3:
bytes_type = bytes
else:
bytes_type = str
from oauthlib.common import Request, urlencode, generate_nonce
from oauthlib.common import generate_timestamp, to_unicode
from . import parameters, signature
# Signature methods defined by RFC 5849 section 3.4.
SIGNATURE_HMAC = "HMAC-SHA1"
SIGNATURE_RSA = "RSA-SHA1"
SIGNATURE_PLAINTEXT = "PLAINTEXT"
SIGNATURE_METHODS = (SIGNATURE_HMAC, SIGNATURE_RSA, SIGNATURE_PLAINTEXT)

# Where the signed OAuth parameters are transported (RFC 5849 section 3.5):
# the Authorization header, the URI query string, or the request body.
SIGNATURE_TYPE_AUTH_HEADER = 'AUTH_HEADER'
SIGNATURE_TYPE_QUERY = 'QUERY'
SIGNATURE_TYPE_BODY = 'BODY'

CONTENT_TYPE_FORM_URLENCODED = 'application/x-www-form-urlencoded'
class Client(object):

    """A client used to sign OAuth 1.0 RFC 5849 requests."""

    # Maps a signature-method name to a callable that signs a request for
    # a given client. Shared (class-level) registry.
    SIGNATURE_METHODS = {
        SIGNATURE_HMAC: signature.sign_hmac_sha1_with_client,
        SIGNATURE_RSA: signature.sign_rsa_sha1_with_client,
        SIGNATURE_PLAINTEXT: signature.sign_plaintext_with_client
    }

    @classmethod
    def register_signature_method(cls, method_name, method_callback):
        # Extension hook: registers a custom signing callable for ALL
        # Client instances (mutates the class-level registry in place).
        cls.SIGNATURE_METHODS[method_name] = method_callback
def __init__(self, client_key,
client_secret=None,
resource_owner_key=None,
resource_owner_secret=None,
callback_uri=None,
signature_method=SIGNATURE_HMAC,
signature_type=SIGNATURE_TYPE_AUTH_HEADER,
rsa_key=None, verifier=None, realm=None,
encoding='utf-8', decoding=None,
nonce=None, timestamp=None):
"""Create an OAuth 1 client.
:param client_key: Client key (consumer key), mandatory.
:param resource_owner_key: Resource owner key (oauth token).
:param resource_owner_secret: Resource owner secret (oauth token secret).
:param callback_uri: Callback used when obtaining request token.
:param signature_method: SIGNATURE_HMAC, SIGNATURE_RSA or SIGNATURE_PLAINTEXT.
:param signature_type: SIGNATURE_TYPE_AUTH_HEADER (default),
SIGNATURE_TYPE_QUERY or SIGNATURE_TYPE_BODY
depending on where you want to embed the oauth
credentials.
:param rsa_key: RSA key used with SIGNATURE_RSA.
:param verifier: Verifier used when obtaining an access token.
:param realm: Realm (scope) to which access is being requested.
:param encoding: If you provide non-unicode input you may use this
to have oauthlib automatically convert.
:param decoding: If you wish that the returned uri, headers and body
from sign be encoded back from unicode, then set
decoding to your preferred encoding, i.e. utf-8.
:param nonce: Use this nonce instead of generating one. (Mainly for testing)
:param timestamp: Use this timestamp instead of using current. (Mainly for testing)
"""
# Convert to unicode using encoding if given, else assume unicode
encode = lambda x: to_unicode(x, encoding) if encoding else x
self.client_key = encode(client_key)
self.client_secret = encode(client_secret)
self.resource_owner_key = encode(resource_owner_key)
self.resource_owner_secret = encode(resource_owner_secret)
self.signature_method = encode(signature_method)
self.signature_type = encode(signature_type)
self.callback_uri = encode(callback_uri)
self.rsa_key = encode(rsa_key)
self.verifier = encode(verifier)
self.realm = encode(realm)
self.encoding = encode(encoding)
self.decoding = encode(decoding)
self.nonce = encode(nonce)
self.timestamp = encode(timestamp)
def __repr__(self):
attrs = vars(self).copy()
attrs['client_secret'] = '****' if attrs['client_secret'] else None
attrs['rsa_key'] = '****' if attrs['rsa_key'] else None
attrs[
'resource_owner_secret'] = '****' if attrs['resource_owner_secret'] else None
attribute_str = ', '.join('%s=%s' % (k, v) for k, v in attrs.items())
return '<%s %s>' % (self.__class__.__name__, attribute_str)
def get_oauth_signature(self, request):
"""Get an OAuth signature to be used in signing a request
To satisfy `section 3.4.1.2`_ item 2, if the request argument's
headers dict attribute contains a Host item, its value will
replace any netloc part of the request argument's uri attribute
value.
.. _`section 3.4.1.2`: https://tools.ietf.org/html/rfc5849#section-3.4.1.2
"""
if self.signature_method == SIGNATURE_PLAINTEXT:
# fast-path
return signature.sign_plaintext(self.client_secret,
self.resource_owner_secret)
uri, headers, body = self._render(request)
collected_params = signature.collect_parameters(
uri_query=urlparse.urlparse(uri).query,
body=body,
headers=headers)
log.debug("Collected params: {0}".format(collected_params))
normalized_params = signature.normalize_parameters(collected_params)
normalized_uri = signature.normalize_base_string_uri(uri,
headers.get('Host', None))
log.debug("Normalized params: {0}".format(normalized_params))
log.debug("Normalized URI: {0}".format(normalized_uri))
base_string = signature.construct_base_string(request.http_method,
normalized_uri, normalized_params)
log.debug("Signing: signature base string: {0}".format(base_string))
if self.signature_method not in self.SIGNATURE_METHODS:
raise ValueError('Invalid signature method.')
sig = self.SIGNATURE_METHODS[self.signature_method](base_string, self)
log.debug("Signature: {0}".format(sig))
return sig
def get_oauth_params(self, request):
"""Get the basic OAuth parameters to be used in generating a signature.
"""
nonce = (generate_nonce()
if self.nonce is None else self.nonce)
timestamp = (generate_timestamp()
if self.timestamp is None else self.timestamp)
params = [
('oauth_nonce', nonce),
('oauth_timestamp', timestamp),
('oauth_version', '1.0'),
('oauth_signature_method', self.signature_method),
('oauth_consumer_key', self.client_key),
]
if self.resource_owner_key:
params.append(('oauth_token', self.resource_owner_key))
if self.callback_uri:
params.append(('oauth_callback', self.callback_uri))
if self.verifier:
params.append(('oauth_verifier', self.verifier))
# providing body hash for requests other than x-www-form-urlencoded
# as described in http://oauth.googlecode.com/svn/spec/ext/body_hash/1.0/oauth-bodyhash.html
# 4.1.1. When to include the body hash
# * [...] MUST NOT include an oauth_body_hash parameter on requests with form-encoded request bodies
# * [...] SHOULD include the oauth_body_hash parameter on all other requests.
content_type = request.headers.get('Content-Type', None)
content_type_eligible = content_type and content_type.find('application/x-www-form-urlencoded') < 0
if request.body is not None and content_type_eligible:
params.append(('oauth_body_hash', base64.b64encode(hashlib.sha1(request.body.encode('utf-8')).digest()).decode('utf-8')))
return params
def _render(self, request, formencode=False, realm=None):
"""Render a signed request according to signature type
Returns a 3-tuple containing the request URI, headers, and body.
If the formencode argument is True and the body contains parameters, it
is escaped and returned as a valid formencoded string.
"""
# TODO what if there are body params on a header-type auth?
# TODO what if there are query params on a body-type auth?
uri, headers, body = request.uri, request.headers, request.body
# TODO: right now these prepare_* methods are very narrow in scope--they
# only affect their little thing. In some cases (for example, with
# header auth) it might be advantageous to allow these methods to touch
# other parts of the request, like the headers—so the prepare_headers
# method could also set the Content-Type header to x-www-form-urlencoded
# like the spec requires. This would be a fundamental change though, and
# I'm not sure how I feel about it.
if self.signature_type == SIGNATURE_TYPE_AUTH_HEADER:
headers = parameters.prepare_headers(
request.oauth_params, request.headers, realm=realm)
elif self.signature_type == SIGNATURE_TYPE_BODY and request.decoded_body is not None:
body = parameters.prepare_form_encoded_body(
request.oauth_params, request.decoded_body)
if formencode:
body = urlencode(body)
headers['Content-Type'] = 'application/x-www-form-urlencoded'
elif self.signature_type == SIGNATURE_TYPE_QUERY:
uri = parameters.prepare_request_uri_query(
request.oauth_params, request.uri)
else:
raise ValueError('Unknown signature type specified.')
return uri, headers, body
def sign(self, uri, http_method='GET', body=None, headers=None, realm=None):
"""Sign a request
Signs an HTTP request with the specified parts.
Returns a 3-tuple of the signed request's URI, headers, and body.
Note that http_method is not returned as it is unaffected by the OAuth
signing process. Also worth noting is that duplicate parameters
will be included in the signature, regardless of where they are
specified (query, body).
The body argument may be a dict, a list of 2-tuples, or a formencoded
string. The Content-Type header must be 'application/x-www-form-urlencoded'
if it is present.
If the body argument is not one of the above, it will be returned
verbatim as it is unaffected by the OAuth signing process. Attempting to
sign a request with non-formencoded data using the OAuth body signature
type is invalid and will raise an exception.
If the body does contain parameters, it will be returned as a properly-
formatted formencoded string.
Body may not be included if the http_method is either GET or HEAD as
this changes the semantic meaning of the request.
All string data MUST be unicode or be encoded with the same encoding
scheme supplied to the Client constructor, default utf-8. This includes
strings inside body dicts, for example.
"""
# normalize request data
request = Request(uri, http_method, body, headers,
encoding=self.encoding)
# sanity check
content_type = request.headers.get('Content-Type', None)
multipart = content_type and content_type.startswith('multipart/')
should_have_params = content_type == CONTENT_TYPE_FORM_URLENCODED
has_params = request.decoded_body is not None
# 3.4.1.3.1. Parameter Sources
# [Parameters are collected from the HTTP request entity-body, but only
# if [...]:
# * The entity-body is single-part.
if multipart and has_params:
raise ValueError(
"Headers indicate a multipart body but body contains parameters.")
# * The entity-body follows the encoding requirements of the
# "application/x-www-form-urlencoded" content-type as defined by
# [W3C.REC-html40-19980424].
elif should_have_params and not has_params:
raise ValueError(
"Headers indicate a formencoded body but body was not decodable.")
# * The HTTP request entity-header includes the "Content-Type"
# header field set to "application/x-www-form-urlencoded".
elif not should_have_params and has_params:
raise ValueError(
"Body contains parameters but Content-Type header was {0} "
"instead of {1}".format(content_type or "not set",
CONTENT_TYPE_FORM_URLENCODED))
# 3.5.2. Form-Encoded Body
# Protocol parameters can be transmitted in the HTTP request entity-
# body, but only if the following REQUIRED conditions are met:
# o The entity-body is single-part.
# o The entity-body follows the encoding requirements of the
# "application/x-www-form-urlencoded" content-type as defined by
# [W3C.REC-html40-19980424].
# o The HTTP request entity-header includes the "Content-Type" header
# field set to "application/x-www-form-urlencoded".
elif self.signature_type == SIGNATURE_TYPE_BODY and not (
should_have_params and has_params and not multipart):
raise ValueError(
'Body signatures may only be used with form-urlencoded content')
# We amend https://tools.ietf.org/html/rfc5849#section-3.4.1.3.1
# with the clause that parameters from body should only be included
# in non GET or HEAD requests. Extracting the request body parameters
# and including them in the signature base string would give semantic
# meaning to the body, which it should not have according to the
# HTTP 1.1 spec.
elif http_method.upper() in ('GET', 'HEAD') and has_params:
raise ValueError('GET/HEAD requests should not include body.')
# generate the basic OAuth parameters
request.oauth_params = self.get_oauth_params(request)
# generate the signature
request.oauth_params.append(
('oauth_signature', self.get_oauth_signature(request)))
# render the signed request and return it
uri, headers, body = self._render(request, formencode=True,
realm=(realm or self.realm))
if self.decoding:
log.debug('Encoding URI, headers and body to %s.', self.decoding)
uri = uri.encode(self.decoding)
body = body.encode(self.decoding) if body else body
new_headers = {}
for k, v in headers.items():
new_headers[k.encode(self.decoding)] = v.encode(self.decoding)
headers = new_headers
return uri, headers, body
| {
"content_hash": "c94dd556c7eb6bca4014d643fd413215",
"timestamp": "",
"source": "github",
"line_count": 327,
"max_line_length": 133,
"avg_line_length": 46.547400611620795,
"alnum_prop": 0.6322186452926878,
"repo_name": "kylebebak/Requester",
"id": "6d8960eb2f91d93762d3b5a9dda59bf343e49f4b",
"size": "15247",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "deps/oauthlib/oauth1/rfc5849/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "178949"
},
{
"name": "Shell",
"bytes": "313"
}
],
"symlink_target": ""
} |
-- Calendar dimension: one row per day from the start date through the end
-- of the stop year, with week number, day/month names and quarter.
WITH daysCte (d) AS
(SELECT CONVERT(DATETIME, '1 January 2011') AS d -- starting date
 UNION ALL
 SELECT DATEADD(D, 1, d) FROM daysCte
 -- Recurse only while the NEXT day still falls inside the stop year;
 -- testing the current day with <= leaked a stray 1 Jan 2013 row.
 WHERE DATEPART(yyyy, DATEADD(D, 1, d)) <= 2012 -- stop year
)
SELECT d
     , DATEPART(wk, d) AS week_number
     , DATENAME(dw, d) AS day_name
     , DATENAME(m, d)  AS month_name
     , DATENAME(q, d)  AS [quarter]
FROM daysCte
-- SQL Server's default MAXRECURSION is 100; a two-year span needs ~730
-- recursions, so lift the cap (0 = unlimited).
OPTION (MAXRECURSION 0)
"content_hash": "95dce4c6180cc788abc7bf03ec08ae9e",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 65,
"avg_line_length": 31.5,
"alnum_prop": 0.6031746031746031,
"repo_name": "antlr/codebuff",
"id": "ca5f38a5cf1112e89cbaf80d93446445c5d03afe",
"size": "378",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "output/sqlite_noisy/1.4.14/daysCTE.sql",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "ANTLR",
"bytes": "1479752"
},
{
"name": "GAP",
"bytes": "146955"
},
{
"name": "Java",
"bytes": "32484822"
},
{
"name": "Python",
"bytes": "113118"
},
{
"name": "SQLPL",
"bytes": "605792"
},
{
"name": "Shell",
"bytes": "445"
}
],
"symlink_target": ""
} |
<?xml version="1.0" encoding="utf-8"?>
<!-- String resources for the Google sign-in demo app. -->
<resources>
    <!-- TODO: define the server-side client ID. See readme.md for details -->
    <string name="server_side_client_id"></string>
    <!-- Launcher/app display name. -->
    <string name="app_name">Using Google</string>
    <string name="action_settings">Settings</string>
    <!-- Labels for the three demo sign-in flows. -->
    <string name="login_btn_text">Login with Google</string>
    <string name="id_token_btn_text">Get ID Token for server</string>
    <string name="code_btn_text">Get code for server</string>
</resources>
| {
"content_hash": "1939e54cb39a5a03ad8a513704e3bc01",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 78,
"avg_line_length": 36.142857142857146,
"alnum_prop": 0.6620553359683794,
"repo_name": "pmhsfelix/ndc-oslo-14-mobile-auth",
"id": "b4388c89f8b87422fb7bc3c21487db86b483ecaa",
"size": "506",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "demos/GoogleClient/res/values/strings.xml",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "17845"
}
],
"symlink_target": ""
} |
angular.module('ngNuxeoClient')
  .service('nuxeo', ['$injector', 'Automation', 'Document', 'Folder', 'Section', 'Workspace', 'NuxeoDirectory', 'NuxeoTag',
    function ($injector, Automation, Document, Folder, Section, Workspace, NuxeoDirectory, NuxeoTag) {
      /**
       * Expose all basic nuxeo services on this facade.
       */
      this.Automation = Automation;
      this.Document = Document;
      this.Folder = Folder;
      this.Section = Section;
      this.Workspace = Workspace;
      this.continents = NuxeoDirectory.continents;
      this.countries = NuxeoDirectory.countries;
      this.natures = NuxeoDirectory.natures;
      this.subjects = NuxeoDirectory.subjects;
      this.tags = NuxeoTag;
      /**
       * Register an additional service under the given name, resolved
       * through the injector. Throws when the service is not a function,
       * no name is given, or the name is already taken.
       */
      this.register = function (service, name) {
        if (!angular.isFunction(service) || !name || this.hasOwnProperty(name)) {
          throw 'Nuxeo service registration failed for service [' + service + ']';
        }
        this[name] = $injector.get(name);
      };
      /**
       * Index an array of entries by their uid; returns undefined for
       * non-array input.
       */
      this.index = function (entries) {
        if (!angular.isArray(entries)) {
          return;
        }
        var byUid = {};
        entries.forEach(function (entry) {
          byUid[entry.uid] = entry;
        });
        return byUid;
      };
      /**
       * Read the first selected file of the input element and upload it
       * as a new Picture document. No-op when nothing is selected.
       */
      this.upload = function (fileInputElement, successCallback, errorCallback) {
        var file = fileInputElement.files[0];
        if (!file) {
          return;
        }
        var reader = new FileReader();
        reader.onloadend = function () {
          var document = new Document({
            type: 'Picture',
            name: file.name
          });
          document.upload(file, successCallback, errorCallback);
        };
        reader.readAsBinaryString(file);
      };
    }]);
"content_hash": "18b6516b9667d527471b9d0bc0ebf760",
"timestamp": "",
"source": "github",
"line_count": 53,
"max_line_length": 123,
"avg_line_length": 33.056603773584904,
"alnum_prop": 0.5684931506849316,
"repo_name": "fmaturel/angular-nuxeo",
"id": "da941cc33131e4c170daddc2035ec4871e8c86ed",
"size": "1752",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/nuxeo/js/services/nuxeo-services.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "1455"
},
{
"name": "HTML",
"bytes": "17367"
},
{
"name": "JavaScript",
"bytes": "91722"
}
],
"symlink_target": ""
} |
<?php
namespace Thorazine\Hack\Models\Builders;
use Illuminate\Database\Eloquent\Model;
class Textarea extends BaseBuilder
{
    /**
     * The database table backing this builder.
     *
     * @var string
     */
    protected $table = 'builder_textareas';
    /**
     * The type identifier for this builder.
     *
     * @var string
     */
    public $type = 'textarea';
    /**
     * Constructor
     */
    public function __construct()
    {
        // we need to force the parent construct
        parent::__construct($this);
    }
}
| {
"content_hash": "425696dfee2c114b4c675c8da4931230",
"timestamp": "",
"source": "github",
"line_count": 29,
"max_line_length": 48,
"avg_line_length": 15.586206896551724,
"alnum_prop": 0.577433628318584,
"repo_name": "Thorazine/hack",
"id": "9a40a6671c148bdcf30957feb04641298759620c",
"size": "452",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/Models/Builders/Textarea.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "101193"
},
{
"name": "HTML",
"bytes": "113042"
},
{
"name": "JavaScript",
"bytes": "32159"
},
{
"name": "PHP",
"bytes": "435982"
}
],
"symlink_target": ""
} |
package com.amazon.pay.response.ipn.model;
import com.amazon.pay.response.model.IdList;
import com.amazon.pay.response.model.Price;
import com.amazon.pay.response.model.Status;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlSchemaType;
import javax.xml.bind.annotation.XmlType;
import javax.xml.datatype.XMLGregorianCalendar;
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "ProviderCreditDetails", propOrder = {
    "amazonProviderCreditId",
    "sellerId",
    "providerSellerId",
    "creditReferenceId",
    "creditAmount",
    "creditReversalAmount",
    "creditReversalIdList",
    "creationTimestamp",
    "creditStatus"
})
/**
 * JAXB-mapped payload describing a provider credit, as delivered in
 * Amazon Pay IPN notifications. Instances are populated by JAXB
 * unmarshalling; only getters are exposed.
 */
public class ProviderCreditDetails {
    @XmlElement(name = "AmazonProviderCreditId", required = true)
    protected String amazonProviderCreditId;
    @XmlElement(name = "SellerId", required = true)
    protected String sellerId;
    @XmlElement(name = "ProviderSellerId", required = true)
    protected String providerSellerId;
    @XmlElement(name = "CreditReferenceId", required = true)
    protected String creditReferenceId;
    @XmlElement(name = "CreditAmount", required = true)
    protected Price creditAmount;
    @XmlElement(name = "CreditReversalAmount", required = true)
    protected Price creditReversalAmount;
    @XmlElement(name = "CreditReversalIdList", required = true)
    protected CreditReversalIdList creditReversalIdList;
    @XmlElement(name = "CreationTimestamp", required = true)
    @XmlSchemaType(name = "dateTime")
    protected XMLGregorianCalendar creationTimestamp;
    @XmlElement(name = "CreditStatus", required = true)
    protected Status creditStatus;
    /** No-arg constructor required by JAXB. */
    public ProviderCreditDetails() {
        super();
    }
    /**
     * @return Returns the amazonProviderCreditId
     */
    public String getAmazonProviderCreditId() {
        return amazonProviderCreditId;
    }
    /**
     * @return Returns the sellerId
     */
    public String getSellerId() {
        return sellerId;
    }
    /**
     * @return Returns the providerSellerId
     * (applicable while processing ProviderCreditNotification only)
     */
    public String getProviderSellerId() {
        return providerSellerId;
    }
    /**
     * Returns the creditReferenceId from notification
     * @return Returns the creditReferenceId
     */
    public String getCreditReferenceId() {
        return creditReferenceId;
    }
    /**
     * Returns the creditAmount from notification
     * @return Returns the creditAmount from notification
     */
    public Price getCreditAmount() {
        return creditAmount;
    }
    /**
     * Returns the creditReversalAmount from notification
     * @return Returns the creditReversalAmount from notification
     */
    public Price getCreditReversalAmount() {
        return creditReversalAmount;
    }
    /**
     * Returns the creditReversalIdList from notification
     * @return Returns the creditReversalIdList from notification
     */
    public CreditReversalIdList getCreditReversalIdList() {
        return creditReversalIdList;
    }
    /**
     * Returns the creationTimeStamp from notification
     * @return Returns the creationTimeStamp from notification
     */
    public XMLGregorianCalendar getCreationTimestamp() {
        return creationTimestamp;
    }
    /**
     * Returns the creditStatus from notification
     * @return Returns the creditStatus from notification
     */
    public Status getCreditStatus() {
        return creditStatus;
    }
    /**
     * String representation of providerCreditDetails.
     * Now includes providerSellerId, which was previously omitted even
     * though it is a mapped field of this class.
     */
    @Override
    public String toString() {
        return "ProviderCreditDetails{" + "amazonProviderCreditId=" + amazonProviderCreditId + ", sellerId="
                + sellerId + ", providerSellerId=" + providerSellerId
                + ", creditReferenceId=" + creditReferenceId
                + ", creditAmount=" + creditAmount + ", creditReversalAmount=" + creditReversalAmount
                + ", creditReversalIdList=" + creditReversalIdList + ", creationTimestamp=" + creationTimestamp
                + ", creditStatus=" + creditStatus + '}';
    }
}
| {
"content_hash": "649f5668864eef58c734b10d7299c7df",
"timestamp": "",
"source": "github",
"line_count": 133,
"max_line_length": 112,
"avg_line_length": 31.586466165413533,
"alnum_prop": 0.6924541775767674,
"repo_name": "amzn/login-and-pay-with-amazon-sdk-java",
"id": "095eac27fbe5e1159256c6cb6a8d4f94afc75774",
"size": "4786",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/com/amazon/pay/response/ipn/model/ProviderCreditDetails.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "726362"
}
],
"symlink_target": ""
} |
/**
* @note HEADER-ONLY IMPLEMENTATION FILE
* @warn Do not include directly
*/
// C++ Standard library
#include <ctime>
#include <cstring>
// FFNN
#include <ffnn/assert.h>
#include <ffnn/logging.h>
#include <ffnn/optimizer/none.h>
#include <ffnn/internal/signature.h>
namespace ffnn
{
namespace layer
{
// Default constructor; neuron storage is sized later in initialize().
template<typename ValueType,
         template<class> class NeuronType,
         FFNN_SIZE_TYPE SizeAtCompileTime>
Activation<ValueType, NeuronType, SizeAtCompileTime>::Activation()
{}
// Trivial destructor; members clean up via their own destructors.
template<typename ValueType,
         template<class> class NeuronType,
         FFNN_SIZE_TYPE SizeAtCompileTime>
Activation<ValueType, NeuronType, SizeAtCompileTime>::~Activation()
{}
// Sets up the layer: fixes output size to the input size, runs base-class
// initialization, and allocates one neuron per element. Returns false when
// already initialized or when base initialization fails; on success the
// return value reflects the in==out dimension invariant of this layer.
template<typename ValueType,
         template<class> class NeuronType,
         FFNN_SIZE_TYPE SizeAtCompileTime>
bool Activation<ValueType, NeuronType, SizeAtCompileTime>::initialize()
{
  // This layer has equal inputs and outputs
  // NOTE(review): output_dimension_ is set before the already-initialized
  // check below, so it is updated even on the early-return path — confirm
  // this is intended.
  Base::output_dimension_ = Base::countInputs();
  // Abort if layer is already initialized
  if (Base::isInitialized())
  {
    FFNN_WARN_NAMED("layer::Activation", "<" << Base::getID() << "> already initialized.");
    return false;
  }
  else if (!Base::initialize())
  {
    return false;
  }
  // Initialize neurons
  // One activation neuron per output element.
  neurons_.resize(Base::output_dimension_);
  FFNN_DEBUG_NAMED("layer::Activation",
                   "<" <<
                   Base::getID() <<
                   "> initialized as (in=" <<
                   Base::input_dimension_ <<
                   ", out=" <<
                   Base::output_dimension_ <<
                   ")");
  return Base::output_dimension_ == Base::input_dimension_;
}
// Forward pass: applies each neuron's activation function element-wise,
// writing the i-th output from the i-th input. Always reports success.
template<typename ValueType,
         template<class> class NeuronType,
         FFNN_SIZE_TYPE SizeAtCompileTime>
bool Activation<ValueType, NeuronType, SizeAtCompileTime>::forward()
{
  const SizeType n = Base::input_dimension_;
  for (SizeType i = 0; i < n; ++i)
  {
    neurons_[i].fn((*Base::input_)(i), (*Base::output_)(i));
  }
  return true;
}
// Backward pass: computes the activation derivative per element and scales
// it by the error arriving from the next layer (chain rule).
template<typename ValueType,
         template<class> class NeuronType,
         FFNN_SIZE_TYPE SizeAtCompileTime>
bool Activation<ValueType, NeuronType, SizeAtCompileTime>::backward()
{
  // Compute neuron derivatives
  // Seed the backward error buffer with the layer output, then overwrite
  // each element in place with the derivative evaluated at the input.
  Base::backward_error_->noalias() = *Base::output_;
  for (SizeType idx = 0; idx < Base::output_dimension_; idx++)
  {
    neurons_[idx].derivative((*Base::input_)(idx), (*Base::backward_error_)(idx));
  }
  // Incorporate error
  // Element-wise product with the error propagated from downstream.
  Base::backward_error_->array() *= Base::forward_error_->array();
  return true;
}
// Serializes the layer: writes a type signature first so load() can
// validate the archive, then delegates state to the base class.
template<typename ValueType,
         template<class> class NeuronType,
         FFNN_SIZE_TYPE SizeAtCompileTime>
void Activation<ValueType, NeuronType, SizeAtCompileTime>::
  save(typename Activation<ValueType, NeuronType, SizeAtCompileTime>::OutputArchive& ar,
       typename Activation<ValueType, NeuronType, SizeAtCompileTime>::VersionType version) const
{
  ffnn::io::signature::apply<Activation<ValueType, NeuronType, SizeAtCompileTime>>(ar);
  Base::save(ar, version);
  FFNN_DEBUG_NAMED("layer::Activation", "Saved");
}
// Deserializes the layer: verifies the type signature written by save(),
// then restores base-class state from the archive.
template<typename ValueType,
         template<class> class NeuronType,
         FFNN_SIZE_TYPE SizeAtCompileTime>
void Activation<ValueType, NeuronType, SizeAtCompileTime>::
  load(typename Activation<ValueType, NeuronType, SizeAtCompileTime>::InputArchive& ar,
       typename Activation<ValueType, NeuronType, SizeAtCompileTime>::VersionType version)
{
  ffnn::io::signature::check<Activation<ValueType, NeuronType, SizeAtCompileTime>>(ar);
  Base::load(ar, version);
  FFNN_DEBUG_NAMED("layer::Activation", "Loaded");
}
} // namespace layer
} // namespace ffnn
| {
"content_hash": "ef9f775b3b7092d42e55adc71aa3de96",
"timestamp": "",
"source": "github",
"line_count": 120,
"max_line_length": 96,
"avg_line_length": 30.375,
"alnum_prop": 0.6765432098765433,
"repo_name": "briancairl/ffnn-cpp",
"id": "6996ed26fa8a433f42242a6b6ee79e611b548879",
"size": "3645",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ffnn/include/ffnn/layer/impl/activation.hpp",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "1010"
},
{
"name": "C++",
"bytes": "135472"
},
{
"name": "CMake",
"bytes": "4110"
}
],
"symlink_target": ""
} |
A new repository for training purposes.
| {
"content_hash": "abae42f544c66648769195f19edef5b5",
"timestamp": "",
"source": "github",
"line_count": 1,
"max_line_length": 23,
"avg_line_length": 24,
"alnum_prop": 0.7916666666666666,
"repo_name": "igoretk/java_ht_new",
"id": "76118d63ff8d335998186742fb27ec5f16179caf",
"size": "38",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "README.md",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "142972"
},
{
"name": "PHP",
"bytes": "237"
}
],
"symlink_target": ""
} |
package org.omg.IOP;
/**
* org/omg/IOP/TAG_ALTERNATE_IIOP_ADDRESS.java .
* Generated by the IDL-to-Java compiler (portable), version "3.2"
* from c:/workspace/8-2-build-windows-amd64-cygwin/jdk8u65/4987/corba/src/share/classes/org/omg/PortableInterceptor/IOP.idl
* Tuesday, October 6, 2015 4:40:34 PM PDT
*/
// NOTE: generated CORBA stub — do not edit by hand; regenerate from IOP.idl.
public interface TAG_ALTERNATE_IIOP_ADDRESS
{
  /**
   * In cases where the same object key is used for more than one
   * internet location, the following standard IOR Component is defined
   * for support in IIOP version 1.2.
   * <p>
   * The <code>TAG_ALTERNATE_IIOP_ADDRESS</code> component has an
   * associated value of type:
   * <code>
   * <pre>
   *   struct {
   *       string HostID,
   *       short Port
   *   };
   * </pre>
   * </code>
   * encoded as a CDR encapsulation.
   * <p>
   * Zero or more instances of the <code>TAG_ALTERNATE_IIOP_ADDRESS</code>
   * component type may be included in a version 1.2
   * <code>TAG_INTERNET_IOP</code> Profile.  Each of these alternative
   * addresses may be used by the client orb, in addition to the host
   * and port address expressed in the body of the Profile. In cases
   * where one or more <code>TAG_ALTERNATE_IIOP_ADDRESS</code> components
   * are present in a <code>TAG_INTERNET_IOP</code> Profile, no order of
   * use is prescribed by Version 1.2 of IIOP.
   */
  // IOR component tag id assigned to TAG_ALTERNATE_IIOP_ADDRESS.
  public static final int value = (int)(3L);
}
| {
"content_hash": "e69f55cad32fe8ecb58895c623a9a5fb",
"timestamp": "",
"source": "github",
"line_count": 41,
"max_line_length": 123,
"avg_line_length": 36,
"alnum_prop": 0.6436314363143631,
"repo_name": "Java8-CNAPI-Team/Java8CN",
"id": "13cf8f02a8853efbc8de1dd05db7635bd96f0a35",
"size": "1476",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "org/omg/IOP/TAG_ALTERNATE_IIOP_ADDRESS.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "55246870"
}
],
"symlink_target": ""
} |
Recipes
================
These recipes include some command-line utilities written with many comments
and designed to be easy to follow. You can use them as a template for your own
code.
For now, look at the source code and its comments on `github <https://github.com/richardkiss/pycoin/tree/master/recipes/multisig>`_.
| {
"content_hash": "9a0759d872b9cbed6533aa3a066c9851",
"timestamp": "",
"source": "github",
"line_count": 8,
"max_line_length": 132,
"avg_line_length": 40.25,
"alnum_prop": 0.7484472049689441,
"repo_name": "richardkiss/pycoin",
"id": "bb8cf56c81a510ca478c3b8fa97d1b77ed8f7b78",
"size": "322",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "docs/source/recipes.rst",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "115"
},
{
"name": "Python",
"bytes": "752865"
},
{
"name": "Shell",
"bytes": "198"
}
],
"symlink_target": ""
} |
<?xml version="1.0" encoding="utf-8"?>
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto"
android:id="@+id/bottom_sheet_layout"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:layout_gravity="center_horizontal"
android:background="@drawable/bottom_sheet_bg"
android:gravity="center_horizontal"
android:orientation="vertical"
android:padding="8dp"
app:behavior_hideable="true"
app:layout_behavior="com.google.android.material.bottomsheet.BottomSheetBehavior">
<LinearLayout
android:id="@+id/gesture_layout"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:orientation="vertical"
android:paddingTop="10dp"
android:paddingBottom="10dp">
<ImageView
android:id="@+id/bottom_sheet_arrow"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_gravity="center"
android:src="@drawable/icn_chevron_up" />
<LinearLayout
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:layout_marginTop="10dp"
android:orientation="horizontal">
<TextView
android:id="@+id/detected_item"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:textColor="@android:color/black"
android:textSize="16sp" />
<TextView
android:id="@+id/detected_item_value"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:gravity="right"
android:textColor="@android:color/black"
android:textSize="16sp" />
</LinearLayout>
<LinearLayout
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:layout_marginTop="10dp"
android:orientation="horizontal">
<TextView
android:id="@+id/detected_item1"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:textColor="@android:color/darker_gray"
android:textSize="12sp" />
<TextView
android:id="@+id/detected_item1_value"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:gravity="right"
android:textColor="@android:color/darker_gray"
android:textSize="12sp" />
</LinearLayout>
<LinearLayout
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:layout_marginTop="10dp"
android:orientation="horizontal">
<TextView
android:id="@+id/detected_item2"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:textColor="@android:color/darker_gray"
android:textSize="12sp" />
<TextView
android:id="@+id/detected_item2_value"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:gravity="right"
android:textColor="@android:color/darker_gray"
android:textSize="12sp" />
</LinearLayout>
<View
android:layout_width="match_parent"
android:layout_height="1px"
android:layout_marginTop="8dp"
android:background="@android:color/darker_gray" />
</LinearLayout>
<LinearLayout
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:orientation="horizontal">
<TextView
android:id="@+id/frame"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_marginTop="10dp"
android:text="Frame"
android:textColor="@android:color/black" />
<TextView
android:id="@+id/frame_info"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:layout_marginTop="10dp"
android:gravity="right"
android:text="640*480"
android:textColor="@android:color/black" />
</LinearLayout>
<LinearLayout
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:orientation="horizontal">
<TextView
android:id="@+id/crop"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_marginTop="10dp"
android:text="Crop"
android:textColor="@android:color/black" />
<TextView
android:id="@+id/crop_info"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:layout_marginTop="10dp"
android:gravity="right"
android:text="640*480"
android:textColor="@android:color/black" />
</LinearLayout>
<LinearLayout
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:orientation="horizontal">
<TextView
android:id="@+id/view"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_marginTop="10dp"
android:text="View"
android:textColor="@android:color/black" />
<TextView
android:id="@+id/view_info"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:layout_marginTop="10dp"
android:gravity="right"
android:text="640*480"
android:textColor="@android:color/black" />
</LinearLayout>
<LinearLayout
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:orientation="horizontal">
<TextView
android:id="@+id/rotation"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_marginTop="10dp"
android:text="Rotation"
android:textColor="@android:color/black" />
<TextView
android:id="@+id/rotation_info"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:layout_marginTop="10dp"
android:gravity="right"
android:text="640*480"
android:textColor="@android:color/black" />
</LinearLayout>
<LinearLayout
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:orientation="horizontal">
<TextView
android:id="@+id/inference"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_marginTop="10dp"
android:text="Inference Time"
android:textColor="@android:color/black" />
<TextView
android:id="@+id/inference_info"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:layout_marginTop="10dp"
android:gravity="right"
android:text="640*480"
android:textColor="@android:color/black" />
</LinearLayout>
<View
android:layout_width="match_parent"
android:layout_height="1px"
android:layout_marginTop="10dp"
android:background="@android:color/darker_gray" />
<RelativeLayout
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:layout_marginTop="10dp"
android:orientation="horizontal">
<TextView
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_marginTop="10dp"
android:text="Threads"
android:textColor="@android:color/black" />
<LinearLayout
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_alignParentRight="true"
android:background="@drawable/rectangle"
android:gravity="center"
android:orientation="horizontal"
android:padding="4dp">
<ImageView
android:id="@+id/minus"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:src="@drawable/ic_baseline_remove" />
<TextView
android:id="@+id/threads"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_marginLeft="10dp"
android:layout_marginRight="10dp"
android:text="1"
android:textColor="@android:color/black"
android:textSize="14sp" />
<ImageView
android:id="@+id/plus"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:src="@drawable/ic_baseline_add" />
</LinearLayout>
</RelativeLayout>
<View
android:layout_width="match_parent"
android:layout_height="1px"
android:layout_marginTop="10dp"
android:background="@android:color/darker_gray" />
<RelativeLayout
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:orientation="horizontal">
<TextView
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_marginTop="10dp"
android:text="@string/tfe_ic_model"
android:textColor="@android:color/black" />
<Spinner
android:id="@+id/model_spinner"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_alignParentRight="true"
android:layout_marginTop="10dp"
android:entries="@array/tfe_ic_models"
android:prompt="@string/tfe_ic_model" />
</RelativeLayout>
<RelativeLayout
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:orientation="horizontal">
<TextView
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_marginTop="10dp"
android:text="@string/tfe_ic_device"
android:textColor="@android:color/black" />
<Spinner
android:id="@+id/device_spinner"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_marginTop="10dp"
android:layout_alignParentRight="true"
android:entries="@array/tfe_ic_devices"
android:prompt="@string/tfe_ic_device" />
</RelativeLayout>
</LinearLayout>
| {
"content_hash": "69c36d100d0c5be7d9d276860a56dd52",
"timestamp": "",
"source": "github",
"line_count": 321,
"max_line_length": 86,
"avg_line_length": 35.84423676012461,
"alnum_prop": 0.5798713714583695,
"repo_name": "intel-isl/MiDaS",
"id": "77a348af90e2ed995ff106cd209cbf304c6b9153",
"size": "11506",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "mobile/android/app/src/main/res/layout/tfe_ic_layout_bottom_sheet.xml",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "9891"
},
{
"name": "CMake",
"bytes": "6607"
},
{
"name": "Dockerfile",
"bytes": "809"
},
{
"name": "Java",
"bytes": "113288"
},
{
"name": "Python",
"bytes": "63838"
},
{
"name": "Ruby",
"bytes": "408"
},
{
"name": "Shell",
"bytes": "3713"
},
{
"name": "Swift",
"bytes": "58465"
}
],
"symlink_target": ""
} |
package org.apache.harmony.vts.test.vm.jvmti;
/**
* @author Valentin Al. Sitnick
* @version $Revision: 1.1 $
*
*/
// JVMTI test driver: loads SpecialClass002 through a user-defined class
// loader, invokes a method on it, then drops every reference. The pass/fail
// verdict comes from the native JVMTI agent observing the VM, not from this
// Java code.
public class GetClassLoader0101 {
    static public void main(String args[]) {
        CustomClassLoader_GetClassLoader0101 ccl = new CustomClassLoader_GetClassLoader0101();
        try {
            // Load by name through the custom loader (not the system loader)
            // so the class's defining loader is ccl.
            Class specclass = ccl.loadClass(
                "org.apache.harmony.vts.test.vm.jvmti.SpecialClass002");
            SpecialClass002 MyClass = (SpecialClass002) specclass.newInstance();
            MyClass.superMegaMethod();
            // Null out all references — presumably to let the class and its
            // loader become unreachable for the agent to observe; TODO confirm
            // against the test plan.
            MyClass = null;
            specclass = null;
            ccl = null;
        } catch (Throwable tex) { } // deliberately swallowed: the agent, not this code, reports failures
        return;
    }
}
// Minimal user-defined loader: overrides nothing, so all loading delegates to
// ClassLoader's default implementation. Its only purpose is to give loaded
// classes a non-null, non-system defining loader for GetClassLoader to report.
class CustomClassLoader_GetClassLoader0101 extends ClassLoader {
    // Unused padding field — presumably only to give the loader class a
    // distinct shape; TODO confirm against the JVMTI test plan.
    int fake = 0;
}
| {
"content_hash": "60cfb968237cd0a33a9c38bda83d889a",
"timestamp": "",
"source": "github",
"line_count": 32,
"max_line_length": 94,
"avg_line_length": 24.40625,
"alnum_prop": 0.6209987195902689,
"repo_name": "freeVM/freeVM",
"id": "e96d1f840f2514f823d0e9fd269b229ad37c1fb0",
"size": "1434",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "enhanced/buildtest/tests/vts/vm/src/test/vm/jvmti/funcs/GetClassLoader/GetClassLoader0101/GetClassLoader0101.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "116828"
},
{
"name": "C",
"bytes": "17860389"
},
{
"name": "C++",
"bytes": "19007206"
},
{
"name": "CSS",
"bytes": "217777"
},
{
"name": "Java",
"bytes": "152108632"
},
{
"name": "Objective-C",
"bytes": "106412"
},
{
"name": "Objective-J",
"bytes": "11029421"
},
{
"name": "Perl",
"bytes": "305690"
},
{
"name": "Scilab",
"bytes": "34"
},
{
"name": "Shell",
"bytes": "153821"
},
{
"name": "XSLT",
"bytes": "152859"
}
],
"symlink_target": ""
} |
namespace asio {
namespace detail {
// Thin RAII wrapper over std::thread used as asio's thread abstraction.
// Not copyable; joins the underlying thread on destruction.
class std_thread
  : private noncopyable
{
public:
  // Constructor. Starts a thread running f. The unsigned int argument is
  // accepted for interface compatibility with other thread backends and is
  // ignored here.
  template <typename Function>
  std_thread(Function f, unsigned int = 0)
    : thread_(f)
  {
  }

  // Destructor. Blocks until the spawned thread has finished.
  ~std_thread()
  {
    join();
  }

  // Wait for the thread to exit. Safe to call more than once; a second
  // call finds the thread no longer joinable and returns immediately.
  void join()
  {
    if (!thread_.joinable())
      return;
    thread_.join();
  }

private:
  std::thread thread_;
};
} // namespace detail
} // namespace asio
#include "asio/detail/pop_options.hpp"
#endif // defined(ASIO_HAS_STD_THREAD)
#endif // ASIO_DETAIL_STD_THREAD_HPP
| {
"content_hash": "330a70d6602ff40bfe08d4e07755ee54",
"timestamp": "",
"source": "github",
"line_count": 39,
"max_line_length": 42,
"avg_line_length": 14.435897435897436,
"alnum_prop": 0.6252220248667851,
"repo_name": "laeotropic/HTTP-Proxy",
"id": "dc11d7e1b483927dac159002bb693c189669de3c",
"size": "1206",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "deps/asio-1.10.1/include/asio/detail/std_thread.hpp",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "C++",
"bytes": "25868"
}
],
"symlink_target": ""
} |
require 'thor'
require 'thor/group'
module HalfMoon
class ProjectGenerator < Thor::Group
include Thor::Actions
argument :name
# 実行されたディレクトリに作成する
def self.source_root
File.expand_path(File.dirname(__FILE__))
end
def create_assets
@current = File.expand_path(File.dirname('.'))
%w(css img js).each do |dir|
empty_directory "#{@current}/#{name}/app/assets/#{dir}"
end
end
def create_config
template './templates/config/config.tt', "#{@current}/#{name}/app/config/config.rb"
copy_file './templates/config/routes.rb', "#{@current}/#{name}/app/config/routes.rb"
end
def create_mvc
%w(models views controllers).each do |dir|
empty_directory "#{@current}/#{name}/app/#{dir}"
end
end
def create_db
empty_directory "#{@current}/#{name}/app/db/migration"
copy_file './templates/models/seeds.rb', "#{@current}/#{name}/app/db/seeds.rb"
end
def create_exceptions
copy_file './templates/exceptions/default.erb', "#{@current}/#{name}/app/exceptions/default.erb"
end
def create_other_files
%w(config.ru Rakefile).each do |file|
copy_file "./templates/#{file}", "#{@current}/#{name}/#{file}"
end
end
def complete_message
say 'Creation complete!', :green
end
end
class ControllerGenerator < Thor::Group
include Thor::Actions
argument :name
argument :methods
def self.source_root
File.expand_path(File.dirname(__FILE__))
end
def setting
require 'halfmoon/config'
require "#{File.expand_path('.')}/app/config/config.rb"
@ctrl = Config[:root] + Config[:ctrl_path]
@view = Config[:root] + Config[:view_path]
end
def create_controller
template './templates/controller.tt', "#{@ctrl}/#{name}_controller.rb"
end
def create_view
config = { title: '<%=Config[:title]%>', method: nil }
methods.each do |m|
config[:method] = m
template './templates/view.tt', "#{@view}/#{name}/#{m}.html.erb", config
end
end
def complete_message
say 'Creation complete!', :green
end
end
class ModelGenerator < Thor::Group
include Thor::Actions
argument :name
argument :column
def self.source_root
File.expand_path(File.dirname(__FILE__))
end
def setting
require 'halfmoon/config'
require "#{File.expand_path('.')}/app/config/config.rb"
require 'active_support/all'
@model = Config[:root] + Config[:model_path]
@migrate = Config[:root] + Config[:db_path]
@c = {}
end
def parse_args
options = %w(default size)
@column.each do |v|
name, types = v.split(':')
type, *opts = types.split('/')
opts.map! do |opt|
case opt
when /^(\w+?)\((.+)\)/i
"#{Regexp.last_match(1)}: #{Regexp.last_match(2)}" if options.include?(Regexp.last_match(1))
when /^not_null$/i then 'null: false'
when /^text$/i then 'text: true'
when /^primary$/i then 'primary_key: true'
end
end
opts.compact!
@c.store(name, [type, opts].compact)
end
end
def create_model
template './templates/models/model.tt', "#{@model}/#{name}.rb"
end
def create_migration
template './templates/models/migration.tt', "#{@migrate}/migration/#{name.pluralize}_migration.rb"
end
def complete_message
say 'Creation complete!', :green
end
end
end
| {
"content_hash": "d5b1f03b0f6d55a0000536a2287f7ed9",
"timestamp": "",
"source": "github",
"line_count": 137,
"max_line_length": 104,
"avg_line_length": 25.927007299270073,
"alnum_prop": 0.588963963963964,
"repo_name": "sobreera/halfmoon",
"id": "f96326d4833908d079fb85e4469b2ac1be9c05b8",
"size": "3584",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/halfmoon/commands/generator.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "212"
},
{
"name": "Ruby",
"bytes": "26086"
}
],
"symlink_target": ""
} |
<FrameLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
tools:context="com.avapira.bobroreader.ThreadFragment">
<!-- TODO: Update blank fragment layout -->
<TextView
android:layout_width="match_parent"
android:layout_height="match_parent"
android:text="@string/hello_blank_fragment"/>
</FrameLayout>
| {
"content_hash": "4b138eacd2db8e0c1d2ef7b1cea5580c",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 71,
"avg_line_length": 41.07692307692308,
"alnum_prop": 0.6385767790262172,
"repo_name": "AvaPirA/Bobroreader",
"id": "08c366598f076b1be430a84b50d8525072c5a788",
"size": "534",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Bobroreader/src/main/res/layout/fragment_thread.xml",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Java",
"bytes": "167566"
}
],
"symlink_target": ""
} |
""" P1 tests for VPN users
"""
# Import Local Modules
from nose.plugins.attrib import attr
from marvin.cloudstackException import cloudstackAPIException
from marvin.cloudstackTestCase import cloudstackTestCase
from marvin.integration.lib.base import (
Account,
ServiceOffering,
VirtualMachine,
PublicIPAddress,
Vpn,
VpnUser,
Configurations,
NATRule
)
from marvin.integration.lib.common import (get_domain,
get_zone,
get_template,
cleanup_resources,
)
class Services:
    """Test VPN users Services

    Pure configuration holder: every fixture parameter used by the
    TestVPNUsers suite lives in self.services.
    """
    def __init__(self):
        self.services = {
            "account": {
                "email": "[email protected]",
                "firstname": "Test",
                "lastname": "User",
                "username": "test",
                # Random characters are appended for unique
                # username
                "password": "password",
            },
            "service_offering": {
                "name": "Tiny Instance",
                "displaytext": "Tiny Instance",
                "cpunumber": 1,
                "cpuspeed": 100,    # in MHz
                "memory": 128,    # In MBs
            },
            "disk_offering": {
                "displaytext": "Small Disk Offering",
                "name": "Small Disk Offering",
                "disksize": 1
            },
            "virtual_machine": {
                "displayname": "TestVM",
                "username": "root",
                "password": "password",
                "ssh_port": 22,
                "hypervisor": 'KVM',
                "privateport": 22,
                "publicport": 22,
                "protocol": 'TCP',
            },
            # Credentials for VpnUser.create; a random suffix is appended
            # unless rand_name=False is passed.
            "vpn_user": {
                "username": "test",
                "password": "test",
            },
            # UDP 1701 is the L2TP port — used to provoke the VPN/port
            # forwarding conflict tests.
            "natrule": {
                "privateport": 1701,
                "publicport": 1701,
                "protocol": "UDP"
            },
            "ostype": 'CentOS 5.5 (64-bit)',
            "sleep": 60,
            "timeout": 10,
            # Networking mode: Advanced, Basic
        }
class TestVPNUsers(cloudstackTestCase):
    """Component tests for CloudStack remote-access VPN user management.

    All assertions are side effects against a live CloudStack deployment
    via the marvin library; there is no in-process logic under test.
    """
    @classmethod
    def setUpClass(cls):
        # One-time fixtures shared by all tests: zone/domain/template lookups
        # plus a service offering, cleaned up in tearDownClass.
        cls.api_client = super(TestVPNUsers,
                               cls).getClsTestClient().getApiClient()
        cls.services = Services().services
        # Get Zone, Domain and templates
        cls.domain = get_domain(cls.api_client, cls.services)
        cls.zone = get_zone(cls.api_client, cls.services)
        cls.services["mode"] = cls.zone.networktype
        cls.template = get_template(
                            cls.api_client,
                            cls.zone.id,
                            cls.services["ostype"]
                            )
        cls.services["virtual_machine"]["zoneid"] = cls.zone.id
        cls.service_offering = ServiceOffering.create(
                            cls.api_client,
                            cls.services["service_offering"]
                            )
        cls._cleanup = [cls.service_offering, ]
        return
    @classmethod
    def tearDownClass(cls):
        try:
            # Cleanup resources used
            cleanup_resources(cls.api_client, cls._cleanup)
        except Exception as e:
            raise Exception("Warning: Exception during cleanup : %s" % e)
        return
    def setUp(self):
        # Per-test fixtures: a fresh account, one VM, and an acquired public
        # IP. Deleting the account in tearDown cascades to the VM and IP.
        try:
            self.apiclient = self.testClient.getApiClient()
            self.dbclient = self.testClient.getDbConnection()
            self.account = Account.create(
                                self.apiclient,
                                self.services["account"],
                                domainid=self.domain.id
                                )
            self.cleanup = [
                            self.account,
                            ]
            self.virtual_machine = VirtualMachine.create(
                                self.apiclient,
                                self.services["virtual_machine"],
                                templateid=self.template.id,
                                accountid=self.account.name,
                                domainid=self.account.domainid,
                                serviceofferingid=self.service_offering.id
                                )
            self.public_ip = PublicIPAddress.create(
                                self.apiclient,
                                accountid=self.virtual_machine.account,
                                zoneid=self.virtual_machine.zoneid,
                                domainid=self.virtual_machine.domainid,
                                services=self.services["virtual_machine"]
                                )
            return
        except cloudstackAPIException as e:
            # Partial fixtures are torn down before re-raising so a failed
            # setUp does not leak resources.
            self.tearDown()
            raise e
    def tearDown(self):
        try:
            # Clean up, terminate the created instance, volumes and snapshots
            cleanup_resources(self.apiclient, self.cleanup)
        except Exception as e:
            raise Exception("Warning: Exception during cleanup : %s" % e)
        return
    def create_VPN(self, public_ip):
        """Creates VPN for the network"""
        # Enables remote-access VPN on the given public IP and verifies it
        # appears in the listing; fails the test on any error.
        self.debug("Creating VPN with public IP: %s" % public_ip.ipaddress.id)
        try:
            # Assign VPN to Public IP
            vpn = Vpn.create(self.apiclient,
                             self.public_ip.ipaddress.id,
                             account=self.account.name,
                             domainid=self.account.domainid)
            self.debug("Verifying the remote VPN access")
            vpns = Vpn.list(self.apiclient,
                            publicipid=public_ip.ipaddress.id,
                            listall=True)
            self.assertEqual(
                isinstance(vpns, list),
                True,
                "List VPNs shall return a valid response"
                )
            return vpn
        except Exception as e:
            self.fail("Failed to create remote VPN access: %s" % e)
    def create_VPN_Users(self, rand_name=True, api_client=None):
        """Creates VPN users for the network"""
        # rand_name=False forces the fixed username from Services, which is
        # how the duplicate-user test provokes a collision. api_client lets
        # tests add users as a different (admin) caller.
        self.debug("Creating VPN users for account: %s" %
                   self.account.name)
        if api_client is None:
            api_client = self.apiclient
        try:
            vpnuser = VpnUser.create(
                                 api_client,
                                 self.services["vpn_user"]["username"],
                                 self.services["vpn_user"]["password"],
                                 account=self.account.name,
                                 domainid=self.account.domainid,
                                 rand_name=rand_name
                                 )
            self.debug("Verifying the remote VPN access")
            vpn_users = VpnUser.list(self.apiclient,
                                     id=vpnuser.id,
                                     listall=True)
            self.assertEqual(
                isinstance(vpn_users, list),
                True,
                "List VPNs shall return a valid response"
                )
            return vpnuser
        except Exception as e:
            self.fail("Failed to create remote VPN users: %s" % e)
    @attr(tags=["advanced", "advancedns"])
    @attr(configuration='remote.access.vpn.user.limit')
    def test_01_VPN_user_limit(self):
        """VPN remote access user limit tests"""

        # Validate the following
        # prerequisite: change management configuration setting of
        #    remote.access.vpn.user.limit
        # 1. provision more users than is set in the limit
        #    Provisioning of users after the limit should failProvisioning of
        #    users after the limit should fail
        # NOTE: the limit is read from the deployment, not set here — the
        # @attr(configuration=...) tag marks the dependency.
        self.debug("Fetching the limit for remote access VPN users")
        configs = Configurations.list(
                                      self.apiclient,
                                      name='remote.access.vpn.user.limit',
                                      listall=True)
        self.assertEqual(isinstance(configs, list),
                         True,
                         "List configs should return a valid response")
        limit = int(configs[0].value)
        self.debug("Enabling the VPN access for IP: %s" %
                                            self.public_ip.ipaddress)
        self.create_VPN(self.public_ip)
        self.debug("Creating %s VPN users" % limit)
        for x in range(limit):
            self.create_VPN_Users()
        self.debug("Adding another user exceeding limit for remote VPN users")
        with self.assertRaises(Exception):
            self.create_VPN_Users()
        self.debug("Limit exceeded exception raised!")
        return
    @attr(tags=["advanced", "advancedns"])
    def test_02_use_vpn_port(self):
        """Test create VPN when L2TP port in use"""

        # Validate the following
        # 1. set a port forward for UDP: 1701 and enable VPN
        # 2. set port forward rule for the udp port 1701 over which L2TP works
        # 3. port forward should prevent VPN from being enabled
        self.debug("Creating a port forwarding rule on port 1701")
        # Create NAT rule
        nat_rule = NATRule.create(
                                  self.apiclient,
                                  self.virtual_machine,
                                  self.services["natrule"],
                                  self.public_ip.ipaddress.id)
        self.debug("Verifying the NAT rule created")
        nat_rules = NATRule.list(self.apiclient, id=nat_rule.id, listall=True)
        self.assertEqual(isinstance(nat_rules, list),
                         True,
                         "List NAT rules should return a valid response")
        self.debug("Enabling the VPN connection for IP: %s" %
                                                self.public_ip.ipaddress)
        # The L2TP port being forwarded must make VPN enablement fail.
        with self.assertRaises(Exception):
            self.create_VPN(self.public_ip)
        self.debug("Create VPN connection failed! Test successful!")
        return
    @attr(tags=["advanced", "advancedns"])
    def test_03_enable_vpn_use_port(self):
        """Test create NAT rule when VPN when L2TP enabled"""

        # Validate the following
        # 1. Enable a VPN connection on source NAT
        # 2. Add a VPN user
        # 3. add a port forward rule for UDP port 1701.  Should result in error
        #    saying that VPN is enabled over port 1701
        self.debug("Enabling the VPN connection for IP: %s" %
                                                self.public_ip.ipaddress)
        self.create_VPN(self.public_ip)
        self.debug("Creating a port forwarding rule on port 1701")
        # Create NAT rule
        # Mirror image of test_02: with VPN up, forwarding 1701 must fail.
        with self.assertRaises(Exception):
            NATRule.create(
                           self.apiclient,
                           self.virtual_machine,
                           self.services["natrule"],
                           self.public_ip.ipaddress.id)
        self.debug("Create NAT rule failed! Test successful!")
        return
    @attr(tags=["advanced", "advancedns"])
    def test_04_add_new_users(self):
        """Test add new users to existing VPN"""

        # Validate the following
        # 1. Enable a VPN connection on source NAT
        # 2. Add new user to VPN when there are already existing users.
        # 3. We should be able to successfully establish a VPN connection using
        #    the newly added user credential.
        self.debug("Enabling the VPN connection for IP: %s" %
                                                self.public_ip.ipaddress)
        self.create_VPN(self.public_ip)
        try:
            self.debug("Adding new VPN user to account: %s" %
                                                self.account.name)
            self.create_VPN_Users()
            # TODO: Verify the VPN connection
            self.debug("Adding another user to account")
            self.create_VPN_Users()
            # TODO: Verify the VPN connection with new user
        except Exception as e:
            self.fail("Failed to create new VPN user: %s" % e)
        return
    @attr(tags=["advanced", "advancedns"])
    def test_05_add_duplicate_user(self):
        """Test add duplicate user to existing VPN"""

        # Validate the following
        # 1. Enable a VPN connection on source NAT
        # 2. Add a VPN user say "abc" that already an added user to the VPN.
        # 3. Adding this VPN user should fail.
        self.debug("Enabling the VPN connection for IP: %s" %
                                                self.public_ip.ipaddress)
        self.create_VPN(self.public_ip)
        self.debug("Adding new VPN user to account: %s" %
                                                self.account.name)
        # rand_name=False pins the username so the second create collides.
        self.create_VPN_Users(rand_name=False)
        # TODO: Verify the VPN connection
        self.debug("Adding another user to account with same username")
        with self.assertRaises(Exception):
            self.create_VPN_Users(rand_name=False)
        return
    @attr(tags=["advanced", "advancedns"])
    def test_06_add_VPN_user_global_admin(self):
        """Test as global admin, add a new VPN user to an existing VPN entry
            that was created by another account."""

        # Steps for verification
        # 1. Create a new user and deploy few Vms.
        # 2. Enable VPN access. Add few VPN users.
        # 3. Make sure that VPN access works as expected.
        # 4. As global Admin , add VPN user to this user's existing VPN entry.
        # Validate the following
        # 1. The newly added VPN user should get configured to the router of
        #    user account.
        # 2. We should be able to use this newly created user credential to
        #    establish VPN connection that will give access all VMs of this user
        self.debug("Enabling VPN connection to account: %s" %
                                                self.account.name)
        self.create_VPN(self.public_ip)
        self.debug("Creating VPN user for the account: %s" %
                                                    self.account.name)
        self.create_VPN_Users()
        self.debug("Creating a global admin account")
        admin = Account.create(self.apiclient,
                               self.services["account"],
                               admin=True,
                               domainid=self.account.domainid)
        self.cleanup.append(admin)
        self.debug("Creating API client for newly created user")
        # NOTE(review): this client is built for self.account, not for the
        # admin account created above — presumably the admin's rights come
        # from elsewhere; confirm this exercises the intended caller.
        api_client = self.testClient.createUserApiClient(
                                    UserName=self.account.name,
                                    DomainName=self.account.domain)
        self.debug("Adding new user to VPN as a global admin: %s" %
                                                        admin.name)
        try:
            self.create_VPN_Users(api_client=api_client)
        except Exception as e:
            self.fail("Global admin should be allowed to create VPN user: %s" %
                                                                            e)
        return
    @attr(tags=["advanced", "advancedns"])
    def test_07_add_VPN_user_domain_admin(self):
        """Test as domain admin, add a new VPN user to an existing VPN entry
            that was created by another account."""

        # Steps for verification
        # 1. Create a new user and deploy few Vms.
        # 2. Enable VPN access. Add few VPN users.
        # 3. Make sure that VPN access works as expected.
        # 4. As domain Admin , add VPN user to this user's existing VPN entry.
        # Validate the following
        # 1. The newly added VPN user should get configured to the router of
        #    user account.
        # 2. We should be able to use this newly created user credential to
        #    establish VPN connection that will give access all VMs of this user
        self.debug("Enabling VPN connection to account: %s" %
                                                self.account.name)
        self.create_VPN(self.public_ip)
        self.debug("Creating VPN user for the account: %s" %
                                                    self.account.name)
        self.create_VPN_Users()
        self.debug("Creating a domain admin account")
        admin = Account.create(self.apiclient,
                               self.services["account"],
                               domainid=self.account.domainid)
        self.cleanup.append(admin)
        self.debug("Creating API client for newly created user")
        # NOTE(review): as in test_06, the client targets self.account rather
        # than the domain-admin account just created — confirm intent.
        api_client = self.testClient.createUserApiClient(
                                    UserName=self.account.name,
                                    DomainName=self.account.domain)
        self.debug("Adding new user to VPN as a domain admin: %s" %
                                                        admin.name)
        try:
            self.create_VPN_Users(api_client=api_client)
        except Exception as e:
            self.fail("Domain admin should be allowed to create VPN user: %s" %
                                                                            e)
        return
| {
"content_hash": "8e5598b22dbbed059c7ae1234f479954",
"timestamp": "",
"source": "github",
"line_count": 435,
"max_line_length": 88,
"avg_line_length": 42.813793103448276,
"alnum_prop": 0.47540807560137455,
"repo_name": "mufaddalq/cloudstack-datera-driver",
"id": "9ee907bd94c7e86e092fc811cc59352e03050bd0",
"size": "19410",
"binary": false,
"copies": "1",
"ref": "refs/heads/4.2",
"path": "test/integration/component/test_vpn_users.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ApacheConf",
"bytes": "250"
},
{
"name": "Batchfile",
"bytes": "6317"
},
{
"name": "CSS",
"bytes": "302008"
},
{
"name": "FreeMarker",
"bytes": "4917"
},
{
"name": "HTML",
"bytes": "38671"
},
{
"name": "Java",
"bytes": "79758943"
},
{
"name": "JavaScript",
"bytes": "4237188"
},
{
"name": "Perl",
"bytes": "1879"
},
{
"name": "Python",
"bytes": "5187499"
},
{
"name": "Shell",
"bytes": "803262"
}
],
"symlink_target": ""
} |
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1">
<title>mathcomp-field: Not compatible 👼</title>
<link rel="shortcut icon" type="image/png" href="../../../../../favicon.png" />
<link href="../../../../../bootstrap.min.css" rel="stylesheet">
<link href="../../../../../bootstrap-custom.css" rel="stylesheet">
<link href="//maxcdn.bootstrapcdn.com/font-awesome/4.2.0/css/font-awesome.min.css" rel="stylesheet">
<script src="../../../../../moment.min.js"></script>
<!-- HTML5 Shim and Respond.js IE8 support of HTML5 elements and media queries -->
<!-- WARNING: Respond.js doesn't work if you view the page via file:// -->
<!--[if lt IE 9]>
<script src="https://oss.maxcdn.com/html5shiv/3.7.2/html5shiv.min.js"></script>
<script src="https://oss.maxcdn.com/respond/1.4.2/respond.min.js"></script>
<![endif]-->
</head>
<body>
<div class="container">
<div class="navbar navbar-default" role="navigation">
<div class="container-fluid">
<div class="navbar-header">
<a class="navbar-brand" href="../../../../.."><i class="fa fa-lg fa-flag-checkered"></i> Coq bench</a>
</div>
<div id="navbar" class="collapse navbar-collapse">
<ul class="nav navbar-nav">
<li><a href="../..">clean / released</a></li>
<li class="active"><a href="">8.15.0 / mathcomp-field - 1.12.0</a></li>
</ul>
</div>
</div>
</div>
<div class="article">
<div class="row">
<div class="col-md-12">
<a href="../..">« Up</a>
<h1>
mathcomp-field
<small>
1.12.0
<span class="label label-info">Not compatible 👼</span>
</small>
</h1>
<p>📅 <em><script>document.write(moment("2022-10-21 19:22:45 +0000", "YYYY-MM-DD HH:mm:ss Z").fromNow());</script> (2022-10-21 19:22:45 UTC)</em></p>
<h2>Context</h2>
<pre># Packages matching: installed
# Name # Installed # Synopsis
base-bigarray base
base-threads base
base-unix base
conf-findutils 1 Virtual package relying on findutils
conf-gmp 4 Virtual package relying on a GMP lib system installation
coq 8.15.0 Formal proof management system
dune 3.4.1 Fast, portable, and opinionated build system
ocaml 4.07.1 The OCaml compiler (virtual package)
ocaml-base-compiler 4.07.1 Official release 4.07.1
ocaml-config 1 OCaml Switch Configuration
ocaml-secondary-compiler 4.08.1-1 OCaml 4.08.1 Secondary Switch Compiler
ocamlfind 1.9.1 A library manager for OCaml
ocamlfind-secondary 1.9.1 Adds support for ocaml-secondary-compiler to ocamlfind
zarith 1.12 Implements arithmetic and logical operations over arbitrary-precision integers
# opam file:
opam-version: "2.0"
maintainer: "Mathematical Components <[email protected]>"
homepage: "https://math-comp.github.io/"
bug-reports: "https://github.com/math-comp/math-comp/issues"
dev-repo: "git+https://github.com/math-comp/math-comp.git"
license: "CECILL-B"
build: [ make "-C" "mathcomp/field" "-j" "%{jobs}%" "COQEXTRAFLAGS+=-native-compiler yes" {coq-native:installed & coq:version < "8.13~" } ]
install: [ make "-C" "mathcomp/field" "install" ]
depends: [ "coq-mathcomp-solvable" { = version } ]
tags: [ "keyword:algebra" "keyword:field" "keyword:small scale reflection" "keyword:mathematical components" "keyword:odd order theorem" "logpath:mathcomp.field" ]
authors: [ "Jeremy Avigad <>" "Andrea Asperti <>" "Stephane Le Roux <>" "Yves Bertot <>" "Laurence Rideau <>" "Enrico Tassi <>" "Ioana Pasca <>" "Georges Gonthier <>" "Sidi Ould Biha <>" "Cyril Cohen <>" "Francois Garillot <>" "Alexey Solovyev <>" "Russell O'Connor <>" "Laurent Théry <>" "Assia Mahboubi <>" ]
synopsis: "Mathematical Components Library on Fields"
description:"""
This library contains definitions and theorems about field extensions,
galois theory, algebraic numbers, cyclotomic polynomials...
"""
url {
src: "https://github.com/math-comp/math-comp/archive/mathcomp-1.12.0.tar.gz"
checksum: "sha256=a57b79a280e7e8527bf0d8710c1f65cde00032746b52b87be1ab12e6213c9783"
}
</pre>
<h2>Lint</h2>
<dl class="dl-horizontal">
<dt>Command</dt>
<dd><code>true</code></dd>
<dt>Return code</dt>
<dd>0</dd>
</dl>
<h2>Dry install 🏜️</h2>
<p>Dry install with the current Coq version:</p>
<dl class="dl-horizontal">
<dt>Command</dt>
<dd><code>opam install -y --show-action coq-mathcomp-field.1.12.0 coq.8.15.0</code></dd>
<dt>Return code</dt>
<dd>5120</dd>
<dt>Output</dt>
<dd><pre>[NOTE] Package coq is already installed (current version is 8.15.0).
The following dependencies couldn't be met:
- coq-mathcomp-field -> coq-mathcomp-solvable = 1.12.0 -> coq-mathcomp-algebra = 1.12.0 -> coq-mathcomp-fingroup = 1.12.0 -> coq-mathcomp-ssreflect = 1.12.0 -> coq < 8.15~ -> ocaml < 4.06.0
base of this switch (use `--unlock-base' to force)
Your request can't be satisfied:
- No available version of coq satisfies the constraints
No solution found, exiting
</pre></dd>
</dl>
<p>Dry install without Coq/switch base, to test if the problem was incompatibility with the current Coq/OCaml version:</p>
<dl class="dl-horizontal">
<dt>Command</dt>
<dd><code>opam remove -y coq; opam install -y --show-action --unlock-base coq-mathcomp-field.1.12.0</code></dd>
<dt>Return code</dt>
<dd>0</dd>
</dl>
<h2>Install dependencies</h2>
<dl class="dl-horizontal">
<dt>Command</dt>
<dd><code>true</code></dd>
<dt>Return code</dt>
<dd>0</dd>
<dt>Duration</dt>
<dd>0 s</dd>
</dl>
<h2>Install 🚀</h2>
<dl class="dl-horizontal">
<dt>Command</dt>
<dd><code>true</code></dd>
<dt>Return code</dt>
<dd>0</dd>
<dt>Duration</dt>
<dd>0 s</dd>
</dl>
<h2>Installation size</h2>
<p>No files were installed.</p>
<h2>Uninstall 🧹</h2>
<dl class="dl-horizontal">
<dt>Command</dt>
<dd><code>true</code></dd>
<dt>Return code</dt>
<dd>0</dd>
<dt>Missing removes</dt>
<dd>
none
</dd>
<dt>Wrong removes</dt>
<dd>
none
</dd>
</dl>
</div>
</div>
</div>
<hr/>
<div class="footer">
<p class="text-center">
Sources are on <a href="https://github.com/coq-bench">GitHub</a> © Guillaume Claret 🐣
</p>
</div>
</div>
<script src="https://ajax.googleapis.com/ajax/libs/jquery/1.11.1/jquery.min.js"></script>
<script src="../../../../../bootstrap.min.js"></script>
</body>
</html>
| {
"content_hash": "0b767f6d350eb127d8e1b93e4d77caec",
"timestamp": "",
"source": "github",
"line_count": 164,
"max_line_length": 554,
"avg_line_length": 49.44512195121951,
"alnum_prop": 0.5654211370082625,
"repo_name": "coq-bench/coq-bench.github.io",
"id": "808d5a51ff83ca3f50b6f069ef11772fd94d506d",
"size": "8135",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "clean/Linux-x86_64-4.07.1-2.0.6/released/8.15.0/mathcomp-field/1.12.0.html",
"mode": "33188",
"license": "mit",
"language": [],
"symlink_target": ""
} |
import { strictEqual } from 'assert';
import { Block, doc, initApp, removeApp, NodeMixin as Node } from '../../src';
let app;
let block;
const container = doc.create('div');
// Tear down the app mounted in `container` and drop every reference to
// the rendered blocks so each spec run starts from a clean slate.
const remove = () => {
  removeApp(container);

  block = null;
  app = null;
};
// Child block exercising the *argument* forms of the Node mixin:
//  - Node(div) with a block value: the <div> Elem is registered under the
//    property "div" on the block returned by getParentTemplate().
//  - Node(b) with no value: the <b> Elem is registered as this block's "b".
class NodeHelper extends Block {
  static html = html`
    <div Node(div)="{getParentTemplate()}"/>
    <b Node(b)/>
  `;

  afterRender() {
    // Expose this rendered helper instance to the specs below.
    block = this;
  }
}
// Root block of the test app, exercising the *value* forms of Node:
//  - Node="span": string value — the <span> Elem is stored as this.span.
//  - Node="{setInput}": function value — setInput is called with the Elem.
//  - bare Node on <i>: no argument and no value.
class NodeApp extends Block {
  static html = html`
    <span Node="span"/>
    <input Node="{setInput}"/>
    <i Node/>
    <NodeHelper/>
  `;

  afterRender() {
    // Expose the rendered app instance to the specs below.
    app = this;
  }

  setInput = (input) => {
    // Callback target for Node="{setInput}"; receives the <input> Elem.
    this.input = input;
  };
}
// Mocha suite for the Node mixin: mounts NodeApp once, then asserts that
// each Node usage bound the expected DOM element to the expected property.
export default () => {
  describe('Node', () => {
    before(() => {
      initApp(NodeApp, container);
    });

    it('should support string value and set the block property to the Elem', () => {
      strictEqual(container.find('span')[0], app.span);
    });
    it('should support function value and call the function with the Elem argument', () => {
      strictEqual(container.find('input')[0], app.input);
    });
    it('should support string argument with block value', () => {
      strictEqual(container.find('div')[0], app.div);
    });
    it('should support string argument with no value', () => {
      strictEqual(container.find('b')[0], block.b);
    });

    after(remove);
  });
};
| {
"content_hash": "7eee98dfbce3c0bd329f52329a35d584",
"timestamp": "",
"source": "github",
"line_count": 63,
"max_line_length": 92,
"avg_line_length": 21.682539682539684,
"alnum_prop": 0.58199121522694,
"repo_name": "dwaynejs/dwayne",
"id": "8cbfcc4360aaeac191f7af3ee9594172faa4f792",
"size": "1366",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/mixins/Node.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "145"
},
{
"name": "JavaScript",
"bytes": "269916"
}
],
"symlink_target": ""
} |
[](http://lib.haxe.org/p/pixijs) [](http://badge.fury.io/js/hxpixijs) [](https://travis-ci.org/pixijs/pixi-haxe)
=========

Externs of Pixi.js v4.x for Haxe - A fast and lightweight 2D JavaScript rendering library that works across all devices.
### Installation
```
haxelib install pixijs
or via npm
npm install hxpixijs
```
### Issues
Found any bug? Please create a new [issue](https://github.com/pixijs/pixi-haxe/issues/new).
### Demos
* [Basics](http://adireddy.github.io/demos/pixi-haxe/basics.html)
* [Graphics](http://adireddy.github.io/demos/pixi-haxe/graphics.html)
* [Movieclip](http://adireddy.github.io/demos/pixi-haxe/movieclip.html)
* [Spritesheet](http://adireddy.github.io/demos/pixi-haxe/spritesheet.html)
* [Rope](http://adireddy.github.io/demos/pixi-haxe/rope.html)
* [Dragging](http://adireddy.github.io/demos/pixi-haxe/dragging.html)
* [Texture Swap](http://adireddy.github.io/demos/pixi-haxe/textureswap.html)
* [Tiling](http://adireddy.github.io/demos/pixi-haxe/tiling.html)
* [Bitmap Font](http://adireddy.github.io/demos/pixi-haxe/bitmapfont.html)
* [Alpha Mask](http://adireddy.github.io/demos/pixi-haxe/alphamask.html)
* [Bunnymark](http://adireddy.github.io/demos/pixi-haxe/bunnymark.html)
* [Retina](http://adireddy.github.io/demos/pixi-haxe/retina.html)
* [Events](http://adireddy.github.io/demos/pixi-haxe/events.html)
* [Loader](http://adireddy.github.io/demos/pixi-haxe/loader.html)
* [Video](http://adireddy.github.io/demos/pixi-haxe/video.html)
* [Nape](http://adireddy.github.io/demos/pixi-haxe/nape.html)
**Filters (WebGL only)**
* [ColorMatrix](http://adireddy.github.io/demos/pixi-haxe/colormatrix.html)
* [Blur](http://adireddy.github.io/demos/pixi-haxe/blur.html)
**[COHERE](http://adireddy.github.io/cohere/)** - Sample MVC application using Haxe and Pixi.js
Look at the `samples` folder for the source code of above examples.
### Usage
```haxe
package samples.basics;
import pixi.core.display.Container;
import pixi.core.textures.Texture;
import pixi.core.renderers.SystemRenderer;
import pixi.core.renderers.Detector;
import pixi.core.sprites.Sprite;
import js.Browser;
class Main {
var _bunny:Sprite;
var _renderer:SystemRenderer;
var _stage:Container;
public function new() {
// Rendering options usage sample
var options:RenderingOptions = {};
options.backgroundColor = 0x003366;
options.resolution = 1;
_stage = new Container();
_renderer = Detector.autoDetectRenderer(800, 600, options);
_bunny = new Sprite(Texture.fromImage("assets/basics/bunny.png"));
_bunny.anchor.set(0.5, 0.5);
_bunny.position.set(400, 300);
_stage.addChild(_bunny);
Browser.document.body.appendChild(_renderer.view);
Browser.window.requestAnimationFrame(cast _animate);
}
function _animate() {
Browser.window.requestAnimationFrame(cast _animate);
_bunny.rotation += 0.1;
_renderer.render(_stage);
}
static function main() {
new Main();
}
}
```
### Licensing Information
<a rel="license" href="http://opensource.org/licenses/MIT">
<img alt="MIT license" height="40" src="http://upload.wikimedia.org/wikipedia/commons/c/c3/License_icon-mit.svg" /></a>
This content is released under the [MIT](http://opensource.org/licenses/MIT) License.
[Pixi.js](https://github.com/GoodBoyDigital/pixi.js) is licensed under the [MIT](http://opensource.org/licenses/MIT) License.
### Contributor Code of Conduct
[Code of Conduct](https://github.com/CoralineAda/contributor_covenant) is adapted from [Contributor Covenant, version 1.4](http://contributor-covenant.org/version/1/4)
| {
"content_hash": "ac07ce45e2c3e9e2a99d687a6fe80ff5",
"timestamp": "",
"source": "github",
"line_count": 110,
"max_line_length": 313,
"avg_line_length": 34.40909090909091,
"alnum_prop": 0.7350066050198151,
"repo_name": "Tiago-Ling/pixi-haxe",
"id": "2f31daa105704194f32f619ce64c3cab53165fb3",
"size": "3785",
"binary": false,
"copies": "1",
"ref": "refs/heads/dev",
"path": "README.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Haxe",
"bytes": "165481"
},
{
"name": "JavaScript",
"bytes": "1064320"
}
],
"symlink_target": ""
} |
/* -*- Mode: C; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* ***** BEGIN LICENSE BLOCK *****
* Version: MPL 1.1/GPL 2.0/LGPL 2.1
*
* The contents of this file are subject to the Mozilla Public License Version
* 1.1 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* http://www.mozilla.org/MPL/
*
* Software distributed under the License is distributed on an "AS IS" basis,
* WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
* for the specific language governing rights and limitations under the
* License.
*
* The Original Code is mozilla.org code.
*
* The Initial Developer of the Original Code is
* Netscape Communications Corporation.
* Portions created by the Initial Developer are Copyright (C) 1999
* the Initial Developer. All Rights Reserved.
*
* Contributor(s):
*
* Alternatively, the contents of this file may be used under the terms of
* either of the GNU General Public License Version 2 or later (the "GPL"),
* or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
* in which case the provisions of the GPL or the LGPL are applicable instead
* of those above. If you wish to allow use of your version of this file only
* under the terms of either the GPL or the LGPL, and not to allow others to
* use your version of this file under the terms of the MPL, indicate your
* decision by deleting the provisions above and replace them with the notice
* and other provisions required by the GPL or the LGPL. If you do not delete
* the provisions above, a recipient may use your version of this file under
* the terms of any one of the MPL, the GPL or the LGPL.
*
* ***** END LICENSE BLOCK ***** */
#ifndef nsICDETAdaptor_h__
#define nsICDETAdaptor_h__
#include "nsISupports.h"
class nsICharsetDetector;
class nsIWebShellServices;
class nsIDocument;
class nsIParser;
// {12BB8F13-2389-11d3-B3BF-00805F8A6670}
#define NS_ICHARSETDETECTIONADAPTOR_IID \
{ 0x12bb8f13, 0x2389, 0x11d3, { 0xb3, 0xbf, 0x0, 0x80, 0x5f, 0x8a, 0x66, 0x70 } }
/*
  This interface is defined to serve as an adaptor between
  nsIWebShellServices, nsICharsetDetector and nsIParserFilter.
  It makes the nsICharsetDetector implementation independent from the
  nsIParserFilter and nsIWebShellServices.
*/
class nsICharsetDetectionAdaptor : public nsISupports {
public:
  NS_DECLARE_STATIC_IID_ACCESSOR(NS_ICHARSETDETECTIONADAPTOR_IID)
  /*
    Initialize the adaptor by supplying the nsICharsetDetector to drive
    and the webshell/document/parser context it reports to.
    aCharset is the initial charset name; aCommand is an optional load
    command string (may be nsnull).
  */
  NS_IMETHOD Init(nsIWebShellServices* aWebShell, nsICharsetDetector *aDetector,
                  nsIDocument* aDocument, nsIParser* aParser,
                  const char* aCharset, const char* aCommand=nsnull) = 0;
};
NS_DEFINE_STATIC_IID_ACCESSOR(nsICharsetDetectionAdaptor,
NS_ICHARSETDETECTIONADAPTOR_IID)
#endif /* nsICDETAdaptor_h__ */
| {
"content_hash": "fdabcedb52d690a16e067c9b50ed194f",
"timestamp": "",
"source": "github",
"line_count": 73,
"max_line_length": 81,
"avg_line_length": 40.78082191780822,
"alnum_prop": 0.7282499160228418,
"repo_name": "leighpauls/k2cro4",
"id": "84a25b891f4a742663ef57d77344cdbb357f4024",
"size": "2977",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "third_party/xulrunner-sdk/win/include/chardet/nsICharsetDetectionAdaptor.h",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "ASP",
"bytes": "3062"
},
{
"name": "AppleScript",
"bytes": "25392"
},
{
"name": "Arduino",
"bytes": "464"
},
{
"name": "Assembly",
"bytes": "68131038"
},
{
"name": "C",
"bytes": "242794338"
},
{
"name": "C#",
"bytes": "11024"
},
{
"name": "C++",
"bytes": "353525184"
},
{
"name": "Common Lisp",
"bytes": "3721"
},
{
"name": "D",
"bytes": "1931"
},
{
"name": "Emacs Lisp",
"bytes": "1639"
},
{
"name": "F#",
"bytes": "4992"
},
{
"name": "FORTRAN",
"bytes": "10404"
},
{
"name": "Java",
"bytes": "3845159"
},
{
"name": "JavaScript",
"bytes": "39146656"
},
{
"name": "Lua",
"bytes": "13768"
},
{
"name": "Matlab",
"bytes": "22373"
},
{
"name": "Objective-C",
"bytes": "21887598"
},
{
"name": "PHP",
"bytes": "2344144"
},
{
"name": "Perl",
"bytes": "49033099"
},
{
"name": "Prolog",
"bytes": "2926122"
},
{
"name": "Python",
"bytes": "39863959"
},
{
"name": "R",
"bytes": "262"
},
{
"name": "Racket",
"bytes": "359"
},
{
"name": "Ruby",
"bytes": "304063"
},
{
"name": "Scheme",
"bytes": "14853"
},
{
"name": "Shell",
"bytes": "9195117"
},
{
"name": "Tcl",
"bytes": "1919771"
},
{
"name": "Verilog",
"bytes": "3092"
},
{
"name": "Visual Basic",
"bytes": "1430"
},
{
"name": "eC",
"bytes": "5079"
}
],
"symlink_target": ""
} |
layout: bare
title: Seagate Longmont Class
description: Seagate Longmont Class
tags: [notes, class]
path: classnotes/2012-12-07-seagate-class.md
eventdate: 2012-12-03
---
Held on December 3, 2012
Taught by:
* Tim Berglund ([Twitter](http://twitter.com/tlberglund), [GitHub](https://github.com/tlberglund))
* Jordan McCullough ([Twitter](http://twitter.com/thejordanmcc), [GitHub](https://github.com/jordanmccullough))
## Installation
* Git Installation
* [The Git-SCM Web Site (All Platforms)](http://git-scm.com)
* [The GitHub for Windows Client and Command Line](http://windows.github.com)
* Minimum is 1.7.10, but can have issues with HTTPS credential prompting.
* Best is 1.8.X or higher
## Suggested Books, Articles
* [Free ProGit Book](http://git-scm.com/book)
* [Getting started with Version Control](http://teach.github.com/articles/lesson-new-to-version-control/)
* [The GitHub Flow](http://scottchacon.com/2011/08/31/github-flow.html)
* [DVCS Workflows Book](https://github.com/zkessin/dvcs-workflows)
* [Git Cheat Sheets](http://teach.github.com/articles/git-cheatsheets/)
* [Git Workflow Links](https://pinboard.in/u:matthew.mccullough/t:git+workflow)
## Course Materials, Links
* [Git Teaching Materials](http://teach.github.com)
* [Course Slides](http://teach.github.com/articles/course-slides/)
* [Post-event Git and GitHub questions](https://github.com/githubtraining/feedback/)
* [Free Office Hours Sessions](http://training.github.com/web/free-classes/)
## Tools, Tips, Shortcuts
* [ghi gem](https://github.com/stephencelis/ghi)
* [hub gem](https://github.com/defunkt/hub)
* [gist gem](https://github.com/defunkt/gist)
* [gitg](http://git.gnome.org/browse/gitg) instead of gitk
* [tig](http://gitready.com/advanced/2009/07/31/tig-the-ncurses-front-end-to-git.html)
* [Matthew's shell scripts](https://github.com/matthewmccullough/scripts)
* [Matthew's Git aliases](https://github.com/matthewmccullough/dotfiles) (in the gitconfig file)
## Resources
* SSL Certificates
* [Adding SSL Certificates for GitHub.com (Common for Windows XP)](http://stackoverflow.com/questions/3777075/https-github-access/4454754#4454754)
* Disabling SSL Certificate Checks for Git:
git config --global http.sslVerify false
* [Open Source Git Ignore Templates](https://github.com/github/gitignore)
* [Book Authoring on Git and GitHub](http://teach.github.com/articles/book-authoring-using-git-and-github/)
* [Post Receive Hooks](https://help.github.com/articles/post-receive-hooks)
* [GitHub Training Videos](http://training.github.com/resources/videos/)
* [Using Git with Media Temple](http://carl-topham.com/theblog/post/using-git-media-temple/)
* [GitHub Training Feedback and Follow-up Questions](https://github.com/githubtraining/feedback/issues?state=open)
* [GitHub Commit Status API for Continuous Integration](https://github.com/blog/1227-commit-status-api)
* [Git Credential Cache for HTTP](http://teach.github.com/articles/lesson-git-credential-cache/)
* [GitHub Issues Cheatsheet](http://teach.github.com/articles/github-issues-cheatsheet/)
* [Jenkins Git Plugin](https://wiki.jenkins-ci.org/display/JENKINS/Git+Plugin)
* [Open Source Git Ignores](https://github.com/github/gitignore)
* [Ship of Theseus - Related to Similarity Index](http://en.wikipedia.org/wiki/Ship_of_Theseus)
* [git-p4 Perforce Script](http://answers.perforce.com/articles/KB_Article/Git-P4)
* [Unix watch command](http://en.wikipedia.org/wiki/Watch_(Unix\))
* [SHA-1 Hash Collisions](http://git-scm.com/book/ch6-1.html#A-SHORT-NOTE-ABOUT-SHA-1)
* [NPD Git Cheatsheet](http://ndpsoftware.com/git-cheatsheet.html)
* [More Git Cheatsheets](http://teach.github.com/articles/git-cheatsheets/)
https://github.com/githubtraining/feedback/issues
## Class Outline
* Introduction and history
* Creating repos
* Adding and committing files
* Git's Three Stage Thinking
* Versioning Files in Git
* Viewing History in Git
* Understanding Git's Content Tracking
* Moving and copying files
* Branching and Merging in Git
* Rebase and interactive rebase
* Establishing upstream repositories
* Push, pull, and fetch
* RefLog as a safety net
* Stash for work-in-progress
* Reset modalities
* Collaboration through Pull Requests
* Code review features of GitHub
* http://mrkn.co/survey
## Command Line History
git --version
pwd
git init newproject
cd newproject
ls
ls -la
tree .git
ls -la .git
git config user.name
git config user.email
git help config
vi ~/.gitconfig
git config user.name
git config user.email
vi ~/.gitconfig
git config --global user.name "Tim Berglund"
git config --global user.email "[email protected]"
vi ~/.gitconfig
git config user.name
git config user.email
git config --local user.name
git config --global user.name
git config --list
git config --list --local
git config --list --global
git config --list --system
touch /opt/github/homebrew/Cellar/git/1.8.0/etc/gitconfig
git config --list --system
ll .git
cat .git/config
cat ~/.gitconfig
git config --global color.ui auto
pwd
ls -a
vi caesar.txt
git status
git add caesar.txt
git status
git commit -m "Initial commit"
git status
ls -la .git
tree .git/objects
vi caesar.txt
git status
git add caesar.txt
git status
git commit -m "The noblest man"
vi caesar.txt
git diff
git difftool
git status
git add caesar.txt
git diff
git diff --staged
vi caesar.txt
git status
git diff
git diff --staged
vi caesar.txt
git diff
git diff --color-words
git diff --word-diff
git diff HEAD
git diff HEAD --word-diff
git status
git add caesar.txt
git status
git commit -m "Costly blood"
git stauts
git status
git log
git log --stat
git log --patch
git log --patch --word-diff
git log --pretty=raw
git log --pretty=oneline | wc -l
git log --pretty=raw
git log --pretty=email --patch -1
git log --pretty=oneline
git log --pretty=oneline --abbrev-commit
tree .git/objects
tree .git/objects | less
git log --pretty=oneline --abbrev-commit
git log --pretty=oneline --abbrev-commit --graph
git lg
git status
ls
mkdir obj
touch obj/InterruptController.o
touch obj/DMA.o
ll ~ > build.log
git status
git status -u
vi .gitignore
mkdir otherdir
cd otherdir
mkdir obj
touch obj/monkey.o
cd ../..
tree
git status
pwd
cd newproject
git status
vi .gitignore
ll
cd otherdir
ll
tree
vi .gitignore
cd ..
git status
git status -u
vi .git
vi .gitignore
git status
rm otherdir/.gitignore
git staus
git status
git add .gitignore
git status
git commit -m "Added a .gitignore file"
ll
tree
git config core.excludesfile
vi ~/.gitignore
git status
vi ~/.gitignore
cat .gitignore
vi monkey.txt
git add .
git commit -m "Angry monkey"
vi .gitignore
git status
git add .gitignore
git commit -m "All of the ignores"
vi monkey.txt
git status
cat .gitignore
git add .
git status
git commit -m "Changed the emotion of the monkey"
git log --stat
ll
git rm monkey.txt
git status
ls
git reset HEAD monkey.txt
ll
git checkout -- monkey.txt
git status
ls
ll
git rm --cached monkey.txt
git status
ll
cat .git
cat .gitignore
git status
git commit -m "Made monkey.txt an untracked file"
vi monkey.txt
git status
history
generaterandomfiles 5 junk txt
ls
ll
cat junk1.txt
git status
git add .
git status
git commit -m "Commited some files prepared for destruction"
git status
ll
git rm junk1.txt
ll
git status
rm junk2.txt
git status
git rm junk2.txt
git statsu
git status
open .
git status
git add -u .
git status
git commit -m "Cleaned up all of the cruft"
tmux
tr.git
tmux attach
cd scratch/newproject
export PS1="$ "
ll
rm -rf obj
rm -rf otherdir
rm monkey.txt
rm build.log
ll
generaterandomfiles 5 file txt
ll
git status
git add .
git status
ll ~ > listing.txt
git status
git add .
git status
git commit -m "Added some files to move around"
git status
mkdir files
tree
git status
cd files
mkdir monkey
cd ..
git status
cd files
touch .gitignore
cd ..
git status
cd files
rm .gitignore
rmdir monkey
cd ..
tree
git status
git mv file1.txt files/
tree
git status
mv file2.txt files/
tree
git status
git rm file2.txt
git status
git add files/file2.txt
git status
open .
cd files
ll
cp file2.txt monkey-2.txt
git status
git add monkey-2.txt
git status
rm monkey-2.txt
git reset HEAD monkey-2.txt
git status
git add -A .
git status
cd ..
git add -A .
git status
tree
open .
git status
git commit -m "Refactored by trivial moving"
git log --stat -1
git log --stat -1 -M
vi listing.txt
mv listing.txt files/
tree
git status
git add -A .
git status
git commit -m "Renamed with changes"
git log --stat -1
git log --stat -1 -M
git log --stat -1 -M80
git log --stat -1 -M70
ll
ll ~ > test.txt
git add .
git commit -m"FIle to move"
mv test.txt files/oops.txt
vi files/oops.txt
git add -A .
git status
git commit -m "Moved, changed, AND RENAMED--stop me before I kill again"
git log --stat -M -1
vi files/listing.txt
git add .
git commit -m "9001 listings"
ll
git log --stat -M -- files/listing.txt
git log --stat -M --follow -- files/listing.txt
git log --stat -M99 --follow -- files/listing.txt
git status
git branch
tree .git/refs
cat .git/refs/heads/master
git lg
tmux
tmux attach
export PS1="$ "
tr.git
cd scratch/newproject
ll
git lg
tr.git
cd scratch/newproject
git loglive
git branch feature
tree .git/refs
git branch
git lg
git branch
git branch
vi caesar.txt
git add .
git commit -m "Domestic fury"
git checkout feature
cat caesar.txt
vi caesar.txt
git status
git add .
git commit -m "Added a title to Caesar"
git branch
git status
git checkout master
cat caesar.txt
git branch
git merge feature
git lg
ll
tree .git/refs
git branch -d feature
git branch wat 7cf5
git branch feature 3598dad
git branch -d feature
git branch -d wat
ll
vi caesar.txt
git status
git add .
git commit -m "Blood and destruction"
git branch feature2
git checkout feature2
git branch
vi caesar.txt
git checkout master
git branch -d feature2
git branch feature2 f780044
git checkout feature2
vi caesar.txt
vi jabberwocky.txt
git statu
git status
git add .
git status
git commit -m "Defective Shakespeare, effect Carrol"
git commit --amend
git status
git branch
git checkout master
git merge master
git merge feature2
git status
vi caesar.txt
cat jabberwocky.txt
git branch
cat caesar.txt
git merge --abort
ll
cat caesar.txt
git merge feature2
vi caesar.txt
git status
git add caesar.txt
git status
git mergetool
git log --graph --oneline --all
git status
git commit
git loglive -15
vi caesar.txt
git add .
git commit -m "More defective Shakespeare"
git reset
vi jabberwocky.txt
git reset --hard 47c9
vi caesar.txt
git branch foo 44a5
git branch -d foo
git branch -D foo
git reflog
git reset --hard HEAD@{1}
vi jabberwocky.txt
git add .
git commit -m "MOAR jabberwocky"
vi caesar.txt
git checkout 47c9 -- caesar.txt
git status
vi caesar.txt
git add .
git commit -m "Choked pity"
git checkout 47c9
git branch feature3
git checkout master
git branch -d feature3
git branch -d feature2
git reflog
ll
git remote add origin https://github.com/githubtrainer/poetry.git
git remote
cat .git/config
git help push
ll .git/refs/heads
ls .git/refs/heads
git push -u origin master
cat .git/co
cat .git/config
tr.git
cd scratch
export PS1="$ "
tr.git
cd scratch
ll
git clone https://github.com/githubtrainer/poetry.git
cd poetry
ll
git checkout -b tlberglund
vi caesar.txt
git add .
git commit -m "Caesar rage"
git push -u origin tlberglund
git checkout master
git branch -a
git fetch
git branch
ls
git branch -a
tree .git/refs
git lg
git rev-parse HEAD
git lg
git fetch
git lg
git branch -a
git checkout timandjordanROCK
git branch -a
ll
cat caesar.txt
cat jabberwocky.txt
ll
ll files
ll
git branch
git diff master
git checkout master
git merge timandjordanROCK
git branch -d timandjordanROCK
git push
git lg
vi caesar.txt
git merge tlberglund
vi caesar.txt
git add .
git commit -m "Cry HAVOC"
git lg
git diff origin/after5cst
git merge origin/after5cst
git lg
git merge origin/jacooper
git lg
git branch
git push
git config --global push.default simple
git push
ls
cat caesar.txt
ll files
ls files
git branch -a
git pull
git loglive -10
git branch -a
ll
vi caesar.txt
git status
git add .
git commit -m "Carrion men"
git status
git push
git fetch
ll
ls
vi jabberwocky.txt
git add .
git commit -m "Uffish thought"
git push
git fetch
git pull
git reflog
git reset --hard HEAD@{1}
git status
git pull
git push
git tag CLASS_DONE
tree .git/refs
git show CLASS_DONE
git tag -a UFFISH_THOUGHT 612ef31
git show UFFISH_THOUGHT
tree .git/refs
| {
"content_hash": "6f27f6c78f50953c21d548bdbd9cb6fc",
"timestamp": "",
"source": "github",
"line_count": 598,
"max_line_length": 150,
"avg_line_length": 24.078595317725753,
"alnum_prop": 0.648933953746788,
"repo_name": "youpeiban/youpeiban.github.io",
"id": "a8628c06cf557ce1f389d71d425b86c2b325e79c",
"size": "14403",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "classnotes/2012-12-07-seagate-class.md",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "CSS",
"bytes": "366229"
},
{
"name": "HTML",
"bytes": "414399"
},
{
"name": "JavaScript",
"bytes": "4638"
},
{
"name": "Ruby",
"bytes": "2011"
},
{
"name": "Shell",
"bytes": "438"
}
],
"symlink_target": ""
} |
// Copyright (c) 2009-2010 Satoshi Nakamoto
// Copyright (c) 2009-2013 The Bitcoin developers
// Distributed under the MIT/X11 software license, see the accompanying
// file COPYING or http://www.opensource.org/licenses/mit-license.php.
#ifndef BITCOIN_WALLET_H
#define BITCOIN_WALLET_H
#include "core.h"
#include "crypter.h"
#include "key.h"
#include "keystore.h"
#include "main.h"
#include "ui_interface.h"
#include "util.h"
#include "walletdb.h"
#include "stealth.h"
#include "script.h"
#include <algorithm>
#include <map>
#include <set>
#include <stdexcept>
#include <stdint.h>
#include <string>
#include <utility>
#include <vector>
// Settings
// Global fee configuration, set from command-line/config options.
extern int64_t nTransactionFee;
// If true, unconfirmed change outputs may be spent (see CWalletTx::IsTrusted).
extern bool bSpendZeroConfChange;
// -paytxfee default
static const int64_t DEFAULT_TRANSACTION_FEE = 0;
// -paytxfee will warn if called with a higher fee than this amount (in satoshis) per KB
// NOTE(review): declared `int` while fees elsewhere are int64_t; 0.05*COIN
// presumably fits in an int, but consider unifying the type — confirm COIN's value.
static const int nHighTransactionFeeWarning = 0.05 * COIN;
// Per-key metadata for stealth keys (see stealth.h).
typedef std::map<CKeyID, CStealthKeyMetadata> StealthKeyMetaMap;
// Free-form string key/value map carried by wallet transactions and
// accounting entries (serialized to disk).
typedef std::map<std::string, std::string> mapValue_t;
// Forward declarations for types defined later in this header or elsewhere.
class CAccountingEntry;
class CCoinControl;
class COutput;
class CReserveKey;
class CScript;
class CWalletTx;
/** (client) version numbers for particular wallet features.
 *  Used with CWallet::SetMinVersion/CanSupportFeature to gate
 *  format upgrades of the on-disk wallet. */
enum WalletFeature
{
    FEATURE_BASE = 10500, // the earliest version new wallets supports (only useful for getinfo's clientversion output)
    FEATURE_WALLETCRYPT = 40000, // wallet encryption
    FEATURE_COMPRPUBKEY = 60000, // compressed public keys
    // Highest feature level this client knows about (same value as
    // FEATURE_COMPRPUBKEY).
    FEATURE_LATEST = 60000
};
/** A key pool entry: a pre-generated public key stamped with its
 *  creation time. Entries are serialized to the wallet database and
 *  handed out via CWallet's keypool functions. */
class CKeyPool
{
public:
    // Creation time, set to GetTime() at construction.
    int64_t nTime;
    // The pre-generated public key.
    CPubKey vchPubKey;
    CKeyPool()
    {
        nTime = GetTime();
    }
    CKeyPool(const CPubKey& vchPubKeyIn)
    {
        nTime = GetTime();
        vchPubKey = vchPubKeyIn;
    }
    // Serialization: nVersion (except when hashing), then nTime, then the
    // pubkey. Field order is part of the on-disk format — do not reorder.
    IMPLEMENT_SERIALIZE
    (
        if (!(nType & SER_GETHASH))
            READWRITE(nVersion);
        READWRITE(nTime);
        READWRITE(vchPubKey);
    )
};
/** Address book data: the metadata the wallet keeps per address book
 *  entry — a user-visible label, a purpose tag (defaults to "unknown"
 *  until assigned), and arbitrary per-destination key/value data. */
class CAddressBookData
{
public:
    typedef std::map<std::string, std::string> StringMap;
    // User-assigned label for this entry.
    std::string name;
    // Purpose tag; "unknown" until explicitly set.
    std::string purpose;
    // Free-form destination data records.
    StringMap destdata;
    CAddressBookData() : purpose("unknown") {}
};
/** A CWallet is an extension of a keystore, which also maintains a set of transactions and balances,
 * and provides the ability to create new transactions.
 * All mutable state is protected by cs_wallet except fFileBacked and
 * strWalletFile, which are immutable after construction (see below).
 */
class CWallet : public CCryptoKeyStore, public CWalletInterface
{
private:
    // Select unspent outputs totalling at least nTargetValue into
    // setCoinsRet/nValueRet; coinControl (optional) restricts candidates.
    bool SelectCoins(int64_t nTargetValue, std::set<std::pair<const CWalletTx*,unsigned int> >& setCoinsRet, int64_t& nValueRet, const CCoinControl *coinControl = NULL) const;
    // Reset to NULL in SetNull(); presumably holds the DB handle while
    // EncryptWallet() rewrites keys — implementation not in this header.
    CWalletDB *pwalletdbEncryption;
    // the current wallet version: clients below this version are not able to load the wallet
    int nWalletVersion;
    // the maximum wallet format version: memory-only variable that specifies to what version this wallet may be upgraded
    int nWalletMaxVersion;
    // Scheduling state for ResendWalletTransactions(); both zeroed in SetNull().
    int64_t nNextResend;
    int64_t nLastResend;
    // Used to keep track of spent outpoints, and
    // detect and report conflicts (double-spends or
    // mutated transactions where the mutant gets mined).
    typedef std::multimap<COutPoint, uint256> TxSpends;
    TxSpends mapTxSpends;
    void AddToSpends(const COutPoint& outpoint, const uint256& wtxid);
    void AddToSpends(const uint256& wtxid);
    // Reconcile metadata among the wallet transactions in the given
    // range of mapTxSpends entries (all spenders of one outpoint).
    void SyncMetaData(std::pair<TxSpends::iterator, TxSpends::iterator>);
public:
    /// Main wallet lock.
    /// This lock protects all the fields added by CWallet
    /// except for:
    ///    fFileBacked (immutable after instantiation)
    ///    strWalletFile (immutable after instantiation)
    mutable CCriticalSection cs_wallet;
    bool fFileBacked;
    std::string strWalletFile;
    // Indexes into the on-disk key pool (see CKeyPool above).
    std::set<int64_t> setKeyPool;
    std::map<CKeyID, CKeyMetadata> mapKeyMetadata;
    // Stealth-address support (see stealth.h).
    std::set<CStealthAddress> stealthAddresses;
    StealthKeyMetaMap mapStealthKeyMeta;
    uint32_t nStealth, nFoundStealth; // for reporting, zero before use
    // Master keys for wallet encryption, keyed by an ID that only grows.
    typedef std::map<unsigned int, CMasterKey> MasterKeyMap;
    MasterKeyMap mapMasterKeys;
    unsigned int nMasterKeyMaxID;
    // In-memory wallet (not file backed).
    CWallet()
    {
        SetNull();
    }
    // File-backed wallet stored at strWalletFileIn.
    CWallet(std::string strWalletFileIn)
    {
        SetNull();
        strWalletFile = strWalletFileIn;
        fFileBacked = true;
    }
    // Reset all members to their defaults; shared by both constructors.
    void SetNull()
    {
        nWalletVersion = FEATURE_BASE;
        nWalletMaxVersion = FEATURE_BASE;
        fFileBacked = false;
        nMasterKeyMaxID = 0;
        pwalletdbEncryption = NULL;
        nOrderPosNext = 0;
        nNextResend = 0;
        nLastResend = 0;
        nTimeFirstKey = 0;
    }
    // All wallet transactions, keyed by txid.
    std::map<uint256, CWalletTx> mapWallet;
    // Next position in the ordered transaction list (see IncOrderPosNext).
    int64_t nOrderPosNext;
    // Per-txid count of inventory requests (see Inventory / GetRequestCount).
    std::map<uint256, int> mapRequestCount;
    std::map<CTxDestination, CAddressBookData> mapAddressBook;
    CPubKey vchDefaultKey;
    // Outpoints excluded from coin selection (see LockCoin et al.).
    std::set<COutPoint> setLockedCoins;
    int64_t nTimeFirstKey;
    // Look up a wallet transaction by hash; NULL if not in mapWallet.
    const CWalletTx* GetWalletTx(const uint256& hash) const;
    // check whether we are allowed to upgrade (or already support) to the named feature
    bool CanSupportFeature(enum WalletFeature wf) { AssertLockHeld(cs_wallet); return nWalletMaxVersion >= wf; }
    void AvailableCoins(std::vector<COutput>& vCoins, bool fOnlyConfirmed=true, const CCoinControl *coinControl = NULL) const;
    bool SelectCoinsMinConf(int64_t nTargetValue, int nConfMine, int nConfTheirs, std::vector<COutput> vCoins, std::set<std::pair<const CWalletTx*,unsigned int> >& setCoinsRet, int64_t& nValueRet) const;
    bool IsSpent(const uint256& hash, unsigned int n) const;
    // Coin locking: locked outpoints are skipped by coin selection.
    bool IsLockedCoin(uint256 hash, unsigned int n) const;
    void LockCoin(COutPoint& output);
    void UnlockCoin(COutPoint& output);
    void UnlockAllCoins();
    void ListLockedCoins(std::vector<COutPoint>& vOutpts);
    // keystore implementation
    // Generate a new key
    CPubKey GenerateNewKey();
    // Adds a key to the store, and saves it to disk.
    //bool AddKeyPubKey(const CKey& key, const CPubKey &pubkey);
    bool AddKey(const CKey& key);
    // Adds a key to the store, without saving it to disk (used by LoadWallet)
    //bool LoadKey(const CKey& key, const CPubKey &pubkey) { return CCryptoKeyStore::AddKeyPubKey(key, pubkey); }
    bool LoadKey(const CKey& key) { return CCryptoKeyStore::AddKey(key); }
    // Load metadata (used by LoadWallet)
    bool LoadKeyMetadata(const CPubKey &pubkey, const CKeyMetadata &metadata);
    // Record the on-disk version while loading; never lowers nWalletMaxVersion.
    bool LoadMinVersion(int nVersion) { AssertLockHeld(cs_wallet); nWalletVersion = nVersion; nWalletMaxVersion = std::max(nWalletMaxVersion, nVersion); return true; }
    // Adds an encrypted key to the store, and saves it to disk.
    bool AddCryptedKey(const CPubKey &vchPubKey, const std::vector<unsigned char> &vchCryptedSecret);
    // Adds an encrypted key to the store, without saving it to disk (used by LoadWallet)
    bool LoadCryptedKey(const CPubKey &vchPubKey, const std::vector<unsigned char> &vchCryptedSecret);
    bool AddCScript(const CScript& redeemScript);
    bool LoadCScript(const CScript& redeemScript);
    /// Adds a destination data tuple to the store, and saves it to disk
    bool AddDestData(const CTxDestination &dest, const std::string &key, const std::string &value);
    /// Erases a destination data tuple in the store and on disk
    bool EraseDestData(const CTxDestination &dest, const std::string &key);
    /// Adds a destination data tuple to the store, without saving it to disk
    bool LoadDestData(const CTxDestination &dest, const std::string &key, const std::string &value);
    /// Look up a destination data tuple in the store, return true if found false otherwise
    bool GetDestData(const CTxDestination &dest, const std::string &key, std::string *value) const;
    // Wallet encryption lifecycle.
    bool Lock();
    bool Unlock(const SecureString& strWalletPassphrase);
    bool ChangeWalletPassphrase(const SecureString& strOldWalletPassphrase, const SecureString& strNewWalletPassphrase);
    bool EncryptWallet(const SecureString& strWalletPassphrase);
    void GetKeyBirthTimes(std::map<CKeyID, int64_t> &mapKeyBirth) const;
    /** Increment the next transaction order id
        @return next transaction order id
     */
    int64_t IncOrderPosNext(CWalletDB *pwalletdb = NULL);
    typedef std::pair<CWalletTx*, CAccountingEntry*> TxPair;
    typedef std::multimap<int64_t, TxPair > TxItems;
    /** Get the wallet's activity log
        @return multimap of ordered transactions and accounting entries
        @warning Returned pointers are *only* valid within the scope of passed acentries
     */
    TxItems OrderedTxItems(std::list<CAccountingEntry>& acentries, std::string strAccount = "");
    // Invalidate cached balances on every wallet transaction.
    void MarkDirty();
    bool AddToWallet(const CWalletTx& wtxIn, bool fFromLoadWallet=false);
    // CWalletInterface hooks: called as blocks/transactions are processed.
    void SyncTransaction(const uint256 &hash, const CTransaction& tx, const CBlock* pblock);
    bool AddToWalletIfInvolvingMe(const uint256 &hash, const CTransaction& tx, const CBlock* pblock, bool fUpdate);
    void EraseFromWallet(const uint256 &hash);
    int ScanForWalletTransactions(CBlockIndex* pindexStart, bool fUpdate = false);
    void ReacceptWalletTransactions();
    void ResendWalletTransactions();
    // Balance queries (amounts in satoshis).
    int64_t GetBalance() const;
    int64_t GetUnconfirmedBalance() const;
    int64_t GetImmatureBalance() const;
    /*bool CreateTransaction(const std::vector<std::pair<CScript, int64_t> >& vecSend,
                           CWalletTx& wtxNew, CReserveKey& reservekey, int64_t& nFeeRet, std::string& strFailReason, const CCoinControl *coinControl = NULL);*/
    // Build (but do not broadcast) a transaction paying each (script, amount)
    // in vecSend; nChangePos receives the index of the change output.
    bool CreateTransaction(const std::vector<std::pair<CScript, int64_t> >& vecSend,
                           CWalletTx& wtxNew, CReserveKey& reservekey, int64_t& nFeeRet, int32_t& nChangePos, std::string& strFailReason, const CCoinControl *coinControl=NULL);
    /*bool CreateTransaction(CScript scriptPubKey, int64_t nValue,
                           CWalletTx& wtxNew, CReserveKey& reservekey, int64_t& nFeeRet, std::string& strFailReason, const CCoinControl *coinControl = NULL);*/
    // Single-recipient convenience overload.
    bool CreateTransaction(CScript scriptPubKey, int64_t nValue,
                           CWalletTx& wtxNew, CReserveKey& reservekey, int64_t& nFeeRet, std::string& strFailReason, const CCoinControl *coinControl = NULL);
    bool CommitTransaction(CWalletTx& wtxNew, CReserveKey& reservekey);
    //std::string SendMoney(CScript scriptPubKey, int64_t nValue, CWalletTx& wtxNew);
    // Returns an empty string on success, an error message otherwise.
    std::string SendMoney(CScript scriptPubKey, int64_t nValue, CWalletTx& wtxNew, bool fAskFee=false);
    //std::string SendMoneyToDestination(const CTxDestination &address, int64_t nValue, CWalletTx& wtxNew);
    std::string SendMoneyToDestination(const CTxDestination& address, int64_t nValue, CWalletTx& wtxNew, bool fAskFee=false);
    // Stealth-address operations (see stealth.h).
    bool NewStealthAddress(std::string& sError, std::string& sLabel, CStealthAddress& sxAddr);
    bool AddStealthAddress(CStealthAddress& sxAddr);
    bool UnlockStealthAddresses(const CKeyingMaterial& vMasterKeyIn);
    bool UpdateStealthAddress(std::string &addr, std::string &label, bool addIfNotExist);
    bool CreateStealthTransaction(CScript scriptPubKey, int64_t nValue, std::vector<uint8_t>& P, CWalletTx& wtxNew, CReserveKey& reservekey, int64_t& nFeeRet, const CCoinControl* coinControl=NULL);
    std::string SendStealthMoney(CScript scriptPubKey, int64_t nValue, std::vector<uint8_t>& P, CWalletTx& wtxNew, bool fAskFee=false);
    bool SendStealthMoneyToDestination(CStealthAddress& sxAddress, int64_t nValue, CWalletTx& wtxNew, std::string& sError, bool fAskFee=false);
    bool FindStealthTransactions(const CTransaction& tx);
    // Openname (name registration) transaction scanning.
    bool ScanForOpenNameTransactions(const uint256 &hash, const CTransaction& tx, const CBlock* pblock, bool fUpdate);
    bool FindOpennameTransactions(const CTransaction& tx,int BlockHeight,unsigned int nTime);
    // Key pool management (pre-generated keys; see CKeyPool).
    bool NewKeyPool();
    bool TopUpKeyPool(unsigned int kpSize = 0);
    int64_t AddReserveKey(const CKeyPool& keypool);
    void ReserveKeyFromKeyPool(int64_t& nIndex, CKeyPool& keypool);
    void KeepKey(int64_t nIndex);
    void ReturnKey(int64_t nIndex);
    bool GetKeyFromPool(CPubKey &key);
    int64_t GetOldestKeyPoolTime();
    void GetAllReserveKeys(std::set<CKeyID>& setAddress) const;
    std::set< std::set<CTxDestination> > GetAddressGroupings();
    std::map<CTxDestination, int64_t> GetAddressBalances();
    std::set<CTxDestination> GetAccountAddresses(std::string strAccount) const;
    // Ownership / amount queries on inputs, outputs and transactions.
    bool IsMine(const CTxIn& txin) const;
    int64_t GetDebit(const CTxIn& txin) const;
    bool IsMine(const CTxOut& txout) const
    {
        return ::IsMine(*this, txout.scriptPubKey);
    }
    // Amount credited to this wallet by a single output (0 if not ours).
    int64_t GetCredit(const CTxOut& txout) const
    {
        if (!MoneyRange(txout.nValue))
            throw std::runtime_error("CWallet::GetCredit() : value out of range");
        return (IsMine(txout) ? txout.nValue : 0);
    }
    bool IsChange(const CTxOut& txout) const;
    int64_t GetChange(const CTxOut& txout) const
    {
        if (!MoneyRange(txout.nValue))
            throw std::runtime_error("CWallet::GetChange() : value out of range");
        return (IsChange(txout) ? txout.nValue : 0);
    }
    // True if any output of tx belongs to this wallet.
    bool IsMine(const CTransaction& tx) const
    {
        BOOST_FOREACH(const CTxOut& txout, tx.vout)
            if (IsMine(txout))
                return true;
        return false;
    }
    bool IsFromMe(const CTransaction& tx) const
    {
        return (GetDebit(tx) > 0);
    }
    // Sum of this wallet's debits over all inputs; throws if the running
    // total leaves the valid money range.
    int64_t GetDebit(const CTransaction& tx) const
    {
        int64_t nDebit = 0;
        BOOST_FOREACH(const CTxIn& txin, tx.vin)
        {
            nDebit += GetDebit(txin);
            if (!MoneyRange(nDebit))
                throw std::runtime_error("CWallet::GetDebit() : value out of range");
        }
        return nDebit;
    }
    int64_t GetCredit(const CTransaction& tx) const
    {
        int64_t nCredit = 0;
        BOOST_FOREACH(const CTxOut& txout, tx.vout)
        {
            nCredit += GetCredit(txout);
            if (!MoneyRange(nCredit))
                throw std::runtime_error("CWallet::GetCredit() : value out of range");
        }
        return nCredit;
    }
    int64_t GetChange(const CTransaction& tx) const
    {
        int64_t nChange = 0;
        BOOST_FOREACH(const CTxOut& txout, tx.vout)
        {
            nChange += GetChange(txout);
            if (!MoneyRange(nChange))
                throw std::runtime_error("CWallet::GetChange() : value out of range");
        }
        return nChange;
    }
    void SetBestChain(const CBlockLocator& loc);
    DBErrors LoadWallet(bool& fFirstRunRet);
    DBErrors ZapWalletTx();
    bool SetAddressBook(const CTxDestination& address, const std::string& strName, const std::string& purpose);
    bool DelAddressBook(const CTxDestination& address);
    void UpdatedTransaction(const uint256 &hashTx);
    // Count inventory requests for our transactions (feeds GetRequestCount).
    void Inventory(const uint256 &hash)
    {
        {
            LOCK(cs_wallet);
            std::map<uint256, int>::iterator mi = mapRequestCount.find(hash);
            if (mi != mapRequestCount.end())
                (*mi).second++;
        }
    }
    unsigned int GetKeyPoolSize()
    {
        AssertLockHeld(cs_wallet); // setKeyPool
        return setKeyPool.size();
    }
    bool SetDefaultKey(const CPubKey &vchPubKey);
    // signify that a particular wallet feature is now used. this may change nWalletVersion and nWalletMaxVersion if those are lower
    bool SetMinVersion(enum WalletFeature, CWalletDB* pwalletdbIn = NULL, bool fExplicit = false);
    // change which version we're allowed to upgrade to (note that this does not immediately imply upgrading to that format)
    bool SetMaxVersion(int nVersion);
    // get the current wallet format (the oldest client version guaranteed to understand this wallet)
    int GetVersion() { LOCK(cs_wallet); return nWalletVersion; }
    // Get wallet transactions that conflict with given transaction (spend same outputs)
    std::set<uint256> GetConflicts(const uint256& txid) const;
    /** Address book entry changed.
     * @note called with lock cs_wallet held.
     */
    boost::signals2::signal<void (CWallet *wallet, const CTxDestination
            &address, const std::string &label, bool isMine,
            const std::string &purpose,
            ChangeType status)> NotifyAddressBookChanged;
    /** Wallet transaction added, removed or updated.
     * @note called with lock cs_wallet held.
     */
    boost::signals2::signal<void (CWallet *wallet, const uint256 &hashTx,
            ChangeType status)> NotifyTransactionChanged;
    /** Show progress e.g. for rescan */
    boost::signals2::signal<void (const std::string &title, int nProgress)> ShowProgress;
    bool CreateRawTransaction(const std::vector<std::pair<CScript, int64> >& vecSend, CTransaction& txNew,
            int64& nFeeRet, std::string& strFailReason, bool isMultiSig, CReserveKey& reservekey, const CCoinControl *coinControl=NULL);
    /*
     * for shared wallet
     */
    // NOTE(review): this section uses the older `int64` typedef where the
    // rest of the class uses int64_t — presumably the same underlying
    // type; consider unifying.
    bool IsMyShare(const CTxIn& txin) const;
    bool IsMyShare(const CTxOut& txout) const;
    bool IsMyShare(const CTransaction& tx) const;
    int64 GetSharedBalance() const;
    int64 GetSharedUnconfirmedBalance() const;
    int64 GetSharedImmatureBalance() const;
    int64 GetSharedCredit(const CTxOut& txout) const
    {
        if (!MoneyRange(txout.nValue))
            throw std::runtime_error("CWallet::GetSharedCredit() : value out of range");
        return (IsMyShare(txout) ? txout.nValue : 0);
    }
    bool IsFromMyShare(const CTransaction& tx) const
    {
        return (GetShareDebit(tx) > 0);
    }
    int64 GetShareDebit(const CTransaction& tx) const
    {
        int64 nDebit = 0;
        BOOST_FOREACH(const CTxIn& txin, tx.vin)
        {
            nDebit += GetShareDebit(txin);
            if (!MoneyRange(nDebit))
                throw std::runtime_error("CWallet::GetDebit() : value out of range");
        }
        return nDebit;
    }
    int64 GetShareDebit(const CTxIn& txin) const;
    void AvailableSharedCoins(std::vector<COutput>& vCoins, bool fOnlyConfirmed=true, const CCoinControl *coinControl=NULL) const;
    bool SelectSharedCoins(int64 nTargetValue, std::set<std::pair<const CWalletTx*,unsigned int> >& setCoinsRet, int64_t& nValueRet, const CCoinControl* coinControl=NULL) const;
};
/** A key allocated from the key pool.
 *  The reservation is released back to the pool from the destructor via
 *  ReturnKey(); callers that commit to the key call KeepKey() instead
 *  (both implemented outside this header). */
class CReserveKey
{
protected:
    CWallet* pwallet;   // owning wallet; not owned by this object
    int64_t nIndex;     // keypool index; -1 while nothing is reserved
    CPubKey vchPubKey;  // the reserved public key, once obtained
public:
    CReserveKey(CWallet* pwalletIn) : pwallet(pwalletIn), nIndex(-1) {}
    ~CReserveKey() { ReturnKey(); }
    void ReturnKey();
    bool GetReservedKey(CPubKey &pubkey);
    void KeepKey();
};
//typedef std::map<std::string, std::string> mapValue_t;
static void ReadOrderPos(int64_t& nOrderPos, mapValue_t& mapValue)
{
if (!mapValue.count("n"))
{
nOrderPos = -1; // TODO: calculate elsewhere
return;
}
nOrderPos = atoi64(mapValue["n"].c_str());
}
// Store the ordered-transaction-list position into the serialized
// key/value map under "n"; -1 means "unassigned" and is never written.
static void WriteOrderPos(const int64_t& nOrderPos, mapValue_t& mapValue)
{
    if (nOrderPos != -1)
        mapValue["n"] = i64tostr(nOrderPos);
}
/** A transaction with a bunch of additional info that only the owner cares about.
 * It includes any unrecorded transactions needed to link it back to the block chain.
 * Credit/debit/change values are lazily computed and cached via the
 * f*Cached / n*Cached member pairs; MarkDirty() invalidates the caches.
 */
class CWalletTx : public CMerkleTx
{
private:
    const CWallet* pwallet;
public:
    std::vector<CMerkleTx> vtxPrev;
    mapValue_t mapValue;
    std::vector<std::pair<std::string, std::string> > vOrderForm;
    unsigned int fTimeReceivedIsTxTime;
    unsigned int nTimeReceived;  // time received by this node
    unsigned int nTimeSmart;
    char fFromMe;
    std::string strFromAccount;
    std::vector<char> vfSpent; // which outputs are already spent
    int64_t nOrderPos;  // position in ordered transaction list
    // memory only
    mutable bool fDebitCached;
    mutable bool fCreditCached;
    mutable bool fImmatureCreditCached;
    mutable bool fAvailableCreditCached;
    mutable bool fChangeCached;
    mutable int64_t nDebitCached;
    mutable int64_t nCreditCached;
    mutable int64_t nImmatureCreditCached;
    mutable int64_t nAvailableCreditCached;
    mutable int64_t nChangeCached;
    CWalletTx()
    {
        Init(NULL);
    }
    CWalletTx(const CWallet* pwalletIn)
    {
        Init(pwalletIn);
    }
    CWalletTx(const CWallet* pwalletIn, const CMerkleTx& txIn) : CMerkleTx(txIn)
    {
        Init(pwalletIn);
    }
    CWalletTx(const CWallet* pwalletIn, const CTransaction& txIn) : CMerkleTx(txIn)
    {
        Init(pwalletIn);
    }
    // Reset all wallet-specific state; shared by every constructor and by
    // deserialization.
    void Init(const CWallet* pwalletIn)
    {
        pwallet = pwalletIn;
        mapValue.clear();
        vOrderForm.clear();
        fTimeReceivedIsTxTime = false;
        nTimeReceived = 0;
        nTimeSmart = 0;
        fFromMe = false;
        strFromAccount.clear();
        fDebitCached = false;
        fCreditCached = false;
        fImmatureCreditCached = false;
        fAvailableCreditCached = false;
        fChangeCached = false;
        nDebitCached = 0;
        nCreditCached = 0;
        nImmatureCreditCached = 0;
        nAvailableCreditCached = 0;
        nChangeCached = 0;
        nOrderPos = -1;
        // NOTE(review): nSharedAvailableCreditCached is not zeroed here;
        // harmless while the flag below is false, but worth confirming.
        fSharedAvailableCreditCached = false;
    }
    // Serialization packs strFromAccount/nOrderPos/nTimeSmart into mapValue
    // before writing and unpacks them after reading. Field order is part of
    // the on-disk format — do not reorder.
    IMPLEMENT_SERIALIZE
    (
        CWalletTx* pthis = const_cast<CWalletTx*>(this);
        if (fRead)
            pthis->Init(NULL);
        char fSpent = false;
        if (!fRead)
        {
            pthis->mapValue["fromaccount"] = pthis->strFromAccount;
            WriteOrderPos(pthis->nOrderPos, pthis->mapValue);
            if (nTimeSmart)
                pthis->mapValue["timesmart"] = strprintf("%u", nTimeSmart);
        }
        nSerSize += SerReadWrite(s, *(CMerkleTx*)this, nType, nVersion,ser_action);
        std::vector<CMerkleTx> vUnused; // Used to be vtxPrev
        READWRITE(vUnused);
        READWRITE(mapValue);
        READWRITE(vOrderForm);
        READWRITE(fTimeReceivedIsTxTime);
        READWRITE(nTimeReceived);
        READWRITE(fFromMe);
        READWRITE(fSpent);
        if (fRead)
        {
            pthis->strFromAccount = pthis->mapValue["fromaccount"];
            ReadOrderPos(pthis->nOrderPos, pthis->mapValue);
            pthis->nTimeSmart = mapValue.count("timesmart") ? (unsigned int)atoi64(pthis->mapValue["timesmart"]) : 0;
        }
        pthis->mapValue.erase("fromaccount");
        pthis->mapValue.erase("version");
        pthis->mapValue.erase("spent");
        pthis->mapValue.erase("n");
        pthis->mapValue.erase("timesmart");
    )
    // make sure balances are recalculated
    void MarkDirty()
    {
        fCreditCached = false;
        fAvailableCreditCached = false;
        // Fix: also invalidate the immature and shared-credit caches.
        // Previously these survived MarkDirty(), so GetImmatureCredit(true)
        // could return a stale value after the wallet changed.
        fImmatureCreditCached = false;
        fSharedAvailableCreditCached = false;
        fDebitCached = false;
        fChangeCached = false;
    }
    void BindWallet(CWallet *pwalletIn)
    {
        pwallet = pwalletIn;
        MarkDirty();
    }
    // Legacy per-output spent flags (vfSpent); throws on a bad index.
    bool IsSpent(unsigned int nOut) const
    {
        if (nOut >= vout.size())
            throw std::runtime_error("CWalletTx::IsSpent() : nOut out of range");
        if (nOut >= vfSpent.size())
            return false;
        return (!!vfSpent[nOut]);
    }
    // Total debited from the bound wallet by this tx (cached).
    int64_t GetDebit() const
    {
        if (vin.empty())
            return 0;
        if (fDebitCached)
            return nDebitCached;
        nDebitCached = pwallet->GetDebit(*this);
        fDebitCached = true;
        return nDebitCached;
    }
    int64_t GetCredit(bool fUseCache=true) const
    {
        // Must wait until coinbase is safely deep enough in the chain before valuing it
        if (IsCoinBase() && GetBlocksToMaturity() > 0)
            return 0;
        // GetBalance can assume transactions in mapWallet won't change
        if (fUseCache && fCreditCached)
            return nCreditCached;
        nCreditCached = pwallet->GetCredit(*this);
        fCreditCached = true;
        return nCreditCached;
    }
    // Credit from a not-yet-mature coinbase in the main chain; 0 otherwise.
    int64_t GetImmatureCredit(bool fUseCache=true) const
    {
        if (IsCoinBase() && GetBlocksToMaturity() > 0 && IsInMainChain())
        {
            if (fUseCache && fImmatureCreditCached)
                return nImmatureCreditCached;
            nImmatureCreditCached = pwallet->GetCredit(*this);
            fImmatureCreditCached = true;
            return nImmatureCreditCached;
        }
        return 0;
    }
    // Credit from this tx's outputs that are ours and not yet spent.
    int64_t GetAvailableCredit(bool fUseCache=true) const
    {
        if (pwallet == 0)
            return 0;
        // Must wait until coinbase is safely deep enough in the chain before valuing it
        if (IsCoinBase() && GetBlocksToMaturity() > 0)
            return 0;
        if (fUseCache && fAvailableCreditCached)
            return nAvailableCreditCached;
        int64_t nCredit = 0;
        uint256 hashTx = GetHash();
        for (unsigned int i = 0; i < vout.size(); i++)
        {
            if (!pwallet->IsSpent(hashTx, i))
            {
                const CTxOut &txout = vout[i];
                nCredit += pwallet->GetCredit(txout);
                if (!MoneyRange(nCredit))
                    throw std::runtime_error("CWalletTx::GetAvailableCredit() : value out of range");
            }
        }
        nAvailableCreditCached = nCredit;
        fAvailableCreditCached = true;
        return nCredit;
    }
    int64_t GetChange() const
    {
        if (fChangeCached)
            return nChangeCached;
        nChangeCached = pwallet->GetChange(*this);
        fChangeCached = true;
        return nChangeCached;
    }
    void GetAmounts(std::list<std::pair<CTxDestination, int64_t> >& listReceived,
                    std::list<std::pair<CTxDestination, int64_t> >& listSent, int64_t& nFee, std::string& strSentAccount) const;
    void GetAccountAmounts(const std::string& strAccount, int64_t& nReceived,
                           int64_t& nSent, int64_t& nFee) const;
    bool IsFromMe() const
    {
        return (GetDebit() > 0);
    }
    // A zero-conf tx is "confirmed" if it (and all unconfirmed parents in
    // vtxPrev) originate from this wallet.
    bool IsConfirmed() const
    {
        // Quick answer in most cases
        if (!IsFinal())
            return false;
        if (GetDepthInMainChain() >= 1)
            return true;
        if (!IsFromMe()) // using wtx's cached debit
            return false;
        // If no confirmations but it's from us, we can still
        // consider it confirmed if all dependencies are confirmed
        std::map<uint256, const CMerkleTx*> mapPrev;
        std::vector<const CMerkleTx*> vWorkQueue;
        vWorkQueue.reserve(vtxPrev.size()+1);
        vWorkQueue.push_back(this);
        for (unsigned int i = 0; i < vWorkQueue.size(); i++)
        {
            const CMerkleTx* ptx = vWorkQueue[i];
            if (!ptx->IsFinal())
                return false;
            if (ptx->GetDepthInMainChain() >= 1)
                continue;
            if (!pwallet->IsFromMe(*ptx))
                return false;
            if (mapPrev.empty())
            {
                BOOST_FOREACH(const CMerkleTx& tx, vtxPrev)
                    mapPrev[tx.GetHash()] = &tx;
            }
            BOOST_FOREACH(const CTxIn& txin, ptx->vin)
            {
                if (!mapPrev.count(txin.prevout.hash))
                    return false;
                vWorkQueue.push_back(mapPrev[txin.prevout.hash]);
            }
        }
        return true;
    }
    bool IsTrusted() const
    {
        // Quick answer in most cases
        if (!IsFinalTx(*this))
            return false;
        int nDepth = GetDepthInMainChain();
        if (nDepth >= 1)
            return true;
        if (nDepth < 0)
            return false;
        if (!bSpendZeroConfChange || !IsFromMe()) // using wtx's cached debit
            return false;
        // Trusted if all inputs are from us and are in the mempool:
        BOOST_FOREACH(const CTxIn& txin, vin)
        {
            // Transactions not sent by us: not trusted
            const CWalletTx* parent = pwallet->GetWalletTx(txin.prevout.hash);
            if (parent == NULL)
                return false;
            const CTxOut& parentOut = parent->vout[txin.prevout.n];
            if (!pwallet->IsMine(parentOut))
                return false;
        }
        return true;
    }
    bool WriteToDisk();
    int64_t GetTxTime() const;
    int GetRequestCount() const;
    void RelayWalletTransaction();
    std::set<uint256> GetConflicts() const;
    /*
     * for shared wallet
     */
    mutable bool fSharedAvailableCreditCached;
    mutable int64 nSharedAvailableCreditCached;
    // Shared-wallet analogue of GetAvailableCredit. The cache read is
    // intentionally disabled (commented out) below; the value is still
    // recorded on every call.
    int64 GetSharedAvailableCredit(bool fUseCache=true) const
    {
        // Must wait until coinbase is safely deep enough in the chain before valuing it
        if (IsCoinBase() && GetBlocksToMaturity() > 0)
            return 0;
        //if (fUseCache && fSharedAvailableCreditCached)
            //return nSharedAvailableCreditCached;
        int64 nCredit = 0;
        for (unsigned int i = 0; i < vout.size(); i++)
        {
            if (!IsSpent(i))
            {
                const CTxOut &txout = vout[i];
                nCredit += pwallet->GetSharedCredit(txout);
                if (!MoneyRange(nCredit))
                    throw std::runtime_error("CWalletTx::GetSharedAvailableCredit() : value out of range");
            }
        }
        nSharedAvailableCreditCached = nCredit;
        fSharedAvailableCreditCached = true;
        return nCredit;
    }
};
class COutput
{
public:
const CWalletTx *tx;
int i;
int nDepth;
COutput(const CWalletTx *txIn, int iIn, int nDepthIn)
{
tx = txIn; i = iIn; nDepth = nDepthIn;
}
std::string ToString() const
{
return strprintf("COutput(%s, %d, %d) [%s]", tx->GetHash().ToString().c_str(), i, nDepth, FormatMoney(tx->vout[i].nValue).c_str());
}
void print() const
{
LogPrintf("%s\n", ToString().c_str());
}
};
/** Private key that includes an expiration date in case it never gets used. */
class CWalletKey
{
public:
    // Serialized private key material.
    CPrivKey vchPrivKey;
    // Creation time; 0 when no expiry was requested (see constructor).
    int64_t nTimeCreated;
    // Expiry time; 0 means "never expires".
    int64_t nTimeExpires;
    std::string strComment;
    //// todo: add something to note what created it (user, getnewaddress, change)
    ////   maybe should have a map<string, string> property map
    CWalletKey(int64_t nExpires=0)
    {
        nTimeCreated = (nExpires ? GetTime() : 0);
        nTimeExpires = nExpires;
    }
    // Serialization order is part of the on-disk format — do not reorder.
    IMPLEMENT_SERIALIZE
    (
        if (!(nType & SER_GETHASH))
            READWRITE(nVersion);
        READWRITE(vchPrivKey);
        READWRITE(nTimeCreated);
        READWRITE(nTimeExpires);
        READWRITE(LIMITED_STRING(strComment, 65536));
    )
};
/** Account information.
 * Stored in wallet with key "acc"+string account name.
 */
class CAccount
{
public:
    // The account's receiving public key; null CPubKey when unset.
    CPubKey vchPubKey;
    CAccount()
    {
        SetNull();
    }
    void SetNull()
    {
        vchPubKey = CPubKey();
    }
    // Serialization order is part of the on-disk format — do not reorder.
    IMPLEMENT_SERIALIZE
    (
        if (!(nType & SER_GETHASH))
            READWRITE(nVersion);
        READWRITE(vchPubKey);
    )
};
/** Internal transfers.
 * Database key is acentry<account><counter>.
 */
class CAccountingEntry
{
public:
    std::string strAccount;
    // Signed amount moved (positive = credit, negative = debit) — TODO confirm sign convention.
    int64_t nCreditDebit;
    int64_t nTime;
    std::string strOtherAccount;
    std::string strComment;
    mapValue_t mapValue;
    int64_t nOrderPos;  // position in ordered transaction list
    uint64_t nEntryNo;
    CAccountingEntry()
    {
        SetNull();
    }
    void SetNull()
    {
        nCreditDebit = 0;
        nTime = 0;
        strAccount.clear();
        strOtherAccount.clear();
        strComment.clear();
        nOrderPos = -1;
    }
    // On write, mapValue (+ any extra bytes) is appended to strComment after
    // an embedded NUL separator; on read it is split back out. The NUL is
    // what find("\0", 0, 1) locates below. Order is part of the on-disk
    // format — do not reorder.
    IMPLEMENT_SERIALIZE
    (
        CAccountingEntry& me = *const_cast<CAccountingEntry*>(this);
        if (!(nType & SER_GETHASH))
            READWRITE(nVersion);
        // Note: strAccount is serialized as part of the key, not here.
        READWRITE(nCreditDebit);
        READWRITE(nTime);
        READWRITE(LIMITED_STRING(strOtherAccount, 65536));
        if (!fRead)
        {
            WriteOrderPos(nOrderPos, me.mapValue);
            if (!(mapValue.empty() && _ssExtra.empty()))
            {
                CDataStream ss(nType, nVersion);
                ss.insert(ss.begin(), '\0');
                ss << mapValue;
                ss.insert(ss.end(), _ssExtra.begin(), _ssExtra.end());
                me.strComment.append(ss.str());
            }
        }
        READWRITE(LIMITED_STRING(strComment, 65536));
        // Search for the 1-byte NUL separator embedded in strComment.
        size_t nSepPos = strComment.find("\0", 0, 1);
        if (fRead)
        {
            me.mapValue.clear();
            if (std::string::npos != nSepPos)
            {
                CDataStream ss(std::vector<char>(strComment.begin() + nSepPos + 1, strComment.end()), nType, nVersion);
                ss >> me.mapValue;
                me._ssExtra = std::vector<char>(ss.begin(), ss.end());
            }
            ReadOrderPos(me.nOrderPos, me.mapValue);
        }
        // Trim the packed suffix so strComment holds only the user comment.
        if (std::string::npos != nSepPos)
            me.strComment.erase(nSepPos);
        me.mapValue.erase("n");
    )
private:
    // Unrecognized trailing bytes preserved for round-tripping.
    std::vector<char> _ssExtra;
};
#endif
| {
"content_hash": "46ccc706a53b4244352665ca9bebd1ab",
"timestamp": "",
"source": "github",
"line_count": 1031,
"max_line_length": 203,
"avg_line_length": 32.09602327837052,
"alnum_prop": 0.6381493457435556,
"repo_name": "mirrax/Feathercoin2",
"id": "db9ae3cf76250fb61699a483c505b08bd1fa76e4",
"size": "33091",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/wallet.h",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "15630"
},
{
"name": "C",
"bytes": "103733"
},
{
"name": "C++",
"bytes": "3492615"
},
{
"name": "CSS",
"bytes": "1127"
},
{
"name": "Groff",
"bytes": "18284"
},
{
"name": "HTML",
"bytes": "50620"
},
{
"name": "Makefile",
"bytes": "8837"
},
{
"name": "Objective-C",
"bytes": "1052"
},
{
"name": "Objective-C++",
"bytes": "6262"
},
{
"name": "Protocol Buffer",
"bytes": "2308"
},
{
"name": "Python",
"bytes": "103780"
},
{
"name": "QMake",
"bytes": "2191"
},
{
"name": "Shell",
"bytes": "46281"
}
],
"symlink_target": ""
} |
<!DOCTYPE html>
<html lang="en" xmlns="http://www.w3.org/1999/xhtml">
<head>
<meta charset="utf-8" />
<title>Goals for Version 2.0</title>
<style type="text/css">
BODY {
Font-family: Verdana;
}
P {
Font-size: 12px;
}
</style>
</head>
<body>
<p>
This document defines the goals for version 2.0</p>
<p>
</p>
<h1>Maintainability<br /></h1>
<ul>
<li>Implement Syntax Analysis</li>
<li>Implement Code Analysis</li>
</ul>
<h1>Modularity</h1>
<ul>
<li>Expose sub-components (like the HTML parser) to public</li>
</ul>
<h1>Performance</h1>
<ul>
<li>Reduce memory footprint</li>
</ul>
<h1>Conformance</h1>
<ul>
<li>Pass ACID1 Test</li>
</ul>
<p>
</p>
<h1>Implementation</h1>
<h2>HTML5 Parsing</h2>
<p>
Fully compatible parsing of HTML5 and HTML4 documents. Parsing of FRAMESET excluded as well as any SCRIPT related functionality.</p>
<h2>HTML DOM</h2>
<h3>Partial DOM Level 2 Core</h3>
<p>
Implement partial DOM Level 2 Core specification. Implement interfaces and functionality that is related to read-only accessing the parsed document.
</p>
<p>
</p>
<p>
</p>
</body>
</html> | {
"content_hash": "ca3d2e16a7d9f4791267e58d370e30af",
"timestamp": "",
"source": "github",
"line_count": 54,
"max_line_length": 156,
"avg_line_length": 25.166666666666668,
"alnum_prop": 0.5584988962472406,
"repo_name": "todor-dk/HTML-Renderer",
"id": "d6734496ae0e5cfb678b459368b789eb2c2612f3",
"size": "1361",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Source/Read Me.html",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "ASP",
"bytes": "690"
},
{
"name": "Batchfile",
"bytes": "7229"
},
{
"name": "C#",
"bytes": "2654781"
},
{
"name": "HTML",
"bytes": "2938670481"
},
{
"name": "JavaScript",
"bytes": "51318"
},
{
"name": "PowerShell",
"bytes": "2715"
}
],
"symlink_target": ""
} |
\documentclass[10pt,xcolor=pdflatex]{beamer}
\usepackage{newcent}
\usepackage[utf8]{inputenc}
\usepackage[czech]{babel}
\usepackage{hyperref} % \href for image (\includegraphics)
\usepackage{fancyvrb}
\usetheme{FIT}
\usepackage{soul} % for crossing text (\st{})
\definecolor{sourcesclr}{rgb}{.38,.38,.38}
\newcommand{\srctext}[1]{{\fontsize{7}{9}\selectfont\textcolor{sourcesclr}{#1}}}
\newenvironment<>{positiveblock}[1]{%
\begin{actionenv}#2%
\def\insertblocktitle{#1}%
\par%
\mode<presentation>{%
\setbeamercolor{block title}{fg=white,bg=green!20!black}
\setbeamercolor{block body}{fg=black,bg=green!40}
\setbeamercolor{itemize item}{fg=green!20!black}
\setbeamertemplate{itemize item}[triangle]
}%
\usebeamertemplate{block begin}}
{\par\usebeamertemplate{block end}\end{actionenv}}
\newenvironment<>{negativeblock}[1]{%
\begin{actionenv}#2%
\def\insertblocktitle{#1}%
\par%
\mode<presentation>{%
\setbeamercolor{block title}{fg=white,bg=red!20!black}
\setbeamercolor{block body}{fg=black,bg=red!20}
\setbeamercolor{itemize item}{fg=red!20!black}
\setbeamertemplate{itemize item}[triangle]
}%
\usebeamertemplate{block begin}}
{\par\usebeamertemplate{block end}\end{actionenv}}
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\title{Lego Mindstorm EV3 ve výuce programování a robotiky}
\author[]{Jaroslav Páral}
\institute[]{Vysoké učení technické v Brně, Fakulta informačních technologií\\
Bo\v{z}et\v{e}chova 1/2, 612 66 Brno - Kr\'alovo Pole\\
[email protected]}
\date{13. června 2017}
%\date{\today}
%\date{} % bez data
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\begin{document}
\frame[plain]{\titlepage}
\begin{frame}\frametitle{Výukový roboti}
\vspace{15mm}
\begin{figure}[h]
\begin{minipage}[b]{.45\textwidth}
\centering
\includegraphics[width=\textwidth]{../text/images/pololu-3pi-robot-8-on-line.jpg}
%\caption[Robot Pololu 3pi]{Robot Pololu 3pi\protect\footnotemark}
\label{fig:pololu-3pi-robot-8-on-line}
\end{minipage}
\hfill
\begin{minipage}[b]{.45\textwidth}
\centering
\includegraphics[width=\textwidth]{../text/images/Edison-Educational-robot.jpg}
%\caption[Robot Edison]{Robot Edison\protect\footnotemark}
\label{fig:Edison-Educational-robot}
\end{minipage}
\end{figure}
\vspace{15mm}
\srctext{Zdroj levý obrázek: \url{https://www.pololu.com/product/975} \\}
\srctext{Zdroj pravý obrázek: \url{https://meetedison.com/meet-edison-v2-0/}}
\end{frame}
\begin{frame}\frametitle{LEGO MINDSTORMS EV3}
\begin{figure}[h]
\centering
\includegraphics[width=190px]{img/lego-mindstorms-ev3_Robotics-for-Kids.png}
%\caption[\legoEV{ }-- samobalancující robot]{\legoEV{ }-- balancující robot\protect\footnotemark}
\label{fig:lego-mindstorms-ev3_Robotics-for-Kids}
\end{figure}
\srctext{Zdroj obrázek: \url{https://www.bermotech.com/training/coding-for-teenagers-and-children/y-robotics-with-lego-mindstorm-ev3/}}
\end{frame}
\begin{frame}\frametitle{Originální vývojové prostředí pro LEGO}
\begin{columns}
\column{.48\textwidth}
\begin{positiveblock}{Výhody}
\begin{itemize}
\item jednoduché rozhraní
\item intuitivní používání
\item vhodné ``od 7 let''
\end{itemize}
\end{positiveblock}
\column{.48\textwidth}
\end{columns}
\vspace{10mm}
\begin{figure}[h]
\centering
\includegraphics[width=\textwidth]{../text/images/lego-soft_live-debugging_line-advance.png}
\end{figure}
\end{frame}
\begin{frame}\frametitle{Originální vývojové prostředí pro LEGO}
\begin{columns}
\column{.48\textwidth}
\begin{positiveblock}{Výhody}
\begin{itemize}
\item jednoduché rozhraní
\item intuitivní používání
\item vhodné ``od 7 let''
\end{itemize}
\end{positiveblock}
\column{.48\textwidth}
\begin{negativeblock}{Problémy}
\begin{itemize}
\item rozsáhlé programy
\item pokročilé úpravy
\item orientace
\end{itemize}
\end{negativeblock}
\end{columns}
\vspace{7mm}
\begin{figure}[h]
\centering
\includegraphics[width=\textwidth]{../text/images/lego-soft_legolib_converge_array.png}
%\caption[Ukázka nepřehlednosti rozsáhlejších programů]{Ukázka nepřehlednosti rozsáhlejších programů - žluté dráhy značí předávání vstupních a~výstupních parametrů mezi jednotlivými bloky - velmi špatně se zjišťuje a~kontroluje správnost zapojení žlutých drah.}
\label{fig:lego-soft_legolib_converge_array}
\end{figure}
\end{frame}
\begin{frame}\frametitle{Systém EV3RT}
\large
\begin{columns}
\column{.48\textwidth}
Vlastnosti:
\begin{itemize}
\item RTOS
\item open-source
\item multiplatformní
\item až o 3 řády výkonnější
\end{itemize}
\pause
\column{.48\textwidth}
Připravil jsem:
\begin{itemize}
\item C++ API
\pause
\item vývojové prostředí
\pause
\item dokumentaci
\pause
\item ukázkové příklady % tutoriály
\end{itemize}
\end{columns}
\end{frame}
\begin{frame}\frametitle{C++ API}
\centering
\large
Cíl: Usnadnit přechod ze standardního vývojového prostředí.
\vspace{10mm}
\includegraphics[width=140px]{img/doc-motor_on.png}
\end{frame}
\begin{frame}\frametitle{Vývojové prostředí}
\centering
\large
Visual Studio Code
\begin{figure}[h]
\includegraphics[width=\textwidth]{../text/images/visual-studio-code_intellisense-param.png}
\end{figure}
\end{frame}
\begin{frame}\frametitle{Dokumentace}
\vspace{-5mm}
\begin{figure}[h]
\centering
\href{http://rb3rt.readthedocs.io/cs/latest/ev3cxx\_motor-class.html\#onforseconds}{
\includegraphics[width=320px]{img/web-documentation.png}
}
\end{figure}
\end{frame}
\begin{frame}\frametitle{Závěr}
\begin{columns}
\column{.48\textwidth}
\begin{itemize}
\item výkonnostní testy
\item C++ API
\item vývojové prostředí
\item dokumentaci
\item ukázkové příklady % tutoriály
\item prakticky využíváno
\end{itemize}
\pause
\column{.48\textwidth}
\includegraphics[width=120px]{img/lego-mindstorms-ev3-education-kit-with-software.JPG}
\end{columns}
\centering
\vspace{10mm}
\large
Děkuji Vám za pozornost.
\vspace{10mm}
\small
Dokumentace k projektu: \url{http://rb3rt.readthedocs.io}
\vspace{10mm}
\raggedright
\srctext{Zdroj obrázek: \url{https://www.generationrobots.com/en/402314-lego-mindstorms-ev3-education-kit-with-software.html}}
\end{frame}
\begin{frame}\frametitle{Otázka od oponenta}
\centering
\large
Otázka:\\
V práci zmiňujete práci se studenty. Konzultoval jste postup či podobu také s pedagogy?
\vspace{10mm}
Odpověď:\\
Konzultoval jsem postup s lektory z Robotárny (Dům dětí a mládeže Brno, Helceletova), kteří již několik let vedou kroužky s LEGO MINDSTORMS.
\end{frame}
\begin{frame}\frametitle{Ukázkový příklad}
\centering
{
\includegraphics[width=\textwidth]{img/ev3cxx_robotutorial_05-switch.png}
}
\end{frame}
\begin{frame}\frametitle{Ukázkový příklad}
\centering
\href{http://rb3rt.readthedocs.io/cs/latest/ev3cxx_robotutorial/05-switch.html}{
\includegraphics[width=\textwidth]{img/ev3cxx_robotutorial_05-switch_source-code.png}
}
\end{frame}
% \bluepage{Thank You For Your Attention !}
\end{document}
\begin{frame}\frametitle{Závěr}
Připravil jsem:
\begin{itemize}
\item C++ API
\item vývojové prostředí
\item dokumentaci
\item ukázkové příklady % tutoriály
\end{itemize}
\pause
\centering
\vspace{10mm}
\large
Děkuji Vám za pozornost.
\vspace{10mm}
\small
Dokumentace k projektu: \url{http://rb3rt.readthedocs.io}
\end{frame}
\begin{frame}\frametitle{Systém EV3RT}
Vlastnosti:
\begin{itemize}
\item RTOS
\item open-source
\item multiplatformní
\item o 2 až 3 řády výkonnější
\end{itemize}
\end{frame}
\begin{frame}\frametitle{Alternativní prostředí}
\begin{columns}
\column{.48\textwidth}
\begin{positiveblock}{Prostředí}
\begin{itemize}
\item ROS
\item Matlab/LabVIEW
\item ROBOTC
\item {\it ev3dev}
\item EV3RT
\end{itemize}
\end{positiveblock}
\column{.48\textwidth}
\begin{negativeblock}{Požadavky}
\begin{itemize}
\item
\item
\item
\item
\item
\end{itemize}
\end{negativeblock}
\end{columns}
\end{frame}
\begin{frame}\frametitle{Alternativní prostředí}
\begin{columns}
\column{.48\textwidth}
\begin{positiveblock}{Prostředí}
\begin{itemize}
\item \st{ROS}
\item Matlab/LabVIEW
\item ROBOTC
\item {\it ev3dev}
\item EV3RT
\end{itemize}
\end{positiveblock}
\column{.48\textwidth}
\begin{negativeblock}{Požadavky}
\begin{itemize}
\item nezávislé na PC
\item
\item
\item
\item
\end{itemize}
\end{negativeblock}
\end{columns}
\end{frame}
\begin{frame}\frametitle{Alternativní prostředí}
\begin{columns}
\column{.48\textwidth}
\begin{positiveblock}{Prostředí}
\begin{itemize}
\item \st{ROS}
\item \st{Matlab/LabVIEW}
\item \st{ROBOTC}
\item {\it ev3dev}
\item EV3RT
\end{itemize}
\end{positiveblock}
\column{.48\textwidth}
\begin{negativeblock}{Požadavky}
\begin{itemize}
\item nezávislé na PC
\item licenční poplatky
\item multiplatformní
\item
\item
\end{itemize}
\end{negativeblock}
\end{columns}
\end{frame}
\begin{frame}\frametitle{Alternativní prostředí}
\begin{columns}
\column{.48\textwidth}
\begin{positiveblock}{Prostředí}
\begin{itemize}
\item \st{ROS}
\item \st{Matlab/LabVIEW}
\item \st{ROBOTC}
\item \st{{ev3dev}}
\item EV3RT
\end{itemize}
\end{positiveblock}
\column{.48\textwidth}
\begin{negativeblock}{Požadavky}
\begin{itemize}
\item nezávislé na PC
\item licenční poplatky
\item multiplatformní
\item výkon
\item real-time běh
\end{itemize}
\end{negativeblock}
\end{columns}
\end{frame}
| {
"content_hash": "4ef177547eb1764f3e62284c030c8ffa",
"timestamp": "",
"source": "github",
"line_count": 420,
"max_line_length": 269,
"avg_line_length": 29.03095238095238,
"alnum_prop": 0.5663085376855573,
"repo_name": "JarekParal/bcthesis",
"id": "1daf27bd6dc66f55efd557a3b346b8440cf999b7",
"size": "12425",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "presentation/presentation.tex",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "1654"
},
{
"name": "TeX",
"bytes": "216483"
}
],
"symlink_target": ""
} |
# Auto-generated Rails schema for a simple blog: uploaded pictures, posts,
# tags, and the taggings join table linking posts to tags.
ActiveRecord::Schema.define(version: 20161216081042) do
  # Standalone uploaded images; "image" holds the stored file reference.
  create_table "pictures", force: :cascade do |t|
    t.string "image"
    t.datetime "created_at", null: false
    t.datetime "updated_at", null: false
  end
  # Blog posts with an optional header image.
  create_table "posts", force: :cascade do |t|
    t.string "title"
    t.text "summary"
    t.text "body"
    t.datetime "created_at", null: false
    t.datetime "updated_at", null: false
    t.string "image"
  end
  # Join table implementing the many-to-many Post <-> Tag association.
  create_table "taggings", force: :cascade do |t|
    t.integer "post_id"
    t.integer "tag_id"
    t.datetime "created_at", null: false
    t.datetime "updated_at", null: false
    t.index ["post_id"], name: "index_taggings_on_post_id"
    t.index ["tag_id"], name: "index_taggings_on_tag_id"
  end
  # Tag names referenced from taggings.
  create_table "tags", force: :cascade do |t|
    t.string "name"
    t.datetime "created_at", null: false
    t.datetime "updated_at", null: false
  end
end
| {
"content_hash": "a2445c6d94693dc87edf17319a350a6f",
"timestamp": "",
"source": "github",
"line_count": 33,
"max_line_length": 58,
"avg_line_length": 27.666666666666668,
"alnum_prop": 0.628696604600219,
"repo_name": "TombaX/BlogRails5",
"id": "e498deb0b3d7d8b1dc7dd27b51c1d01a2213e60a",
"size": "1636",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "db/schema.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "1370"
},
{
"name": "CoffeeScript",
"bytes": "1295"
},
{
"name": "HTML",
"bytes": "9050"
},
{
"name": "JavaScript",
"bytes": "1250"
},
{
"name": "Ruby",
"bytes": "34255"
}
],
"symlink_target": ""
} |
layout: post
title: Radionica? Veblog?
date: '2003-02-10 09:02:14 +0100'
mt_id: 3
post_id: 3
author: jablan
---
**Radionica** je samo lako pamtljivo ime za sajt sa prilično širokom tematikom. Većina zapisa je na ovaj ili onaj način vezana za softver, razvoj programa i veb sajtova i proizašla je iz svakodnevne prakse rada u softverskoj industriji, kao i korišćenja tuđeg softvera. Na ovom mestu mlađi programeri će možda naći neki koristan savet koji će im olakšati put do kvalitetnog koda.
Za one koji se pitaju šta je veblog, kratko objašnjenje. **Veblog** (ili skraćeno blog) je u poslednje vreme na netu vrlo popularna forma internet izražavanja. Ustvari, predstavlja svojevrstan niz zabeleški jednog (ili više) autora poređanih hronološki, sve to na veb sajtu (veblog od engl. _web_ što znači veb ;), i engl. _log_ što znači otprilike "dnevnik", ili "zapisi").
Fotografije u zaglavlju su delo autora radionice. Obrađene su izvanrednim besplatnim programom za obradu slike [GIMP](http://www.gimp.org/).
Svim posetiocima želim prijatno i zanimljivo surfovanje. Uživajte!
| {
"content_hash": "c9b81954cd243a89fcb6b009c8b3b71c",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 380,
"avg_line_length": 72,
"alnum_prop": 0.7796296296296297,
"repo_name": "jablan/stari-blog",
"id": "cde32b480ab5427ecd5418bc04047105d3fa9777",
"size": "1113",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "_posts/2003-02-10-radionica-veblog.markdown",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "11626"
},
{
"name": "HTML",
"bytes": "4946"
}
],
"symlink_target": ""
} |
package template.server
object Index {

  /**
   * Renders the HTML shell page that boots the ScalaJS client.
   *
   * @param cacheBreaker path prefix used to defeat stale browser caches
   * @param environment  selects the fast-optimized (dev) or fully-optimized
   *                     (prod) client bundle
   */
  def apply(cacheBreaker: String, environment: Environment) = {
    val clientBundle = environment match {
      case Environment.Dev  => "client-fastopt.js"
      case Environment.Prod => "client-opt.js"
    }
    s"""<!DOCTYPE html>
<html>
<head>
<meta charset="UTF-8">
<title>Akka Http and ScalaJS Template</title>
<style>
html {
height: 100%;
}
body {
height: 100%;
margin: 0;
}
#app {
height: 100%;
}
</style>
</head>
<body>
<div id="app"></div>
<script type="text/javascript" src="/$cacheBreaker/client-jsdeps.js"></script>
<script type="text/javascript" src="/$cacheBreaker/$clientBundle"></script>
<script type="text/javascript">
template.client.Main().main();
</script>
</body>
</html>
"""
  }
}
| {
"content_hash": "e8e182ac655b81db04ac567de90a5dca",
"timestamp": "",
"source": "github",
"line_count": 39,
"max_line_length": 84,
"avg_line_length": 21.897435897435898,
"alnum_prop": 0.5749414519906323,
"repo_name": "markschaake/akka-http-scalajs",
"id": "a7b197b052a708e00e7adfcf392ab3233b183241",
"size": "854",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "server/src/main/scala/server/Index.scala",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Scala",
"bytes": "26144"
}
],
"symlink_target": ""
} |
import argparse
import io
import json
import logging
import pandas as pd
import sys
from pprint import pprint
from . import engage, events, export, funnel, retention
def events_get(args):
    """Print unique/general/average counts for the requested events."""
    output_format = "csv" if args.csv else "json"
    result = events.events(args.event, args.type, args.unit, args.interval,
                           output_format)
    print(result)
def events_top(args):
    """Print today's top events for the given analysis type."""
    top_events = events.top(args.type, args.limit)
    print(top_events)
def events_names(args):
    """Print the most common event names from the last 31 days."""
    common_names = events.names(args.type, args.limit)
    print(common_names)
def export_data(args):
    """Print raw exported event data for the given date range."""
    exported = export.export(args.from_date, args.to_date, args.event,
                             args.where, args.bucket)
    print(exported)
def funnel_list(args):
    """Print one `funnel_id<TAB>name` line per saved funnel."""
    lines = []
    for f in funnel.list():
        lines.append("%s\t%s" % (f["funnel_id"], f["name"]))
    print("\n".join(lines))
def funnel_show(args):
    """Print one funnel's data, as JSON or (with --csv) as a CSV table with
    one row per date and one column per funnel step.

    Assumes every date reports the same step events as the first date,
    which is used to build the column header -- TODO confirm against the
    Mixpanel funnel API response shape.
    """
    funnel_data = funnel.funnel(args.funnel_id)
    if args.csv:
        dates = funnel_data["meta"]["dates"]
        # Column set comes from the steps reported for the first date.
        columns = [step["event"] for step in
                   funnel_data["data"][dates[0]]["steps"]]
        df = pd.DataFrame(columns=columns, index=dates)
        for date in funnel_data["meta"]["dates"]:
            for step in funnel_data["data"][date]["steps"]:
                df.loc[date, step["event"]] = step["count"]
        buf = io.StringIO()
        # Pass sep/index_label by keyword: positional to_csv() arguments
        # after the path were deprecated in pandas 1.5 and removed in 2.0.
        df.to_csv(buf, sep=args.fs, index_label="week")
        buf.seek(0)
        print(buf.read())
    else:
        print(json.dumps(funnel_data))
def engage_list(args):
    """Pretty-print one page of Mixpanel people profiles."""
    profiles = engage.list(where=args.where, session_id=args.session_id,
                           page=args.page)
    pprint(profiles)
def retention_data(args):
    """Print cohort-retention data, as JSON or (with --csv) as a CSV table
    with one row per cohort date and one column per interval index."""
    data = retention.retention(args.from_date, args.to_date,
                               retention_type=args.type,
                               born_event=args.born_event, event=args.event,
                               born_where=args.born_where, where=args.where,
                               interval=args.interval,
                               interval_count=args.interval_count,
                               unit=args.unit, on=args.on, limit=args.limit)
    if args.csv:
        dates = sorted(data.keys())
        columns = range(args.interval_count + 1)
        df = pd.DataFrame(columns=columns, index=dates)
        for date in dates:
            row = data[date]["counts"]
            for i in range(len(row)):
                df.loc[date, i] = row[i]
        buf = io.StringIO()
        # Label the index column with the interval size that was requested.
        if args.unit is None:
            if args.interval is None:
                label = "day"
            else:
                label = "%d days" % args.interval
        else:
            label = args.unit
        # Pass sep by keyword: positional to_csv() arguments after the path
        # were deprecated in pandas 1.5 and removed in pandas 2.0.
        df.to_csv(buf, sep=args.fs, index_label=label)
        buf.seek(0)
        print(buf.read())
    else:
        print(json.dumps(data))
if __name__ == "__main__":
    # Top-level argument parser; each subcommand below registers its own
    # sub-parser and binds a handler function via set_defaults(func=...).
    parser = argparse.ArgumentParser(description="Retrieve Mixpanel data")
    parser.add_argument("-v", "--verbose", dest="verbose",
                        help="verbose logging output", action="store_true")
    subparsers = parser.add_subparsers(title="subcommands")
    #
    # Events
    #
    p_events = subparsers.add_parser("events", help="Query events")
    sp_events = p_events.add_subparsers(title="events subcommands")
    p_event_get = sp_events.add_parser("get", help="Get unique, total, or "
                                       "average data for a set of events over "
                                       "the last N days, weeks, or months.")
    p_event_get.add_argument("--event", "-e", action="append", required=True,
                             help="The event that you wish to get data for. "
                             "This argument may be included multiple times")
    p_event_get.add_argument("--type", "-t", required=True,
                             choices=("general", "unique", "average"),
                             help="The analysis type you would like to get "
                             "data for - such as general, unique, or average "
                             "events.")
    p_event_get.add_argument("--unit", "-u", required=True,
                             choices=("minute", "hour", "day", "week", "month"),
                             help="Determines the level of granularity of the "
                             "data you get back. Note that you cannot get "
                             "hourly uniques.")
    p_event_get.add_argument("--interval", "-i", required=True, type=int,
                             help="The number of \"units\" to return data for.")
    p_event_get.add_argument("--csv", action="store_true",
                             help="Print output in CSV format")
    p_event_get.set_defaults(func=events_get)
    p_event_top = sp_events.add_parser("top", help="Get the top events for "
                                       "today, with their counts and the "
                                       "normalized percent change from "
                                       "yesterday.")
    p_event_top.add_argument("type", choices=("general", "unique", "average"),
                             help="The analysis type you would like to get "
                             "data for - such as general, unique, or average "
                             "events.")
    p_event_top.add_argument("--limit", "-l", type=int, help="The maximum "
                             "number of events to return. Defaults to 100. The "
                             "maximum this value can be is 100.")
    p_event_top.set_defaults(func=events_top)
    p_event_names = sp_events.add_parser("names", help="Get a list of the most "
                                         "common events over the last 31 days.")
    p_event_names.add_argument("type", choices=("general", "unique", "average"),
                               help="The analysis type you would like to get "
                               "data for - such as general, unique, or average "
                               "events.")
    p_event_names.add_argument("--limit", "-l", type=int, help="The maximum "
                               "number of events to return. Defaults to 255.")
    p_event_names.set_defaults(func=events_names)
    #
    # Engage
    #
    p_engage = subparsers.add_parser("engage", help="Gets user data")
    p_engage.add_argument("-w", "--where", type=str, help="An expression to "
                          "filter people by. See the expression section.")
    p_engage.add_argument("-p", "--page", type=int, help="Which page of the "
                          "results to retrieve. Pages start at zero. If the "
                          "\"page\" parameter is provided, the session_id "
                          "parameter must also be provided.")
    p_engage.add_argument("-s", "--session_id", type=str, help="A string id "
                          "provided in the results of a previous query. Using "
                          "a session_id speeds up api response, and allows "
                          "paging through results.")
    p_engage.set_defaults(func=engage_list)
    #
    # Export
    #
    p_export = subparsers.add_parser("export", help="Export a date range")
    p_export.add_argument("from_date", help="The date in yyyy-mm-dd format "
                          "from which to begin querying for the event from. "
                          "This date is inclusive.")
    p_export.add_argument("to_date", help="The date in yyyy-mm-dd format "
                          "from which to stop querying for the event from. "
                          "This date is inclusive.")
    p_export.add_argument("--event", "-e", action="append",
                          help="Limit data to certain events. This argument "
                          "may be included multiple times")
    p_export.add_argument("--where", "-w", help="An expression to filter "
                          "events by. See the expression section on the main "
                          "data export API page.")
    p_export.add_argument("--bucket", "-b", help="[Platform] - the specific "
                          "data bucket you would like to query.")
    p_export.set_defaults(func=export_data)
    #
    # Funnel
    #
    p_funnel = subparsers.add_parser("funnel", help="Perform actions on funnels")
    sp_funnel = p_funnel.add_subparsers(title="funnel subcommands")
    p_funnel_list = sp_funnel.add_parser("list", help="Get the names and funnel_ids of your funnels.")
    p_funnel_list.set_defaults(func=funnel_list)
    p_funnel_show = sp_funnel.add_parser("show", help="Show the data in one funnel")
    p_funnel_show.add_argument("funnel_id", type=int, help="The ID of the funnel to display, returned by funnel list")
    p_funnel_show.add_argument("--csv", action="store_true",
                               help="Print output in CSV format")
    p_funnel_show.add_argument("--fs", type=str, default=",",
                               help="Field separator to use when printing CSV "
                               "output. Defaults to ','.")
    p_funnel_show.set_defaults(func=funnel_show)
    #
    # Retention
    #
    p_retention = subparsers.add_parser("retention",
                                        help="Get cohort analysis.")
    p_retention.add_argument("from_date", type=str,
                             help="The date in yyyy-mm-dd format from which to "
                             "begin generating cohorts from. This date is inclusive.")
    p_retention.add_argument("to_date", type=str,
                             help="The date in yyyy-mm-dd format from which to "
                             "stop generating cohorts from. This date is inclusive.")
    p_retention.add_argument("--type", "-t", type=str,
                             choices=(None, "birth", "compounded"),
                             help="Must be either 'birth' or 'compounded'. "
                             "Defaults to 'birth'.")
    p_retention.add_argument("--born_event", "-b", type=str,
                             help="The first event a user must do to be "
                             "counted in a birth retention cohort. Required "
                             "when retention_type is 'birth'; ignored otherwise.")
    p_retention.add_argument("--event", "-e", type=str,
                             help="The event to generate returning counts for. "
                             "Applies to both birth and compounded retention. "
                             "If not specified, we look across all events.")
    p_retention.add_argument("--born_where", type=str,
                             help="An expression to filter born_events by. See "
                             "the expression section.")
    p_retention.add_argument("--where", "-w", type=str,
                             help="An expression to filter the returning "
                             "events by. See the expression section.")
    p_retention.add_argument("--interval", "-i", type=int,
                             help="The number of days you want your results "
                             "bucketed into. The default value is 1 or "
                             "specified by unit.")
    p_retention.add_argument("--interval_count", "-c", type=int, default=1,
                             help="The number of intervals you want; defaults "
                             "to 1.")
    p_retention.add_argument("--unit", "-u", type=str,
                             choices=(None, "day", "week", "month"),
                             help="This is an alternate way of specifying "
                             "interval and can be 'day', 'week', or 'month'.")
    p_retention.add_argument("--on", type=str,
                             help="The property expression to segment the "
                             "second event on. See the expression section.")
    p_retention.add_argument("--limit", "-l", type=int,
                             help="Return the top limit segmentation values. "
                             "This parameter does nothing if 'on' is not specified.")
    p_retention.add_argument("--csv", action="store_true",
                             help="Print output in CSV format")
    p_retention.add_argument("--fs", type=str, default=",",
                             help="Field separator to use when printing CSV "
                             "output. Defaults to ','.")
    p_retention.set_defaults(func=retention_data)
    #
    # Handle input
    #
    args = parser.parse_args()
    # The verbose flag drives logging verbosity for the whole process.
    logging.basicConfig(level=(logging.DEBUG if args.verbose
                               else logging.WARN))
    # No subcommand selected: argparse leaves "func" unset, so show usage
    # and exit with a non-zero status.
    if not hasattr(args, "func"):
        parser.print_help()
        sys.exit(2)
    args.func(args)
| {
"content_hash": "20f7b74c42a25317e2aeeb7330e062ec",
"timestamp": "",
"source": "github",
"line_count": 256,
"max_line_length": 118,
"avg_line_length": 49.23828125,
"alnum_prop": 0.5247124157080524,
"repo_name": "atomic-labs/mixpanel-cli",
"id": "df19dcef3947312f3542f575d7f4e36aa1c70fcf",
"size": "12615",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "mixpanel_cli/__main__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "20154"
}
],
"symlink_target": ""
} |
package com.laranevans.springrestreact.config;
import com.laranevans.springrestreact.model.domain.Possession;
import com.laranevans.springrestreact.model.domain.Person;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.rest.core.config.RepositoryRestConfiguration;
import org.springframework.data.rest.webmvc.config.RepositoryRestMvcConfiguration;
@Configuration
public class RestMvcConfiguration extends RepositoryRestMvcConfiguration {
@Override
public RepositoryRestConfiguration config() {
RepositoryRestConfiguration config = super.config();
/*
Setting the baseUri effectively namespaces your REST endpoints, making them easier to secure. You can just
secure "/api/**" and you're good.
*/
config.setBaseUri("/api");
/*
Without this along with the exposeIdsFor call in configureRepositoryRestConfiguration below, id values aren't
returned when new domain entities are created with POST requests. Having the id in the response is valuable
because without it you need to send an additional GET request to retrieve the id. Having it in there saves a
round trip when creating things.
*/
config.setReturnBodyOnCreate(true);
/*
Returning the body on update is nice because updates can cause things like instance version and updated_at
values can be changed when updating other attributes of an object. Returning the updated object in the
response body makes those updates immediately available to you, again, saving you a round trip.
*/
config.setReturnBodyOnUpdate(true);
return config;
}
@Override
protected void configureRepositoryRestConfiguration(RepositoryRestConfiguration config) {
/*
Note the refences to the specific sample domain model classes.
You should/would change the arguments to the classes in your domain model.
Having the IDs exposed makes it possible to use the IDs in javascript. You COULD refer to the URI of the
instance as an identifier. But, to me it's just easier to keep the API on the client similar to the API on
the server.
*/
config.exposeIdsFor(Person.class, Possession.class);
}
} | {
"content_hash": "80417e8aa03b89f1f8873b5e139a9cdf",
"timestamp": "",
"source": "github",
"line_count": 49,
"max_line_length": 118,
"avg_line_length": 47.48979591836735,
"alnum_prop": 0.7262569832402235,
"repo_name": "laran/spring-data-rest-mvc-react",
"id": "ed09a5b81de1e377b39ad0e688fac9139d4fe01d",
"size": "2327",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/main/java/com/laranevans/springrestreact/config/RestMvcConfiguration.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "80"
},
{
"name": "HTML",
"bytes": "1814"
},
{
"name": "Java",
"bytes": "20838"
},
{
"name": "JavaScript",
"bytes": "18887"
}
],
"symlink_target": ""
} |
const SET_PLAYER_READY = 'SET_PLAYER_READY';
const TOGGLE_PLAYER_READY = 'TOGGLE_PLAYER_READY';

/**
 * Action creator: set a player's ready flag to an explicit value.
 *
 * @param {Object} payload
 * @param {boolean} payload.readyState - value to set the flag to
 * @param {*} payload.target - the player the action applies to
 */
export function setReady({ readyState, target }) {
  return {
    readyState,
    target,
    source: target,
    type: SET_PLAYER_READY,
  };
}

/** Action creator: flip a player's ready flag. */
export function toggleReady({ target }) {
  return {
    target,
    source: target,
    type: TOGGLE_PLAYER_READY,
  };
}

/** Action creator: force a player's ready flag back to false. */
export function resetReady({ target }) {
  // Bug fix: setReady() destructures `readyState`; the old code passed
  // `{ to: false }`, which left readyState undefined in the action.
  return setReady({ target, readyState: false });
}

/**
 * Reducer for a single player's ready flag.
 *
 * @param {boolean} state - current flag (defaults to false)
 * @param {Object} action - a setReady/toggleReady action, or anything else
 * @returns {boolean} the next flag value
 */
export default function ready(state = false, action) {
  switch (action.type) {
    case SET_PLAYER_READY:
      // Bug fix: the action carries `readyState` (see setReady above); the
      // old reducer read the nonexistent `action.to`.
      return action.readyState;
    case TOGGLE_PLAYER_READY:
      return !state;
    default:
      return state;
  }
}
| {
"content_hash": "1fd3a2d7164948cefcbad2ace42834e6",
"timestamp": "",
"source": "github",
"line_count": 34,
"max_line_length": 54,
"avg_line_length": 19.823529411764707,
"alnum_prop": 0.6454005934718101,
"repo_name": "inooid/react-redux-card-game",
"id": "b620a0891d7947362e83a2cf5b36901d0f78b122",
"size": "674",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/redux/modules/ready.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "6855"
},
{
"name": "HTML",
"bytes": "185"
},
{
"name": "JavaScript",
"bytes": "80059"
}
],
"symlink_target": ""
} |
<?php
class SeleniumTestHTMLLogger {
	/**
	 * Inject the CSS used to render Selenium test output into the page head.
	 */
	public function setHeaders() {
		global $wgOut;
		$css = '<style type="text/css">
.selenium pre {
overflow-x: auto; /* Use horizontal scroller if needed; for Firefox 2, not needed in Firefox 3 */
white-space: pre-wrap; /* css-3 */
white-space: -moz-pre-wrap !important; /* Mozilla, since 1999 */
white-space: -pre-wrap; /* Opera 4-6 */
white-space: -o-pre-wrap; /* Opera 7 */
/* width: 99%; */
word-wrap: break-word; /* Internet Explorer 5.5+ */
}
.selenium-success { color: green }
</style>';
		$wgOut->addHeadItem( 'selenium', $css );
	}

	/**
	 * Append one log message to the page output.
	 *
	 * @param string $message text to log (HTML-escaped before output)
	 * @param bool|int $mode one of the SeleniumTestSuite result constants,
	 *  or false for a plain line
	 */
	public function write( $message, $mode = false ) {
		global $wgOut;
		$html = htmlspecialchars( $message );
		// Successful results are highlighted green via the CSS above.
		if ( $mode == SeleniumTestSuite::RESULT_OK ) {
			$html = '<span class="selenium-success">' . $html . '</span>';
		}
		// CONTINUE_LINE suppresses the line break so output can be appended.
		if ( $mode != SeleniumTestSuite::CONTINUE_LINE ) {
			$html .= '<br />';
		}
		$wgOut->addHTML( $html );
	}
}
| {
"content_hash": "a4549789438e9a9e9f3f03e5360df32e",
"timestamp": "",
"source": "github",
"line_count": 36,
"max_line_length": 100,
"avg_line_length": 27.805555555555557,
"alnum_prop": 0.5994005994005994,
"repo_name": "AKFourSeven/antoinekougblenou",
"id": "21332cf000c39a5d6298a6fa6c5198031f1064c1",
"size": "1001",
"binary": false,
"copies": "85",
"ref": "refs/heads/master",
"path": "old/wiki/tests/selenium/SeleniumTestHTMLLogger.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "2564"
},
{
"name": "JavaScript",
"bytes": "1614651"
},
{
"name": "PHP",
"bytes": "63555075"
},
{
"name": "Perl",
"bytes": "27348"
},
{
"name": "Python",
"bytes": "46036"
},
{
"name": "Shell",
"bytes": "4214"
}
],
"symlink_target": ""
} |
package de.swoeste.infinitum.common.utils.properties;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Enumeration;
import java.util.List;
import java.util.Properties;
/**
 * A {@code Properties} variant whose {@link #keys()} enumeration is sorted in
 * natural (ascending) order, so that e.g. {@code store(...)} — which iterates
 * the keys — emits entries in a stable, alphabetical order.
 *
 * @author swoeste
 */
public class SortedProperties extends ExtendedProperties {

    private static final long serialVersionUID = -1246466856419039790L;

    /**
     * Constructor for a new SortedProperties.
     */
    public SortedProperties() {
        super();
    }

    /**
     * Constructor for a new SortedProperties.
     *
     * @param defaults the property list of default values, may be {@code null}
     */
    public SortedProperties(final Properties defaults) {
        super(defaults);
    }

    /**
     * {@inheritDoc}
     * <p>
     * Returns the keys in natural (ascending) order instead of hash order.
     */
    @Override
    public synchronized Enumeration<Object> keys() {
        // Snapshot the (hash-ordered) keys into a list we can sort.
        final List<Object> sortedKeys = Collections.list(super.keys());
        // Null comparator == natural ordering; properties keys are expected to
        // be Strings (non-Comparable keys fail at runtime, as before).
        Collections.sort(sortedKeys, null);
        return Collections.enumeration(sortedKeys);
    }

}
| {
"content_hash": "7978b7cd06a81e3ced41f35288ee604f",
"timestamp": "",
"source": "github",
"line_count": 49,
"max_line_length": 71,
"avg_line_length": 21.346938775510203,
"alnum_prop": 0.6405353728489483,
"repo_name": "swoeste/infinitum-svn-search",
"id": "030dbc55fcbe5a8ace0d5fc38babc5066c49e0d2",
"size": "1857",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "core/utils/src/main/java/de/swoeste/infinitum/common/utils/properties/SortedProperties.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "12551"
},
{
"name": "CSS",
"bytes": "24007"
},
{
"name": "HTML",
"bytes": "18053"
},
{
"name": "Java",
"bytes": "163733"
},
{
"name": "Shell",
"bytes": "154858"
},
{
"name": "TypeScript",
"bytes": "18778"
}
],
"symlink_target": ""
} |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System;
using java = biz.ritter.javapi;
namespace biz.ritter.javapi.awt {
/**
 * RenderingHints - a java.util.Map from rendering hint Keys to their values,
 * used to express rendering speed/quality trade-off preferences. All Map
 * operations delegate to an internal HashMap; put() additionally validates
 * that a value is compatible with its key.
 * @author Alexey A. Petrenko
 */
public class RenderingHints : java.util.Map<Object, Object>, java.lang.Cloneable {
    // Predefined hint keys and their permitted values. Each VALUE_* object is
    // a KeyValue bound to exactly one Key; KeyImpl.isCompatibleValue() checks
    // that binding when a pair is stored via put().
    public static readonly Key KEY_ALPHA_INTERPOLATION = new KeyImpl(1);
    public static readonly Object VALUE_ALPHA_INTERPOLATION_DEFAULT = new KeyValue(KEY_ALPHA_INTERPOLATION);
    public static readonly Object VALUE_ALPHA_INTERPOLATION_SPEED = new KeyValue(KEY_ALPHA_INTERPOLATION);
    public static readonly Object VALUE_ALPHA_INTERPOLATION_QUALITY = new KeyValue(KEY_ALPHA_INTERPOLATION);

    public static readonly Key KEY_ANTIALIASING = new KeyImpl(2);
    public static readonly Object VALUE_ANTIALIAS_DEFAULT = new KeyValue(KEY_ANTIALIASING);
    public static readonly Object VALUE_ANTIALIAS_ON = new KeyValue(KEY_ANTIALIASING);
    public static readonly Object VALUE_ANTIALIAS_OFF = new KeyValue(KEY_ANTIALIASING);

    public static readonly Key KEY_COLOR_RENDERING = new KeyImpl(3);
    public static readonly Object VALUE_COLOR_RENDER_DEFAULT = new KeyValue(KEY_COLOR_RENDERING);
    public static readonly Object VALUE_COLOR_RENDER_SPEED = new KeyValue(KEY_COLOR_RENDERING);
    public static readonly Object VALUE_COLOR_RENDER_QUALITY = new KeyValue(KEY_COLOR_RENDERING);

    public static readonly Key KEY_DITHERING = new KeyImpl(4);
    public static readonly Object VALUE_DITHER_DEFAULT = new KeyValue(KEY_DITHERING);
    public static readonly Object VALUE_DITHER_DISABLE = new KeyValue(KEY_DITHERING);
    public static readonly Object VALUE_DITHER_ENABLE = new KeyValue(KEY_DITHERING);

    public static readonly Key KEY_FRACTIONALMETRICS = new KeyImpl(5);
    public static readonly Object VALUE_FRACTIONALMETRICS_DEFAULT = new KeyValue(KEY_FRACTIONALMETRICS);
    public static readonly Object VALUE_FRACTIONALMETRICS_ON = new KeyValue(KEY_FRACTIONALMETRICS);
    public static readonly Object VALUE_FRACTIONALMETRICS_OFF = new KeyValue(KEY_FRACTIONALMETRICS);

    public static readonly Key KEY_INTERPOLATION = new KeyImpl(6);
    public static readonly Object VALUE_INTERPOLATION_BICUBIC = new KeyValue(KEY_INTERPOLATION);
    public static readonly Object VALUE_INTERPOLATION_BILINEAR = new KeyValue(KEY_INTERPOLATION);
    public static readonly Object VALUE_INTERPOLATION_NEAREST_NEIGHBOR = new KeyValue(KEY_INTERPOLATION);

    public static readonly Key KEY_RENDERING = new KeyImpl(7);
    public static readonly Object VALUE_RENDER_DEFAULT = new KeyValue(KEY_RENDERING);
    public static readonly Object VALUE_RENDER_SPEED = new KeyValue(KEY_RENDERING);
    public static readonly Object VALUE_RENDER_QUALITY = new KeyValue(KEY_RENDERING);

    public static readonly Key KEY_STROKE_CONTROL = new KeyImpl(8);
    public static readonly Object VALUE_STROKE_DEFAULT = new KeyValue(KEY_STROKE_CONTROL);
    public static readonly Object VALUE_STROKE_NORMALIZE = new KeyValue(KEY_STROKE_CONTROL);
    public static readonly Object VALUE_STROKE_PURE = new KeyValue(KEY_STROKE_CONTROL);

    public static readonly Key KEY_TEXT_ANTIALIASING = new KeyImpl(9);
    public static readonly Object VALUE_TEXT_ANTIALIAS_DEFAULT = new KeyValue(KEY_TEXT_ANTIALIASING);
    public static readonly Object VALUE_TEXT_ANTIALIAS_ON = new KeyValue(KEY_TEXT_ANTIALIASING);
    public static readonly Object VALUE_TEXT_ANTIALIAS_OFF = new KeyValue(KEY_TEXT_ANTIALIASING);

    // Backing store; every Map member below delegates to it.
    private java.util.HashMap<Object, Object> map = new java.util.HashMap<Object, Object>();

    /**
     * Creates a hints object initialized from the given map.
     * A null map yields an empty hints object. Entries are copied through
     * putAll(), so each pair is key/value-compatibility checked.
     */
    public RenderingHints(java.util.Map<Key, Object> map) :base(){
        if (map != null) {
            putAll((java.util.Map<Object,Object>)map);
        }
    }

    /**
     * Creates a hints object containing a single key/value pair.
     */
    public RenderingHints(Key key, Object value) :base() {
        put(key, value);
    }

    /**
     * Merges all entries of another RenderingHints into this one.
     * NOTE(review): copies directly between backing maps, so — unlike put()
     * — no key/value compatibility check is performed here.
     */
    public void add(RenderingHints hints) {
        map.putAll(hints.map);
    }

    /**
     * Stores a key/value pair after validating the value against the key.
     * Throws IllegalArgumentException if the value is not compatible; throws
     * an invalid-cast error if key is not a Key (mirrors reference behavior).
     */
    public Object put(Object key, Object value) {
        if (!((Key)key).isCompatibleValue(value)) {
            throw new java.lang.IllegalArgumentException();
        }
        return map.put(key, value);
    }

    public Object remove(Object key) {
        return map.remove(key);
    }

    public Object get(Object key) {
        return map.get(key);
    }

    public java.util.Set<Object> keySet() {
        return map.keySet();
    }

    public java.util.Set<java.util.MapNS.Entry<Object, Object>> entrySet() {
        return map.entrySet();
    }

    /**
     * Copies all entries from the given map. A RenderingHints source is copied
     * wholesale (its entries were already validated); any other map is copied
     * entry-by-entry through put(), so each pair is compatibility-checked.
     */
    public void putAll(java.util.Map<Object, Object> m) {
        if (m is RenderingHints) {
            map.putAll(((RenderingHints) m).map);
        } else {
            java.util.Set<java.util.MapNS.Entry<Object,Object>> entries = m.entrySet();

            if (entries != null){
                java.util.Iterator<java.util.MapNS.Entry<Object,Object>> it = entries.iterator();
                while (it.hasNext()) {
                    java.util.MapNS.Entry<Object,Object> entry = it.next();
                    Key key = (Key) entry.getKey();
                    Object val = entry.getValue();
                    put(key, val);
                }
            }
        }
    }

    public java.util.Collection<Object> values() {
        return map.values();
    }

    public bool containsValue(Object value) {
        return map.containsValue(value);
    }

    /**
     * Null keys are rejected with a NullPointerException (Java Map contract
     * for this class), all other keys are delegated to the backing map.
     */
    public bool containsKey(Object key) {
        if (key == null) {
            throw new java.lang.NullPointerException();
        }

        return map.containsKey(key);
    }

    public bool isEmpty() {
        return map.isEmpty();
    }

    public void clear() {
        map.clear();
    }

    public int size() {
        return map.size();
    }

    /**
     * Equal when the other object is a Map with the same key set and an
     * equal (or both-null) value for every key.
     */
    public override bool Equals(Object o) {
        if (!(o is java.util.Map<Object, Object>)) {
            return false;
        }
        java.util.Map<Object, Object> m = (java.util.Map<Object, Object>)o;

        java.util.Set<Object> keys = keySet();
        if (!keys.equals(m.keySet())) {
            return false;
        }

        java.util.Iterator<Object> it = keys.iterator();
        while (it.hasNext()) {
            Key key = (Key)it.next();
            Object v1 = get(key);
            Object v2 = m.get(key);
            if (!(v1==null?v2==null:v1.equals(v2))) {
                return false;
            }
        }

        return true;
    }

    public override int GetHashCode() {
        return map.GetHashCode();
    }

    /**
     * Returns a copy backed by a clone of the internal map (keys/values
     * themselves are shared, matching HashMap.clone semantics).
     */
    public Object clone() {
        RenderingHints clone = new RenderingHints(null);
        clone.map = (java.util.HashMap<Object, Object>)this.map.clone();
        return clone;
    }

    public override String ToString() {
        return "RenderingHints["+map.toString()+"]"; //$NON-NLS-1$ //$NON-NLS-2$
    }

    /**
     * Key - base class for rendering hint keys. Uses identity equality and
     * identity hash, so each Key constant is unique regardless of its int id.
     */
    public abstract class Key {
        // Internal numeric id; exposed to subclasses via intKey() only.
        private readonly int key;

        protected Key(int key) {
            this.key = key;
        }

        public override bool Equals(Object o) {
            return this == o;
        }

        public override int GetHashCode() {
            return java.lang.SystemJ.identityHashCode(this);
        }

        protected int intKey() {
            return key;
        }

        // True when val is a legal value for this key.
        public abstract bool isCompatibleValue(Object val);
    }

    /**
     * Private implementation of Key class. A value is compatible when it is a
     * KeyValue created for this very key instance.
     */
    private class KeyImpl : Key {

        protected internal KeyImpl(int key) :base(key) {
        }

        public override bool isCompatibleValue(Object val) {
            if (!(val is KeyValue)) {
                return false;
            }
            return ((KeyValue)val).key == this;
        }
    }

    /**
     * Private class KeyValue is used as value for Key class instance.
     * Remembers the single Key it belongs to.
     */
    private class KeyValue {
        internal readonly Key key;

        protected internal KeyValue(Key key) {
            this.key = key;
        }
    }
}
}
| {
"content_hash": "ae1d3bb1f40d407c4306bbb250e82ce1",
"timestamp": "",
"source": "github",
"line_count": 245,
"max_line_length": 108,
"avg_line_length": 35.183673469387756,
"alnum_prop": 0.6261020881670534,
"repo_name": "sailesh341/JavApi",
"id": "c63173056c9187f71fec5f6ca787a960edd24ba3",
"size": "8622",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "JavApi_Core/java/awt/RenderingHints.cs",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C#",
"bytes": "6253300"
},
{
"name": "Java",
"bytes": "410496"
}
],
"symlink_target": ""
} |
ACCEPTED
#### According to
GRIN Taxonomy for Plants
#### Published in
null
#### Original name
null
### Remarks
null | {
"content_hash": "70d58f1840ab5fc0a67520446e7d1dc9",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 24,
"avg_line_length": 9.153846153846153,
"alnum_prop": 0.6890756302521008,
"repo_name": "mdoering/backbone",
"id": "3f612b27da460a70ab6210bceeb739f9a1715fe0",
"size": "197",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "life/Plantae/Magnoliophyta/Liliopsida/Poales/Poaceae/Elymus/Elymus dahuricus/Elymus dahuricus dahuricus/README.md",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
/* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.camunda.bpm.engine.impl.cmd;
import java.io.Serializable;
import org.camunda.bpm.engine.impl.interceptor.Command;
import org.camunda.bpm.engine.impl.interceptor.CommandContext;
import org.camunda.bpm.engine.impl.persistence.entity.CommentEntity;
import org.camunda.bpm.engine.impl.util.ClockUtil;
import org.camunda.bpm.engine.task.Event;
/**
 * Command that stores a user comment on a task and/or process instance.
 *
 * <p>The full comment text is persisted unchanged; a whitespace-normalized,
 * length-limited copy is stored as the short event message.</p>
 *
 * @author Tom Baeyens
 */
public class AddCommentCmd implements Command<Object>, Serializable {

  private static final long serialVersionUID = 1L;

  /** Longest event message stored verbatim; longer texts are truncated. */
  protected static final int EVENT_MESSAGE_MAX_LENGTH = 163;

  /** Characters kept (before the "..." suffix) when truncation occurs. */
  protected static final int EVENT_MESSAGE_TRUNCATED_LENGTH = 160;

  protected String taskId;
  protected String processInstanceId;
  protected String message;

  /**
   * @param taskId id of the task the comment belongs to, may be {@code null}
   * @param processInstanceId id of the process instance the comment belongs to, may be {@code null}
   * @param message the comment text, must not be {@code null}
   */
  public AddCommentCmd(String taskId, String processInstanceId, String message) {
    this.taskId = taskId;
    this.processInstanceId = processInstanceId;
    this.message = message;
  }

  public Object execute(CommandContext commandContext) {
    // Attribute the comment to the currently authenticated user (may be null).
    String userId = commandContext.getAuthenticatedUserId();

    CommentEntity comment = new CommentEntity();
    comment.setUserId(userId);
    comment.setType(CommentEntity.TYPE_COMMENT);
    comment.setTime(ClockUtil.getCurrentTime());
    comment.setTaskId(taskId);
    comment.setProcessInstanceId(processInstanceId);
    comment.setAction(Event.ACTION_ADD_COMMENT);

    // Short form for event lists; the unmodified text is kept in fullMessage.
    comment.setMessage(abbreviate(message));
    comment.setFullMessage(message);

    commandContext
      .getCommentManager()
      .insert(comment);

    return null;
  }

  /**
   * Collapses every whitespace run to a single space and, if the result is
   * longer than {@link #EVENT_MESSAGE_MAX_LENGTH}, truncates it to
   * {@link #EVENT_MESSAGE_TRUNCATED_LENGTH} characters plus {@code "..."}.
   */
  protected static String abbreviate(String text) {
    String eventMessage = text.replaceAll("\\s+", " ");
    if (eventMessage.length() > EVENT_MESSAGE_MAX_LENGTH) {
      eventMessage = eventMessage.substring(0, EVENT_MESSAGE_TRUNCATED_LENGTH) + "...";
    }
    return eventMessage;
  }
}
| {
"content_hash": "91b4af806fa0911b53e59e546e51528f",
"timestamp": "",
"source": "github",
"line_count": 66,
"max_line_length": 81,
"avg_line_length": 32.40909090909091,
"alnum_prop": 0.7367928938756428,
"repo_name": "1and1/camunda-bpm-platform",
"id": "507ef421dea8c588969f9a696d9ec995649e0135",
"size": "2139",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "engine/src/main/java/org/camunda/bpm/engine/impl/cmd/AddCommentCmd.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "3524"
},
{
"name": "CSS",
"bytes": "41455"
},
{
"name": "HTML",
"bytes": "207609"
},
{
"name": "Java",
"bytes": "8707800"
},
{
"name": "JavaScript",
"bytes": "636111"
},
{
"name": "Shell",
"bytes": "3423"
},
{
"name": "XSLT",
"bytes": "13061"
}
],
"symlink_target": ""
} |
<template name="inviteFriends">
  <!-- Dismissible "well" panel hosting the friend-invitation input.
       NOTE(review): the close button has no inline handler — presumably
       wired up in this template's JS event map; confirm there. -->
  <div class="add-friends well">
    <button type="button" class="close" aria-hidden="true">×</button>
    {{> inviteFriendsInput}}
  </div>
</template> | {
"content_hash": "0a4c4f35c11d43836b3cc9393bcfa18c",
"timestamp": "",
"source": "github",
"line_count": 6,
"max_line_length": 75,
"avg_line_length": 31.666666666666668,
"alnum_prop": 0.6684210526315789,
"repo_name": "pushpickup/pushpickup",
"id": "6750bcf4194616e12f1da9a5a2663f4588936006",
"size": "190",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "client/views/games/common/invite-friends.html",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "330"
},
{
"name": "CoffeeScript",
"bytes": "8240"
},
{
"name": "HTML",
"bytes": "61307"
},
{
"name": "JavaScript",
"bytes": "227227"
},
{
"name": "Shell",
"bytes": "554"
}
],
"symlink_target": ""
} |
using System.Data.SqlClient;
using System.Linq;
using EIDSS.Reports.BaseControls.BaseDataSetTableAdapters;
namespace EIDSS.Reports.Document.ActiveSurveillance {
    // Hand-written partial counterpart of the designer-generated
    // SessionFarmReportDataSet; reserved for future extensions to the
    // typed dataset.
    public partial class SessionFarmReportDataSet {
    }
}
namespace EIDSS.Reports.Document.ActiveSurveillance.SessionFarmReportDataSetTableAdapters
{
    // Hand-written half of the generated SessionFarmAdapter: exposes the SQL
    // transaction and the command timeout of the adapter's commands.
    public partial class SessionFarmAdapter
    {
        private SqlTransaction m_Transaction;

        // Transaction used by the adapter. Setting it also propagates the
        // transaction to the adapter and every command in CommandCollection
        // via BaseAdapter.SetTransaction.
        internal SqlTransaction Transaction
        {
            get { return m_Transaction; }
            set
            {
                m_Transaction = value;
                BaseAdapter.SetTransaction(Adapter, CommandCollection, value);
            }
        }

        // Command timeout for the adapter's commands. The getter reports the
        // first command's timeout (default 0 when the collection is empty);
        // the setter applies the value to all commands.
        internal int CommandTimeout
        {
            get { return CommandCollection.Select(c => c.CommandTimeout).FirstOrDefault(); }
            set
            {
                foreach (SqlCommand command in CommandCollection)
                {
                    command.CommandTimeout = value;
                }
            }
        }
    }
} | {
"content_hash": "bf2454fdfa1715308e1277c57dcd76e6",
"timestamp": "",
"source": "github",
"line_count": 41,
"max_line_length": 92,
"avg_line_length": 26.51219512195122,
"alnum_prop": 0.5712971481140754,
"repo_name": "EIDSS/EIDSS-Legacy",
"id": "3845069b2451fc1ba070ea6ffc90c357427ab7e5",
"size": "1089",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "EIDSS v6.1/vb/EIDSS/EIDSS.Reports/Document/ActiveSurveillance/SessionFarmReportDataSet.cs",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "ASP",
"bytes": "256377"
},
{
"name": "Batchfile",
"bytes": "30009"
},
{
"name": "C#",
"bytes": "106160789"
},
{
"name": "CSS",
"bytes": "833586"
},
{
"name": "HTML",
"bytes": "7507"
},
{
"name": "Java",
"bytes": "2188690"
},
{
"name": "JavaScript",
"bytes": "17000221"
},
{
"name": "PLSQL",
"bytes": "2499"
},
{
"name": "PLpgSQL",
"bytes": "6422"
},
{
"name": "Pascal",
"bytes": "159898"
},
{
"name": "PowerShell",
"bytes": "339522"
},
{
"name": "Puppet",
"bytes": "3758"
},
{
"name": "SQLPL",
"bytes": "12198"
},
{
"name": "Smalltalk",
"bytes": "301266"
},
{
"name": "Visual Basic",
"bytes": "20819564"
},
{
"name": "XSLT",
"bytes": "4253600"
}
],
"symlink_target": ""
} |
ACCEPTED
#### According to
International Plant Names Index
#### Published in
null
#### Original name
null
### Remarks
null | {
"content_hash": "d9e529f8b39d75bdf7acaa7dd0e17fed",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 31,
"avg_line_length": 9.692307692307692,
"alnum_prop": 0.7063492063492064,
"repo_name": "mdoering/backbone",
"id": "af6410c64389edc3324a4b8b8b82b8864d1289cb",
"size": "176",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "life/Plantae/Magnoliophyta/Magnoliopsida/Asterales/Asteraceae/Grindelia tarapacana/README.md",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
// en-US locale strings for the CallMonitorBar component.
// NOTE(review): {numberOf} appears to be an interpolation placeholder filled
// with the relevant call count by the i18n formatter — confirm against the
// component's formatting code.
export default {
  currentCall: 'Current Call',
  viewCalls: 'View Calls',
  incomingCall: '{numberOf} Incoming Call',
  incomingCalls: '{numberOf} Incoming Calls',
  callOnHold: '{numberOf} Call On Hold',
  callsOnHold: '{numberOf} Calls On Hold',
};
| {
"content_hash": "212d6552ac7d404b935211636e581d78",
"timestamp": "",
"source": "github",
"line_count": 8,
"max_line_length": 45,
"avg_line_length": 31.5,
"alnum_prop": 0.7023809523809523,
"repo_name": "ringcentral/ringcentral-js-widget",
"id": "9c728611fc474011d34d20b1631d925bae4b69f2",
"size": "252",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "packages/ringcentral-widgets/components/CallMonitorBar/i18n/en-US.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "121878"
},
{
"name": "HTML",
"bytes": "2967"
},
{
"name": "JavaScript",
"bytes": "655889"
},
{
"name": "Shell",
"bytes": "1135"
}
],
"symlink_target": ""
} |
require 'mongo'
require 'lite_spec_helper'
describe Mongo::Crypt::ExplicitDecryptionContext do
  require_libmongocrypt
  include_context 'define shared FLE helpers'

  let(:credentials) { Mongo::Crypt::KMS::Credentials.new(kms_providers) }
  let(:mongocrypt) { Mongo::Crypt::Handle.new(credentials, logger: logger) }
  let(:context) { described_class.new(mongocrypt, io, value) }

  let(:logger) { nil }

  let(:io) { double("Mongo::ClientEncryption::IO") }

  # A binary string representing a value previously encrypted by libmongocrypt
  let(:encrypted_data) do
    "\x01\xDF2~\x89\xD2+N}\x84;i(\xE5\xF4\xBF \x024\xE5\xD2\n\x9E\x97\x9F\xAF\x9D\xC7\xC9\x1A\a\x87z\xAE_;r\xAC\xA9\xF6n\x1D\x0F\xB5\xB1#O\xB7\xCA\xEE$/\xF1\xFA\b\xA7\xEC\xDB\xB6\xD4\xED\xEAMw3+\xBBv\x18\x97\xF9\x99\xD5\x13@\x80y\n{\x19R\xD3\xF0\xA1C\x05\xF7)\x93\x9Bh\x8AA.\xBB\xD3&\xEA"
  end

  let(:value) do
    { 'v': BSON::Binary.new(encrypted_data, :ciphertext) }
  end

  describe '#initialize' do
    # Context creation must succeed for every supported KMS provider; the
    # provider only affects how the parent Handle was configured.
    %w(local AWS Azure GCP KMIP).each do |provider|
      context "when mongocrypt is initialized with #{provider} KMS provider options" do
        include_context "with #{provider} kms_providers"

        it 'initializes context' do
          expect { context }.not_to raise_error
        end
      end
    end

    context 'with verbose logging' do
      include_context 'with local kms_providers'

      before(:all) do
        # Logging from libmongocrypt requires the C library to be built with the -DENABLE_TRACE=ON
        # option; none of the pre-built packages on Evergreen have been built with logging enabled.
        #
        # It is still useful to be able to run these tests locally to confirm that logging is working
        # while debugging any problems.
        #
        # For now, skip this test by default and revisit once we have determined how we want to
        # package libmongocrypt with the Ruby driver (see: https://jira.mongodb.org/browse/RUBY-1966)
        skip "These tests require libmongocrypt to be built with the '-DENABLE_TRACE=ON' cmake option." +
          " They also require the MONGOCRYPT_TRACE environment variable to be set to 'ON'."
      end

      # Debug-level logger so libmongocrypt trace output is captured.
      let(:logger) do
        ::Logger.new(STDOUT).tap do |logger|
          logger.level = ::Logger::DEBUG
        end
      end

      it 'receives log messages from libmongocrypt' do
        expect(logger).to receive(:debug).with(/mongocrypt_ctx_explicit_decrypt_init/)

        context
      end
    end
  end
end
| {
"content_hash": "874bada941acbd1d31b613281216a575",
"timestamp": "",
"source": "github",
"line_count": 103,
"max_line_length": 288,
"avg_line_length": 33.067961165048544,
"alnum_prop": 0.6635349383440986,
"repo_name": "mongodb/mongo-ruby-driver",
"id": "337d55f5c18d1f3952fc5047cef74c6aee168577",
"size": "3455",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "spec/mongo/crypt/explicit_decryption_context_spec.rb",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "745"
},
{
"name": "HTML",
"bytes": "49593"
},
{
"name": "Ruby",
"bytes": "5008268"
},
{
"name": "Shell",
"bytes": "44951"
}
],
"symlink_target": ""
} |
<?php
// Copyright 2017 DAIMTO ([Linda Lawton](https://twitter.com/LindaLawtonDK)) : [www.daimto.com](http://www.daimto.com/)
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
// the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
// an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
// specific language governing permissions and limitations under the License.
//------------------------------------------------------------------------------
// <auto-generated>
// This code was generated by DAIMTO-Google-apis-Sample-generator 1.0.0
// Template File Name: methodTemplate.tt
// Build date: 2017-10-08
// PHP generator version: 1.0.0
//
// Changes to this file may cause incorrect behavior and will be lost if
// the code is regenerated.
// </auto-generated>
//------------------------------------------------------------------------------
// About
//
// Unofficial sample for the Service Management v1 API for PHP.
// This sample is designed to be used with the Google PHP client library. (https://github.com/google/google-api-php-client)
//
// API Description: Google Service Management allows service producers to publish their services on Google Cloud Platform so that they can be discovered and used by service consumers.
// API Documentation Link https://cloud.google.com/service-management/
//
// Discovery Doc https://www.googleapis.com/discovery/v1/apis/servicemanagement/v1/rest
//
//------------------------------------------------------------------------------
// Installation
//
// The preferred method is via https://getcomposer.org. Follow the installation instructions https://getcomposer.org/doc/00-intro.md
// if you do not already have composer installed.
//
// Once composer is installed, execute the following command in your project root to install this library:
//
// composer require google/apiclient:^2.0
//
//------------------------------------------------------------------------------
// Load the Google API PHP Client Library.
require_once __DIR__ . '/vendor/autoload.php';
session_start();
/***************************************************
* Include this line for service account authencation. Note: Not all APIs support service accounts.
//require_once __DIR__ . '/ServiceAccount.php';
* Include the following four lines Oauth2 authencation.
* require_once __DIR__ . '/Oauth2Authentication.php';
* $_SESSION['mainScript'] = basename($_SERVER['PHP_SELF']); // Oauth2callback.php will return here.
* $client = getGoogleClient();
* $service = new Google_Service_Servicemanagement($client);
****************************************************/
// Option paramaters can be set as needed.
$optParams = array(
//'view' => '[YourValue]', // Specifies which parts of the Service Config should be returned in theresponse.
'fields' => '*'
);
// Single Request.
$results = configsGetExample($service, $serviceName, $configId, $optParams);
/**
 * Gets a service configuration (version) for a managed service.
 * @service Authenticated Servicemanagement service.
 * @optParams Optional paramaters are not required by a request.
 * @serviceName The name of the service. See the [overview](/service-management/overview)
 for naming requirements. For example: `example.googleapis.com`.
 * @configId The id of the service configuration resource.
 * @return Service
 */
function configsGetExample($service, $serviceName, $configId, $optParams)
{
    try
    {
        // Parameter validation.
        // BUGFIX: $serviceName and $configId were previously referenced as the
        // bare constants `serviceName` / `configId`, so these checks never
        // validated the actual arguments (and fail outright on PHP 8).
        if ($service == null)
            throw new Exception("service is required.");
        if ($optParams == null)
            throw new Exception("optParams is required.");
        if ($serviceName == null)
            throw new Exception("serviceName is required.");
        if ($configId == null)
            throw new Exception("configId is required.");

        // Make the request and return the results.
        return $service->configs->GetConfigs($serviceName, $configId, $optParams);
    }
    catch (Exception $e)
    {
        print "An error occurred: " . $e->getMessage();
    }
}
?>
| {
"content_hash": "300cc04d09ce2caa8aaf8bc08afcef94",
"timestamp": "",
"source": "github",
"line_count": 98,
"max_line_length": 183,
"avg_line_length": 43.734693877551024,
"alnum_prop": 0.6511899206719552,
"repo_name": "LindaLawton/Google-APIs-PHP-Samples",
"id": "1d10f9e6b2c31fbb573489062f24767b2b85c1bb",
"size": "4288",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Samples/Google Service Management API/v1/ConfigsGetSample.php",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
// Class extension: private state for the shared photo loader.
@interface TWPhotoLoader ()

// TWPhoto objects accumulated while assets are enumerated.
@property (strong, nonatomic) NSMutableArray *allPhotos;
// Lazily-created assets library used for enumeration.
@property (strong, nonatomic) ALAssetsLibrary *assetsLibrary;
// Completion handler captured by +loadAllPhotos:completion:, invoked exactly
// once with either the loaded photos or an enumeration error.
@property (readwrite, copy, nonatomic) void(^loadBlock)(NSArray *photos, NSError *error);

@end

@implementation TWPhotoLoader

// Returns the process-wide singleton loader (created once via dispatch_once).
+ (TWPhotoLoader *)sharedLoader {
    static TWPhotoLoader *loader;
    static dispatch_once_t onceToken;
    dispatch_once(&onceToken, ^{
        loader = [[TWPhotoLoader alloc] init];
    });
    return loader;
}

// Asynchronously loads all photos from the device's Saved Photos album and
// calls `completion` when done.
// NOTE(review): built on ALAssetsLibrary, which Apple deprecated in iOS 9 in
// favor of the Photos framework — consider migrating.
+ (void)loadAllPhotos:(void (^)(NSArray *photos, NSError *error))completion {
    [[TWPhotoLoader sharedLoader] setLoadBlock:completion];
    [[TWPhotoLoader sharedLoader] startLoading];
}

- (void)startLoading {
    // Per-asset callback: wrap each ALAsset in a TWPhoto and insert at index 0,
    // reversing enumeration order (newest-first, assuming the group enumerates
    // oldest-to-newest — TODO confirm).
    ALAssetsGroupEnumerationResultsBlock assetsEnumerationBlock = ^(ALAsset *result, NSUInteger index, BOOL *stop) {
        if (result) {
            TWPhoto *photo = [TWPhoto new];
            photo.asset = result;
            [self.allPhotos insertObject:photo atIndex:0];
        }
    };

    // Per-group callback: only the Saved Photos group is enumerated, with a
    // photos-only filter. ALAssetsLibrary signals end-of-enumeration by
    // passing a nil group, at which point the completion block is fired.
    ALAssetsLibraryGroupsEnumerationResultsBlock listGroupBlock = ^(ALAssetsGroup *group, BOOL *stop) {
        ALAssetsFilter *onlyPhotosFilter = [ALAssetsFilter allPhotos];
        [group setAssetsFilter:onlyPhotosFilter];
        if ([group numberOfAssets] > 0) {
            if ([[group valueForProperty:ALAssetsGroupPropertyType] intValue] == ALAssetsGroupSavedPhotos) {
                [group enumerateAssetsUsingBlock:assetsEnumerationBlock];
            }
        }
        if (group == nil) {
            self.loadBlock(self.allPhotos, nil);
        }
    };

    [self.assetsLibrary enumerateGroupsWithTypes:ALAssetsGroupAll usingBlock:listGroupBlock failureBlock:^(NSError *error) {
        self.loadBlock(nil, error);
    }];
}

// Lazy accessor for the photo accumulator.
- (NSMutableArray *)allPhotos {
    if (_allPhotos == nil) {
        _allPhotos = [NSMutableArray array];
    }
    return _allPhotos;
}

// Lazy accessor for the assets library.
- (ALAssetsLibrary *)assetsLibrary {
    if (_assetsLibrary == nil) {
        _assetsLibrary = [[ALAssetsLibrary alloc] init];
    }
    return _assetsLibrary;
}

@end
| {
"content_hash": "4928eae0a73e4d1dacfb97da4005b01f",
"timestamp": "",
"source": "github",
"line_count": 70,
"max_line_length": 124,
"avg_line_length": 29.857142857142858,
"alnum_prop": 0.654066985645933,
"repo_name": "ambujshukla/InstagramPhotoPicker",
"id": "46a456befc930027f6d1ccbf16022ab261e9d8fc",
"size": "2191",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "TWPhotoPicker/TWPhotoLoader.m",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "435"
},
{
"name": "Objective-C",
"bytes": "26803"
},
{
"name": "Ruby",
"bytes": "972"
},
{
"name": "Shell",
"bytes": "4024"
}
],
"symlink_target": ""
} |
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
<title>mango.unittest.TestCase.assertNotIn — mango 0.9.8 documentation</title>
<link rel="stylesheet" href="../../../_static/default.css" type="text/css" />
<link rel="stylesheet" href="../../../_static/pygments.css" type="text/css" />
<script type="text/javascript">
var DOCUMENTATION_OPTIONS = {
URL_ROOT: '../../../',
VERSION: '0.9.8',
COLLAPSE_INDEX: false,
FILE_SUFFIX: '.html',
HAS_SOURCE: true
};
</script>
<script type="text/javascript" src="../../../_static/jquery.js"></script>
<script type="text/javascript" src="../../../_static/underscore.js"></script>
<script type="text/javascript" src="../../../_static/doctools.js"></script>
<script type="text/javascript" src="http://cdn.mathjax.org/mathjax/latest/MathJax.js?config=TeX-AMS-MML_HTMLorMML"></script>
<link rel="top" title="mango 0.9.8 documentation" href="../../../index.html" />
<link rel="up" title="mango.unittest.TestCase" href="../../mango.unittest.TestCase.html" />
<link rel="next" title="mango.unittest.TestCase.assertNotIsInstance" href="mango.unittest.TestCase.assertNotIsInstance.html" />
<link rel="prev" title="mango.unittest.TestCase.assertNotEquals" href="mango.unittest.TestCase.assertNotEquals.html" />
</head>
<body>
<div class="related">
<h3>Navigation</h3>
<ul>
<li class="right" style="margin-right: 10px">
<a href="../../../genindex.html" title="General Index"
accesskey="I">index</a></li>
<li class="right" >
<a href="../../../py-modindex.html" title="Python Module Index"
>modules</a> |</li>
<li class="right" >
<a href="mango.unittest.TestCase.assertNotIsInstance.html" title="mango.unittest.TestCase.assertNotIsInstance"
accesskey="N">next</a> |</li>
<li class="right" >
<a href="mango.unittest.TestCase.assertNotEquals.html" title="mango.unittest.TestCase.assertNotEquals"
accesskey="P">previous</a> |</li>
<li><a href="../../../index.html">mango 0.9.8 documentation</a> »</li>
<li><a href="../../../unittest.html" >Top Level Unit Testing (<tt class="docutils literal"><span class="pre">mango.tests</span></tt>)</a> »</li>
<li><a href="../../mango.unittest.TestCase.html" accesskey="U">mango.unittest.TestCase</a> »</li>
</ul>
</div>
<div class="document">
<div class="documentwrapper">
<div class="bodywrapper">
<div class="body">
<div class="section" id="mango-unittest-testcase-assertnotin">
<h1>mango.unittest.TestCase.assertNotIn<a class="headerlink" href="#mango-unittest-testcase-assertnotin" title="Permalink to this headline">¶</a></h1>
<dl class="method">
<dt id="mango.unittest.TestCase.assertNotIn">
<tt class="descclassname">TestCase.</tt><tt class="descname">assertNotIn</tt><big>(</big><em>member</em>, <em>container</em>, <em>msg=None</em><big>)</big><a class="headerlink" href="#mango.unittest.TestCase.assertNotIn" title="Permalink to this definition">¶</a></dt>
<dd><p>Just like self.assertTrue(a not in b), but with a nicer default message.</p>
</dd></dl>
</div>
</div>
</div>
</div>
<div class="sphinxsidebar">
<div class="sphinxsidebarwrapper">
<h4>Previous topic</h4>
<p class="topless"><a href="mango.unittest.TestCase.assertNotEquals.html"
title="previous chapter">mango.unittest.TestCase.assertNotEquals</a></p>
<h4>Next topic</h4>
<p class="topless"><a href="mango.unittest.TestCase.assertNotIsInstance.html"
title="next chapter">mango.unittest.TestCase.assertNotIsInstance</a></p>
<h3>This Page</h3>
<ul class="this-page-menu">
<li><a href="../../../_sources/generated/generated/methods/mango.unittest.TestCase.assertNotIn.txt"
rel="nofollow">Show Source</a></li>
</ul>
<div id="searchbox" style="display: none">
<h3>Quick search</h3>
<form class="search" action="../../../search.html" method="get">
<input type="text" name="q" />
<input type="submit" value="Go" />
<input type="hidden" name="check_keywords" value="yes" />
<input type="hidden" name="area" value="default" />
</form>
<p class="searchtip" style="font-size: 90%">
Enter search terms or a module, class or function name.
</p>
</div>
<script type="text/javascript">$('#searchbox').show(0);</script>
</div>
</div>
<div class="clearer"></div>
</div>
<div class="related">
<h3>Navigation</h3>
<ul>
<li class="right" style="margin-right: 10px">
<a href="../../../genindex.html" title="General Index"
>index</a></li>
<li class="right" >
<a href="../../../py-modindex.html" title="Python Module Index"
>modules</a> |</li>
<li class="right" >
<a href="mango.unittest.TestCase.assertNotIsInstance.html" title="mango.unittest.TestCase.assertNotIsInstance"
>next</a> |</li>
<li class="right" >
<a href="mango.unittest.TestCase.assertNotEquals.html" title="mango.unittest.TestCase.assertNotEquals"
>previous</a> |</li>
<li><a href="../../../index.html">mango 0.9.8 documentation</a> »</li>
<li><a href="../../../unittest.html" >Top Level Unit Testing (<tt class="docutils literal"><span class="pre">mango.tests</span></tt>)</a> »</li>
<li><a href="../../mango.unittest.TestCase.html" >mango.unittest.TestCase</a> »</li>
</ul>
</div>
<div class="footer">
© Copyright 2014, Department of Applied Mathematics, The Australian National University.
Created using <a href="http://sphinx-doc.org/">Sphinx</a> 1.2.3.
</div>
</body>
</html> | {
"content_hash": "dbe837a1399a942c43656ccd16a3f174",
"timestamp": "",
"source": "github",
"line_count": 128,
"max_line_length": 268,
"avg_line_length": 48.0625,
"alnum_prop": 0.6087451235370611,
"repo_name": "pymango/pymango",
"id": "250e916921208473c2345d9ca6e48f82328374e3",
"size": "6154",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "doc/sphinx_html/generated/generated/methods/mango.unittest.TestCase.assertNotIn.html",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "CMake",
"bytes": "1621"
},
{
"name": "Python",
"bytes": "652240"
}
],
"symlink_target": ""
} |
// flow-typed signature: a0664698f9fe39edd3896da6e9b3bd6d
// flow-typed version: <<STUB>>/graphql-anywhere_v^3.0.0/flow_v0.51.0
/**
* This is an autogenerated libdef stub for:
*
* 'graphql-anywhere'
*
* Fill this stub out by replacing all the `any` types.
*
* Once filled out, we encourage you to share your work with the
* community by sending a pull request to:
* https://github.com/flowtype/flow-typed
*/
declare module 'graphql-anywhere' {
declare module.exports: any;
}
/**
* We include stubs for each file inside this npm package in case you need to
* require those files directly. Feel free to delete any files that aren't
* needed.
*/
declare module 'graphql-anywhere/lib/src/directives' {
declare module.exports: any;
}
declare module 'graphql-anywhere/lib/src/getFromAST' {
declare module.exports: any;
}
declare module 'graphql-anywhere/lib/src/graphql' {
declare module.exports: any;
}
declare module 'graphql-anywhere/lib/src/index' {
declare module.exports: any;
}
declare module 'graphql-anywhere/lib/src/storeUtils' {
declare module.exports: any;
}
declare module 'graphql-anywhere/lib/src/utilities' {
declare module.exports: any;
}
// Filename aliases
declare module 'graphql-anywhere/lib/src/directives.js' {
declare module.exports: $Exports<'graphql-anywhere/lib/src/directives'>;
}
declare module 'graphql-anywhere/lib/src/getFromAST.js' {
declare module.exports: $Exports<'graphql-anywhere/lib/src/getFromAST'>;
}
declare module 'graphql-anywhere/lib/src/graphql.js' {
declare module.exports: $Exports<'graphql-anywhere/lib/src/graphql'>;
}
declare module 'graphql-anywhere/lib/src/index.js' {
declare module.exports: $Exports<'graphql-anywhere/lib/src/index'>;
}
declare module 'graphql-anywhere/lib/src/storeUtils.js' {
declare module.exports: $Exports<'graphql-anywhere/lib/src/storeUtils'>;
}
declare module 'graphql-anywhere/lib/src/utilities.js' {
declare module.exports: $Exports<'graphql-anywhere/lib/src/utilities'>;
}
| {
"content_hash": "7368b890b6bcf1ca6ebcbe984068906d",
"timestamp": "",
"source": "github",
"line_count": 67,
"max_line_length": 77,
"avg_line_length": 29.865671641791046,
"alnum_prop": 0.7436281859070465,
"repo_name": "hsavit1/gosofi_webpage",
"id": "cdeda3ea7c2dab0a22dd5fc5a81ccb5dcca6f2b9",
"size": "2001",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "node_modules/react-apollo/flow-typed/npm/graphql-anywhere_vx.x.x.js",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "2768"
},
{
"name": "JavaScript",
"bytes": "14179050"
}
],
"symlink_target": ""
} |
extern crate crayon;
extern crate env_logger;
use crayon::impl_vertex;
use crayon::prelude::*;
// Generates the `Vertex` type via crayon's `impl_vertex!` macro: a single
// 2-component float `Position` attribute, not normalized.
impl_vertex! {
    Vertex {
        position => [Position; Float; 2; false],
    }
}
/// Holds the GPU resource handles and the command buffer used to draw a
/// single textured quad every frame.
struct Window {
    surface: SurfaceHandle,  // surface the draw commands are submitted to
    shader: ShaderHandle,    // shader sampling the "renderedTexture" uniform
    mesh: MeshHandle,        // quad geometry (4 vertices, 6 indices)
    texture: TextureHandle,  // texture loaded from "res:crate.bmp"
    vcmds: CommandBuffer,    // per-frame video command buffer
}
impl Window {
    /// Creates every video resource needed to draw a textured quad: the
    /// quad mesh, a default surface, the texture shader, and the texture
    /// itself. Fails with the first resource-creation error (`?`).
    fn build() -> CrResult<Self> {
        // A quad spanning [-1, 1] x [-1, 1], i.e. the whole clip space.
        let verts: [Vertex; 4] = [
            Vertex::new([-1.0, -1.0]),
            Vertex::new([1.0, -1.0]),
            Vertex::new([1.0, 1.0]),
            Vertex::new([-1.0, 1.0]),
        ];
        // Two triangles (0,1,2) and (0,2,3) forming the quad.
        let idxes: [u16; 6] = [0, 1, 2, 0, 2, 3];
        // Create vertex buffer object.
        let mut params = MeshParams::default();
        params.num_verts = 4;
        params.num_idxes = 6;
        params.layout = Vertex::layout();
        let data = MeshData {
            vptr: Vertex::encode(&verts[..]).into(),
            iptr: IndexFormat::encode(&idxes).into(),
        };
        let mesh = video::create_mesh(params, Some(data))?;
        // Create the view state.
        let setup = SurfaceParams::default();
        let surface = video::create_surface(setup)?;
        // Create shader state.
        let attributes = AttributeLayout::build()
            .with(Attribute::Position, 2)
            .finish();
        // The shader exposes a single texture uniform, bound in on_update.
        let uniforms = UniformVariableLayout::build()
            .with("renderedTexture", UniformVariableType::Texture)
            .finish();
        let mut params = ShaderParams::default();
        params.attributes = attributes;
        params.uniforms = uniforms;
        // Shader sources are embedded into the binary at compile time.
        let vs = include_str!("shaders/texture.vs").to_owned();
        let fs = include_str!("shaders/texture.fs").to_owned();
        let shader = video::create_shader(params, vs, fs)?;
        // Loaded through the resource system ("res:" shortcut set up in main!).
        let texture = video::create_texture_from("res:crate.bmp")?;
        Ok(Window {
            surface,
            shader,
            mesh,
            texture,
            vcmds: CommandBuffer::new(),
        })
    }
}
impl Drop for Window {
    /// Releases the GPU resources created in `Window::build`.
    ///
    /// NOTE(review): `self.texture` is never deleted here — confirm whether
    /// `video::delete_texture` is required or whether resource-loaded
    /// textures are owned by the resource system.
    fn drop(&mut self) {
        video::delete_mesh(self.mesh);
        video::delete_shader(self.shader);
        video::delete_surface(self.surface);
    }
}
impl LifecycleListener for Window {
    /// Per-frame hook: records one draw call binding the texture to the
    /// shader's "renderedTexture" uniform and submits the command buffer
    /// to the surface.
    fn on_update(&mut self) -> CrResult<()> {
        let mut dc = Draw::new(self.shader, self.mesh);
        dc.set_uniform_variable("renderedTexture", self.texture);
        self.vcmds.draw(dc);
        self.vcmds.submit(self.surface)?;
        Ok(())
    }
}
// Entry point: picks a platform-specific resource root, configures the
// window, and hands control to crayon with `Window::build` as the
// application factory.
main!({
    // Native builds read resources from the crate's examples directory...
    #[cfg(not(target_arch = "wasm32"))]
    let res = format!("file://{}/examples/resources/", env!("CARGO_MANIFEST_DIR"));
    // ...while wasm builds fetch them from a local development server.
    #[cfg(target_arch = "wasm32")]
    let res = format!("http://localhost:8080/examples/resources/");
    let mut params = Params::default();
    params.window.title = "CR: Texture".into();
    params.window.size = (464, 434).into();
    // Map the "res:" shortcut to the resource root and register it as a
    // search directory so "res:crate.bmp" resolves in Window::build.
    params.res.shortcuts.add("res:", res).unwrap();
    params.res.dirs.push("res:".into());
    crayon::application::setup(params, Window::build).unwrap();
});
| {
"content_hash": "664b504898c65851512d7faf4f2eae5a",
"timestamp": "",
"source": "github",
"line_count": 105,
"max_line_length": 83,
"avg_line_length": 28.704761904761906,
"alnum_prop": 0.5600530856005309,
"repo_name": "shawnscode/crayon",
"id": "199239667a4424047ba785d9c1e593826242912d",
"size": "3014",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "examples/texture.rs",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "GLSL",
"bytes": "2334"
},
{
"name": "HTML",
"bytes": "120"
},
{
"name": "JavaScript",
"bytes": "808"
},
{
"name": "Perl",
"bytes": "36971"
},
{
"name": "Rust",
"bytes": "621029"
},
{
"name": "Shell",
"bytes": "835"
}
],
"symlink_target": ""
} |
package org.hibersap;
/**
 * Exception signalling a configuration problem, specializing
 * {@link HibersapException}.
 */
public class ConfigurationException extends HibersapException {

    private static final long serialVersionUID = 1L;

    /**
     * Creates the exception with a detail message.
     *
     * @param msg the detail message
     */
    public ConfigurationException(final String msg) {
        super(msg);
    }

    /**
     * Creates the exception with a detail message and a cause.
     *
     * @param msg the detail message
     * @param t   the underlying cause
     */
    public ConfigurationException(final String msg, final Throwable t) {
        super(msg, t);
    }
}
| {
"content_hash": "76eae43b39dab5d819b710b52eadd08c",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 74,
"avg_line_length": 20.294117647058822,
"alnum_prop": 0.6695652173913044,
"repo_name": "drbunsen-bsteam/hibersap",
"id": "6b46d57989c3be38f5286b0140905846fa415c07",
"size": "1000",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "hibersap-core/src/main/java/org/hibersap/ConfigurationException.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "480196"
},
{
"name": "Shell",
"bytes": "889"
},
{
"name": "TeX",
"bytes": "77351"
}
],
"symlink_target": ""
} |
/////////////////////////////////////////////////////////////////////////////
// Name: wx/object.h
// Purpose: wxObject class, plus run-time type information macros
// Author: Julian Smart
// Modified by: Ron Lee
// Created: 01/02/97
// RCS-ID: $Id$
// Copyright: (c) 1997 Julian Smart
// (c) 2001 Ron Lee <[email protected]>
// Licence: wxWindows licence
/////////////////////////////////////////////////////////////////////////////
#ifndef _WX_OBJECTH__
#define _WX_OBJECTH__
// ----------------------------------------------------------------------------
// headers
// ----------------------------------------------------------------------------
#include "wx/memory.h"
#define wxDECLARE_CLASS_INFO_ITERATORS() \
class WXDLLIMPEXP_BASE const_iterator \
{ \
typedef wxHashTable_Node Node; \
public: \
typedef const wxClassInfo* value_type; \
typedef const value_type& const_reference; \
typedef const_iterator itor; \
typedef value_type* ptr_type; \
\
Node* m_node; \
wxHashTable* m_table; \
public: \
typedef const_reference reference_type; \
typedef ptr_type pointer_type; \
\
const_iterator(Node* node, wxHashTable* table) \
: m_node(node), m_table(table) { } \
const_iterator() : m_node(NULL), m_table(NULL) { } \
value_type operator*() const; \
itor& operator++(); \
const itor operator++(int); \
bool operator!=(const itor& it) const \
{ return it.m_node != m_node; } \
bool operator==(const itor& it) const \
{ return it.m_node == m_node; } \
}; \
\
static const_iterator begin_classinfo(); \
static const_iterator end_classinfo()
// based on the value of wxUSE_EXTENDED_RTTI symbol,
// only one of the RTTI system will be compiled:
// - the "old" one (defined by rtti.h) or
// - the "new" one (defined by xti.h)
#include "wx/xti.h"
#include "wx/rtti.h"
#define wxIMPLEMENT_CLASS(name, basename) \
wxIMPLEMENT_ABSTRACT_CLASS(name, basename)
#define wxIMPLEMENT_CLASS2(name, basename1, basename2) \
wxIMPLEMENT_ABSTRACT_CLASS2(name, basename1, basename2)
// -----------------------------------
// for pluggable classes
// -----------------------------------
// NOTE: this should probably be the very first statement
// in the class declaration so wxPluginSentinel is
// the first member initialised and the last destroyed.
// _DECLARE_DL_SENTINEL(name) wxPluginSentinel m_pluginsentinel;
#if wxUSE_NESTED_CLASSES
#define _DECLARE_DL_SENTINEL(name, exportdecl) \
class exportdecl name##PluginSentinel { \
private: \
static const wxString sm_className; \
public: \
name##PluginSentinel(); \
~name##PluginSentinel(); \
}; \
name##PluginSentinel m_pluginsentinel
#define _IMPLEMENT_DL_SENTINEL(name) \
const wxString name::name##PluginSentinel::sm_className(#name); \
name::name##PluginSentinel::name##PluginSentinel() { \
wxPluginLibrary *e = (wxPluginLibrary*) wxPluginLibrary::ms_classes.Get(#name); \
if( e != 0 ) { e->RefObj(); } \
} \
name::name##PluginSentinel::~name##PluginSentinel() { \
wxPluginLibrary *e = (wxPluginLibrary*) wxPluginLibrary::ms_classes.Get(#name); \
if( e != 0 ) { e->UnrefObj(); } \
}
#else
#define _DECLARE_DL_SENTINEL(name)
#define _IMPLEMENT_DL_SENTINEL(name)
#endif // wxUSE_NESTED_CLASSES
#define wxDECLARE_PLUGGABLE_CLASS(name) \
wxDECLARE_DYNAMIC_CLASS(name); _DECLARE_DL_SENTINEL(name, WXDLLIMPEXP_CORE)
#define wxDECLARE_ABSTRACT_PLUGGABLE_CLASS(name) \
wxDECLARE_ABSTRACT_CLASS(name); _DECLARE_DL_SENTINEL(name, WXDLLIMPEXP_CORE)
#define wxDECLARE_USER_EXPORTED_PLUGGABLE_CLASS(name, usergoo) \
wxDECLARE_DYNAMIC_CLASS(name); _DECLARE_DL_SENTINEL(name, usergoo)
#define wxDECLARE_USER_EXPORTED_ABSTRACT_PLUGGABLE_CLASS(name, usergoo) \
wxDECLARE_ABSTRACT_CLASS(name); _DECLARE_DL_SENTINEL(name, usergoo)
#define wxIMPLEMENT_PLUGGABLE_CLASS(name, basename) \
wxIMPLEMENT_DYNAMIC_CLASS(name, basename) _IMPLEMENT_DL_SENTINEL(name)
#define wxIMPLEMENT_PLUGGABLE_CLASS2(name, basename1, basename2) \
wxIMPLEMENT_DYNAMIC_CLASS2(name, basename1, basename2) _IMPLEMENT_DL_SENTINEL(name)
#define wxIMPLEMENT_ABSTRACT_PLUGGABLE_CLASS(name, basename) \
wxIMPLEMENT_ABSTRACT_CLASS(name, basename) _IMPLEMENT_DL_SENTINEL(name)
#define wxIMPLEMENT_ABSTRACT_PLUGGABLE_CLASS2(name, basename1, basename2) \
wxIMPLEMENT_ABSTRACT_CLASS2(name, basename1, basename2) _IMPLEMENT_DL_SENTINEL(name)
#define wxIMPLEMENT_USER_EXPORTED_PLUGGABLE_CLASS(name, basename) \
wxIMPLEMENT_PLUGGABLE_CLASS(name, basename)
#define wxIMPLEMENT_USER_EXPORTED_PLUGGABLE_CLASS2(name, basename1, basename2) \
wxIMPLEMENT_PLUGGABLE_CLASS2(name, basename1, basename2)
#define wxIMPLEMENT_USER_EXPORTED_ABSTRACT_PLUGGABLE_CLASS(name, basename) \
wxIMPLEMENT_ABSTRACT_PLUGGABLE_CLASS(name, basename)
#define wxIMPLEMENT_USER_EXPORTED_ABSTRACT_PLUGGABLE_CLASS2(name, basename1, basename2) \
wxIMPLEMENT_ABSTRACT_PLUGGABLE_CLASS2(name, basename1, basename2)
#define wxCLASSINFO(name) (&name::ms_classInfo)
#define wxIS_KIND_OF(obj, className) obj->IsKindOf(&className::ms_classInfo)
// Just seems a bit nicer-looking (pretend it's not a macro)
#define wxIsKindOf(obj, className) obj->IsKindOf(&className::ms_classInfo)
// this cast does some more checks at compile time as it uses static_cast
// internally
//
// note that it still has different semantics from dynamic_cast<> and so can't
// be replaced by it as long as there are any compilers not supporting it
#define wxDynamicCast(obj, className) \
((className *) wxCheckDynamicCast( \
const_cast<wxObject *>(static_cast<const wxObject *>(\
const_cast<className *>(static_cast<const className *>(obj)))), \
&className::ms_classInfo))
// The 'this' pointer is always true, so use this version
// to cast the this pointer and avoid compiler warnings.
#define wxDynamicCastThis(className) \
(IsKindOf(&className::ms_classInfo) ? (className *)(this) : (className *)0)
// FIXME-VC6: dummy argument needed because VC6 doesn't support explicitly
// choosing the template function to call
// Debug-checked downcast helper backing the wxStaticCast() macro: asserts
// (via wxDynamicCast) that ptr really points to a T, then performs the
// plain static cast. The unused T* parameter exists only so the compiler
// can deduce T (see the FIXME-VC6 note above).
template <class T>
inline T *wxCheckCast(const void *ptr, T * = NULL)
{
    wxASSERT_MSG( wxDynamicCast(ptr, T), "wxStaticCast() used incorrectly" );
    return const_cast<T *>(static_cast<const T *>(ptr));
}
#define wxStaticCast(obj, className) wxCheckCast((obj), (className *)NULL)
// ----------------------------------------------------------------------------
// set up memory debugging macros
// ----------------------------------------------------------------------------
/*
Which new/delete operator variants do we want?
_WX_WANT_NEW_SIZET_WXCHAR_INT = void *operator new (size_t size, wxChar *fileName = 0, int lineNum = 0)
_WX_WANT_DELETE_VOID = void operator delete (void * buf)
_WX_WANT_DELETE_VOID_CONSTCHAR_SIZET = void operator delete (void *buf, const char *_fname, size_t _line)
_WX_WANT_DELETE_VOID_WXCHAR_INT = void operator delete(void *buf, wxChar*, int)
_WX_WANT_ARRAY_NEW_SIZET_WXCHAR_INT = void *operator new[] (size_t size, wxChar *fileName , int lineNum = 0)
_WX_WANT_ARRAY_DELETE_VOID = void operator delete[] (void *buf)
_WX_WANT_ARRAY_DELETE_VOID_WXCHAR_INT = void operator delete[] (void* buf, wxChar*, int )
*/
#if wxUSE_MEMORY_TRACING
// All compilers get this one
#define _WX_WANT_NEW_SIZET_WXCHAR_INT
// Everyone except Visage gets the next one
#ifndef __VISAGECPP__
#define _WX_WANT_DELETE_VOID
#endif
// Only visage gets this one under the correct circumstances
#if defined(__VISAGECPP__) && __DEBUG_ALLOC__
#define _WX_WANT_DELETE_VOID_CONSTCHAR_SIZET
#endif
// Only VC++ 6 gets overloaded delete that matches new
#if (defined(__VISUALC__) && (__VISUALC__ >= 1200))
#define _WX_WANT_DELETE_VOID_WXCHAR_INT
#endif
// Now see who (if anyone) gets the array memory operators
#if wxUSE_ARRAY_MEMORY_OPERATORS
// Everyone except Visual C++ (cause problems for VC++ - crashes)
#if !defined(__VISUALC__)
#define _WX_WANT_ARRAY_NEW_SIZET_WXCHAR_INT
#endif
// Everyone except Visual C++ (cause problems for VC++ - crashes)
#if !defined(__VISUALC__)
#define _WX_WANT_ARRAY_DELETE_VOID
#endif
#endif // wxUSE_ARRAY_MEMORY_OPERATORS
#endif // wxUSE_MEMORY_TRACING
// ----------------------------------------------------------------------------
// Compatibility macro aliases DECLARE group
// ----------------------------------------------------------------------------
// deprecated variants _not_ requiring a semicolon after them and without wx prefix.
// (note that also some wx-prefixed macro do _not_ require a semicolon because
// it's not always possible to force the compire to require it)
#define DECLARE_CLASS_INFO_ITERATORS() wxDECLARE_CLASS_INFO_ITERATORS();
#define DECLARE_ABSTRACT_CLASS(n) wxDECLARE_ABSTRACT_CLASS(n);
#define DECLARE_DYNAMIC_CLASS_NO_ASSIGN(n) wxDECLARE_DYNAMIC_CLASS_NO_ASSIGN(n);
#define DECLARE_DYNAMIC_CLASS_NO_COPY(n) wxDECLARE_DYNAMIC_CLASS_NO_COPY(n);
#define DECLARE_DYNAMIC_CLASS(n) wxDECLARE_DYNAMIC_CLASS(n);
#define DECLARE_CLASS(n) wxDECLARE_CLASS(n);
#define DECLARE_PLUGGABLE_CLASS(n) wxDECLARE_PLUGGABLE_CLASS(n);
#define DECLARE_ABSTRACT_PLUGGABLE_CLASS(n) wxDECLARE_ABSTRACT_PLUGGABLE_CLASS(n);
#define DECLARE_USER_EXPORTED_PLUGGABLE_CLASS(n,u) wxDECLARE_USER_EXPORTED_PLUGGABLE_CLASS(n,u);
#define DECLARE_USER_EXPORTED_ABSTRACT_PLUGGABLE_CLASS(n,u) wxDECLARE_USER_EXPORTED_ABSTRACT_PLUGGABLE_CLASS(n,u);
// ----------------------------------------------------------------------------
// wxRefCounter: ref counted data "manager"
// ----------------------------------------------------------------------------
class WXDLLIMPEXP_BASE wxRefCounter
{
public:
    // A freshly created counter starts with a single reference.
    wxRefCounter() { m_count = 1; }
    int GetRefCount() const { return m_count; }
    void IncRef() { m_count++; }
    // Drops one reference; defined out of line. Per the protected dtor
    // note below, destruction only ever happens as a result of this call.
    void DecRef();
protected:
    // this object should never be destroyed directly but only as a
    // result of a DecRef() call:
    virtual ~wxRefCounter() { }
private:
    // our refcount:
    int m_count;
    // It doesn't make sense to copy the reference counted objects, a new ref
    // counter should be created for a new object instead and compilation
    // errors in the code using wxRefCounter due to the lack of copy ctor often
    // indicate a problem, e.g. a forgotten copy ctor implementation somewhere.
    wxDECLARE_NO_COPY_CLASS(wxRefCounter);
};
// ----------------------------------------------------------------------------
// wxObjectRefData: ref counted data meant to be stored in wxObject
// ----------------------------------------------------------------------------
typedef wxRefCounter wxObjectRefData;
// ----------------------------------------------------------------------------
// wxObjectDataPtr: helper class to avoid memleaks because of missing calls
// to wxObjectRefData::DecRef
// ----------------------------------------------------------------------------
// Smart pointer sharing ownership of a wxRefCounter-derived object.
// The raw-pointer constructor and raw-pointer assignment *take over* the
// caller's reference (no IncRef); copying adds a reference.
template <class T>
class wxObjectDataPtr
{
public:
    typedef T element_type;

    // Takes ownership of the caller's reference to ptr (no IncRef here).
    wxEXPLICIT wxObjectDataPtr(T *ptr = NULL) : m_ptr(ptr) {}

    // copy ctor: share the data and account for the extra reference
    wxObjectDataPtr(const wxObjectDataPtr<T> &tocopy)
        : m_ptr(tocopy.m_ptr)
    {
        if (m_ptr)
            m_ptr->IncRef();
    }

    ~wxObjectDataPtr()
    {
        if (m_ptr)
            m_ptr->DecRef();
    }

    T *get() const { return m_ptr; }

    // test for pointer validity: defining conversion to unspecified_bool_type
    // and not more obvious bool to avoid implicit conversions to integer types
    typedef T *(wxObjectDataPtr<T>::*unspecified_bool_type)() const;
    operator unspecified_bool_type() const
    {
        return m_ptr ? &wxObjectDataPtr<T>::get : NULL;
    }

    T& operator*() const
    {
        wxASSERT(m_ptr != NULL);
        return *(m_ptr);
    }

    T *operator->() const
    {
        wxASSERT(m_ptr != NULL);
        return get();
    }

    // Replaces the stored pointer, taking ownership of the caller's
    // reference to ptr. The new pointer is stored *before* the old
    // reference is dropped so m_ptr never dangles while DecRef() runs.
    void reset(T *ptr)
    {
        T * const old = m_ptr;
        m_ptr = ptr;
        if (old)
            old->DecRef();
    }

    wxObjectDataPtr& operator=(const wxObjectDataPtr &tocopy)
    {
        // IncRef() the source before DecRef()ing our own data: the
        // previous DecRef-first ordering destroyed the shared object on
        // self-assignment when it held the last reference, then used the
        // dangling pointer.
        T * const ptr = tocopy.m_ptr;
        if (ptr)
            ptr->IncRef();
        if (m_ptr)
            m_ptr->DecRef();
        m_ptr = ptr;
        return *this;
    }

    // Takes ownership of the caller's reference to ptr (no IncRef),
    // mirroring the raw-pointer constructor; same store-then-release
    // ordering as reset() for safety.
    wxObjectDataPtr& operator=(T *ptr)
    {
        T * const old = m_ptr;
        m_ptr = ptr;
        if (old)
            old->DecRef();
        return *this;
    }

private:
    T *m_ptr;
};
// ----------------------------------------------------------------------------
// wxObject: the root class of wxWidgets object hierarchy
// ----------------------------------------------------------------------------
// Provides run-time type information (IsKindOf) and optional reference
// counted, copy-on-write data sharing through m_refData.
class WXDLLIMPEXP_BASE wxObject
{
    wxDECLARE_ABSTRACT_CLASS(wxObject);
public:
    wxObject() { m_refData = NULL; }
    // Drops this object's reference to the shared data, if any.
    virtual ~wxObject() { UnRef(); }
    // Copying shares the ref counted data with the source instead of
    // cloning it.
    wxObject(const wxObject& other)
    {
        m_refData = other.m_refData;
        if (m_refData)
            m_refData->IncRef();
    }
    // Assignment shares the data via Ref(); self-assignment is a no-op.
    wxObject& operator=(const wxObject& other)
    {
        if ( this != &other )
        {
            Ref(other);
        }
        return *this;
    }
    bool IsKindOf(const wxClassInfo *info) const;
    // Turn on the correct set of new and delete operators
#ifdef _WX_WANT_NEW_SIZET_WXCHAR_INT
    void *operator new ( size_t size, const wxChar *fileName = NULL, int lineNum = 0 );
#endif
#ifdef _WX_WANT_DELETE_VOID
    void operator delete ( void * buf );
#endif
#ifdef _WX_WANT_DELETE_VOID_CONSTCHAR_SIZET
    void operator delete ( void *buf, const char *_fname, size_t _line );
#endif
#ifdef _WX_WANT_DELETE_VOID_WXCHAR_INT
    void operator delete ( void *buf, const wxChar*, int );
#endif
#ifdef _WX_WANT_ARRAY_NEW_SIZET_WXCHAR_INT
    void *operator new[] ( size_t size, const wxChar *fileName = NULL, int lineNum = 0 );
#endif
#ifdef _WX_WANT_ARRAY_DELETE_VOID
    void operator delete[] ( void *buf );
#endif
#ifdef _WX_WANT_ARRAY_DELETE_VOID_WXCHAR_INT
    void operator delete[] (void* buf, const wxChar*, int );
#endif
    // ref counted data handling methods
    // get/set
    wxObjectRefData *GetRefData() const { return m_refData; }
    // Stores the pointer as-is: no IncRef here, the caller manages the
    // reference being handed over.
    void SetRefData(wxObjectRefData *data) { m_refData = data; }
    // make a 'clone' of the object
    void Ref(const wxObject& clone);
    // destroy a reference
    void UnRef();
    // Make sure this object has only one reference
    void UnShare() { AllocExclusive(); }
    // check if this object references the same data as the other one
    bool IsSameAs(const wxObject& o) const { return m_refData == o.m_refData; }
protected:
    // ensure that our data is not shared with anybody else: if we have no
    // data, it is created using CreateRefData() below, if we have shared data
    // it is copied using CloneRefData(), otherwise nothing is done
    void AllocExclusive();
    // both methods must be implemented if AllocExclusive() is used, not pure
    // virtual only because of the backwards compatibility reasons
    // create a new m_refData
    virtual wxObjectRefData *CreateRefData() const;
    // create a new m_refData initialized with the given one
    virtual wxObjectRefData *CloneRefData(const wxObjectRefData *data) const;
    wxObjectRefData *m_refData;
};
// Backend of the wxDynamicCast() macro: returns obj if it is non-NULL and
// its run-time class info IsKindOf(classInfo), NULL otherwise.
inline wxObject *wxCheckDynamicCast(wxObject *obj, wxClassInfo *classInfo)
{
    return obj && obj->GetClassInfo()->IsKindOf(classInfo) ? obj : NULL;
}
#include "wx/xti2.h"
// ----------------------------------------------------------------------------
// more debugging macros
// ----------------------------------------------------------------------------
#if wxUSE_DEBUG_NEW_ALWAYS
#define WXDEBUG_NEW new(__TFILE__,__LINE__)
#if wxUSE_GLOBAL_MEMORY_OPERATORS
#define new WXDEBUG_NEW
#elif defined(__VISUALC__)
// Including this file redefines new and allows leak reports to
// contain line numbers
#include "wx/msw/msvcrt.h"
#endif
#endif // wxUSE_DEBUG_NEW_ALWAYS
// ----------------------------------------------------------------------------
// Compatibility macro aliases IMPLEMENT group
// ----------------------------------------------------------------------------
// deprecated variants _not_ requiring a semicolon after them and without wx prefix.
// (note that also some wx-prefixed macro do _not_ require a semicolon because
// it's not always possible to force the compire to require it)
#define IMPLEMENT_DYNAMIC_CLASS(n,b) wxIMPLEMENT_DYNAMIC_CLASS(n,b)
#define IMPLEMENT_DYNAMIC_CLASS2(n,b1,b2) wxIMPLEMENT_DYNAMIC_CLASS2(n,b1,b2)
#define IMPLEMENT_ABSTRACT_CLASS(n,b) wxIMPLEMENT_ABSTRACT_CLASS(n,b)
#define IMPLEMENT_ABSTRACT_CLASS2(n,b1,b2) wxIMPLEMENT_ABSTRACT_CLASS2(n,b1,b2)
#define IMPLEMENT_CLASS(n,b) wxIMPLEMENT_CLASS(n,b)
#define IMPLEMENT_CLASS2(n,b1,b2) wxIMPLEMENT_CLASS2(n,b1,b2)
#define IMPLEMENT_PLUGGABLE_CLASS(n,b) wxIMPLEMENT_PLUGGABLE_CLASS(n,b)
#define IMPLEMENT_PLUGGABLE_CLASS2(n,b,b2) wxIMPLEMENT_PLUGGABLE_CLASS2(n,b,b2)
#define IMPLEMENT_ABSTRACT_PLUGGABLE_CLASS(n,b) wxIMPLEMENT_ABSTRACT_PLUGGABLE_CLASS(n,b)
#define IMPLEMENT_ABSTRACT_PLUGGABLE_CLASS2(n,b,b2) wxIMPLEMENT_ABSTRACT_PLUGGABLE_CLASS2(n,b,b2)
#define IMPLEMENT_USER_EXPORTED_PLUGGABLE_CLASS(n,b) wxIMPLEMENT_USER_EXPORTED_PLUGGABLE_CLASS(n,b)
#define IMPLEMENT_USER_EXPORTED_PLUGGABLE_CLASS2(n,b,b2) wxIMPLEMENT_USER_EXPORTED_PLUGGABLE_CLASS2(n,b,b2)
#define IMPLEMENT_USER_EXPORTED_ABSTRACT_PLUGGABLE_CLASS(n,b) wxIMPLEMENT_USER_EXPORTED_ABSTRACT_PLUGGABLE_CLASS(n,b)
#define IMPLEMENT_USER_EXPORTED_ABSTRACT_PLUGGABLE_CLASS2(n,b,b2) wxIMPLEMENT_USER_EXPORTED_ABSTRACT_PLUGGABLE_CLASS2(n,b,b2)
#define CLASSINFO(n) wxCLASSINFO(n)
#endif // _WX_OBJECTH__
| {
"content_hash": "234c01a5445698a424b5590266854c1b",
"timestamp": "",
"source": "github",
"line_count": 498,
"max_line_length": 127,
"avg_line_length": 39.91566265060241,
"alnum_prop": 0.5581044370661032,
"repo_name": "clinthidinger/ofxWxWidgets",
"id": "cb9d8f8c98ff700ce587223aae669ae5fb4ebc5c",
"size": "19878",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "libs/wxWidgets/include/wx/object.h",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "2033008"
},
{
"name": "C++",
"bytes": "7596589"
},
{
"name": "Objective-C",
"bytes": "170354"
}
],
"symlink_target": ""
} |
<?php
namespace Conpago\Migrations\Contract;
/**
 * Adapter to the application's presentation layer, used to report the
 * progress of a migration run.
 */
interface IMigrateCommandPresenter
{
    /**
     * Reports that the migration run has started.
     *
     * @param integer $count Number of migrations to run in the current course.
     *
     * @return void
     */
    public function migrationStarted(int $count): void;
    /**
     * Reports that the migration run has finished.
     *
     * @return void
     */
    public function migrationEnded(): void;
    /**
     * Reports that a single migration is being executed.
     *
     * @param integer $number Sequence number of the migration within the current course.
     * @param integer $count  Number of migrations to run in the current course.
     *
     * @return void
     */
    public function runningMigration(int $number, int $count): void;
}
| {
"content_hash": "1436c0aafc74f4562d644fb927e1f024",
"timestamp": "",
"source": "github",
"line_count": 37,
"max_line_length": 78,
"avg_line_length": 22.594594594594593,
"alnum_prop": 0.6423444976076556,
"repo_name": "conpago/conpago-migrations-contract",
"id": "45ea18d8e60de74d0f359822e3b74f033bf28709",
"size": "1084",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/Conpago/Migrations/Contract/IMigrateCommandPresenter.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "PHP",
"bytes": "1598"
}
],
"symlink_target": ""
} |
namespace ConApp.Model
{
public class MathDemo
{
public int Value;
public int GetSquare()
{
return Value * Value;
}
public static int GetSquareOf(int x)
{
return x * x;
}
public static double GetPi()
{
return 3.14159;
}
}
} | {
"content_hash": "9aeb5727a9eae48238a8e91432f621d6",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 44,
"avg_line_length": 16.136363636363637,
"alnum_prop": 0.4507042253521127,
"repo_name": "ifairy/CSharpExample",
"id": "3f587b814a00bc48d02a068254caa88beff78118",
"size": "357",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "CSharpExample/CSharpBasic/ConApp/Model/MathDemo.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ASP",
"bytes": "26770"
},
{
"name": "C",
"bytes": "15994"
},
{
"name": "C#",
"bytes": "1826295"
},
{
"name": "C++",
"bytes": "11256"
},
{
"name": "CSS",
"bytes": "3736808"
},
{
"name": "HTML",
"bytes": "2955643"
},
{
"name": "JavaScript",
"bytes": "5001680"
},
{
"name": "PHP",
"bytes": "1056"
},
{
"name": "PowerShell",
"bytes": "17832"
},
{
"name": "Python",
"bytes": "115"
}
],
"symlink_target": ""
} |
Subsets and Splits