text
stringlengths 2
1.04M
| meta
dict |
---|---|
<?php
/**
* @namespace
*/
namespace ZendTest\Feed\PubSubHubbub;
/**
* Note that $this->_baseuri must point to a directory on a web server
* containing all the files under the _files directory. You should symlink
* or copy these files and set '_baseuri' properly using the constant in
* TestConfiguration.php (based on TestConfiguration.php.dist)
*
* You can also set the proper constant in your test configuration file to
* point to the right place.
*
* @category Zend
* @package Zend_Feed
* @subpackage UnitTests
* @group Zend_Feed
* @group Zend_Feed_Subsubhubbub
* @copyright Copyright (c) 2005-2010 Zend Technologies USA Inc. (http://www.zend.com)
* @license http://framework.zend.com/license/new-bsd New BSD License
*/
class SubscriberHttpTest extends \PHPUnit_Framework_TestCase
{
    protected $_subscriber = null;
    protected $_baseuri;
    protected $_client = null;
    protected $_adapter = null;
    // Mocked storage backend injected into the subscriber in setUp().
    // (Previously assigned without being declared.)
    protected $_storage = null;
    protected $_config = array(
        'adapter' => 'Zend_Http_Client_Adapter_Socket'
    );

    /**
     * Builds an HTTP client pointed at the test script matching the current
     * test name under $_baseuri, and wires a Subscriber with mocked storage.
     * Skips the whole test when TESTS_Zend_Feed_PubSubHubbub_BASEURI is not
     * configured or not a valid URI.
     */
    public function setUp()
    {
        if (defined('TESTS_Zend_Feed_PubSubHubbub_BASEURI') &&
            \Zend\Uri\Url::check(TESTS_Zend_Feed_PubSubHubbub_BASEURI)) {
            $this->_baseuri = TESTS_Zend_Feed_PubSubHubbub_BASEURI;
            if (substr($this->_baseuri, -1) != '/') {
                $this->_baseuri .= '/';
            }
            // PHPUnit test names may carry a data-set suffix after a space;
            // only the bare method name maps to a server-side .php fixture.
            $name = $this->getName();
            if (($pos = strpos($name, ' ')) !== false) {
                $name = substr($name, 0, $pos);
            }
            $uri = $this->_baseuri . $name . '.php';
            $this->_adapter = new $this->_config['adapter'];
            $this->_client = new \Zend\Http\Client($uri, $this->_config);
            $this->_client->setAdapter($this->_adapter);
            \Zend\Feed\PubSubHubbub\PubSubHubbub::setHttpClient($this->_client);
            $this->_subscriber = new \Zend\Feed\PubSubHubbub\Subscriber\Subscriber;
            $this->_storage = $this->_getCleanMock('Zend_Feed_PubSubHubbub_Entity_TopicSubscription');
            $this->_subscriber->setStorage($this->_storage);
        } else {
            // Skip tests. Bug fix: the original mixed quote styles inside one
            // double-quoted string, so the literal characters "' . '" leaked
            // into the skip message; use real concatenation instead.
            $this->markTestSkipped('Zend_Feed_PubSubHubbub_Subscriber dynamic tests'
                . ' are not enabled in TestConfiguration.php');
        }
    }

    /**
     * The hub endpoint echoes back the raw POST body, so asserting on the
     * response body verifies the exact form-encoded subscription request.
     */
    public function testSubscriptionRequestSendsExpectedPostData()
    {
        $this->_subscriber->setTopicUrl('http://www.example.com/topic');
        $this->_subscriber->addHubUrl($this->_baseuri . '/testRawPostData.php');
        $this->_subscriber->setCallbackUrl('http://www.example.com/callback');
        $this->_subscriber->setTestStaticToken('abc'); // override for testing
        $this->_subscriber->subscribeAll();
        $this->assertEquals(
            'hub.callback=http%3A%2F%2Fwww.example.com%2Fcallback%3Fxhub.subscription%3D5536df06b5d'
            .'cb966edab3a4c4d56213c16a8184b&hub.lease_seconds=2592000&hub.mode='
            .'subscribe&hub.topic=http%3A%2F%2Fwww.example.com%2Ftopic&hub.veri'
            .'fy=sync&hub.verify=async&hub.verify_token=abc',
            $this->_client->getLastResponse()->getBody());
    }

    /**
     * Same echo technique as above for the unsubscribe request (note: no
     * hub.lease_seconds parameter is expected on unsubscription).
     */
    public function testUnsubscriptionRequestSendsExpectedPostData()
    {
        $this->_subscriber->setTopicUrl('http://www.example.com/topic');
        $this->_subscriber->addHubUrl($this->_baseuri . '/testRawPostData.php');
        $this->_subscriber->setCallbackUrl('http://www.example.com/callback');
        $this->_subscriber->setTestStaticToken('abc'); //override for testing
        $this->_subscriber->unsubscribeAll();
        $this->assertEquals(
            'hub.callback=http%3A%2F%2Fwww.example.com%2Fcallback%3Fxhub.subscription%3D5536df06b5d'
            .'cb966edab3a4c4d56213c16a8184b&hub.mode=unsubscribe&hub.topic=http'
            .'%3A%2F%2Fwww.example.com%2Ftopic&hub.verify=sync&hub.verify=async'
            .'&hub.verify_token=abc',
            $this->_client->getLastResponse()->getBody());
    }

    /**
     * Builds a mock of $className with every public (and abstract protected)
     * method stubbed, without invoking the original constructor.
     *
     * @param  string $className
     * @return object Mock instance.
     */
    protected function _getCleanMock($className)
    {
        $class = new \ReflectionClass($className);
        $methods = $class->getMethods();
        $stubMethods = array();
        foreach ($methods as $method) {
            if ($method->isPublic() || ($method->isProtected()
                    && $method->isAbstract())) {
                $stubMethods[] = $method->getName();
            }
        }
        $mocked = $this->getMock(
            $className,
            $stubMethods,
            array(),
            $className . '_SubscriberHttpTestMock_' . uniqid(),
            false
        );
        return $mocked;
    }
}
| {
"content_hash": "694a6aeab01998d9fa6a251ceff0b55c",
"timestamp": "",
"source": "github",
"line_count": 119,
"max_line_length": 102,
"avg_line_length": 39.016806722689076,
"alnum_prop": 0.606289037260392,
"repo_name": "heiglandreas/zf2",
"id": "a792c2f642771c1a34a8a62d315ff776555cef3d",
"size": "5333",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "tests/Zend/Feed/PubSubHubbub/SubscriberHttpTest.php",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "40638"
},
{
"name": "JavaScript",
"bytes": "30072"
},
{
"name": "PHP",
"bytes": "26561813"
},
{
"name": "Puppet",
"bytes": "2646"
},
{
"name": "Ruby",
"bytes": "10"
},
{
"name": "Shell",
"bytes": "6527"
},
{
"name": "TypeScript",
"bytes": "3445"
}
],
"symlink_target": ""
} |
package com.parse.ui;
import android.app.Activity;
import android.os.Bundle;
import android.view.LayoutInflater;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.EditText;
import android.widget.ImageView;
import android.widget.TextView;
import com.parse.ParseException;
import com.parse.ParseUser;
import com.parse.RequestPasswordResetCallback;
/**
* Fragment for the login help screen for resetting the user's password.
*/
public class ParseLoginHelpFragment extends ParseLoginFragmentBase implements OnClickListener {

  /** Callback fired when the user dismisses the screen after a reset email was sent. */
  public interface ParseOnLoginHelpSuccessListener {
    public void onLoginHelpSuccess();
  }

  private TextView instructionsTextView;
  private EditText emailField;
  private Button submitButton;
  // Flips to true once the reset email has been sent; the submit button then
  // acts as a "back to login" button instead.
  private boolean emailSent = false;
  private ParseOnLoginHelpSuccessListener onLoginHelpSuccessListener;

  private ParseLoginConfig config;

  private static final String LOG_TAG = "ParseLoginHelpFragment";

  public static ParseLoginHelpFragment newInstance(Bundle configOptions) {
    ParseLoginHelpFragment loginHelpFragment = new ParseLoginHelpFragment();
    loginHelpFragment.setArguments(configOptions);
    return loginHelpFragment;
  }

  @Override
  public View onCreateView(LayoutInflater inflater, ViewGroup parent,
      Bundle savedInstanceState) {
    config = ParseLoginConfig.fromBundle(getArguments(), getActivity());

    View v = inflater.inflate(R.layout.com_parse_ui_parse_login_help_fragment,
        parent, false);
    ImageView appLogo = (ImageView) v.findViewById(R.id.app_logo);
    instructionsTextView = (TextView) v
        .findViewById(R.id.login_help_instructions);
    emailField = (EditText) v.findViewById(R.id.login_help_email_input);
    submitButton = (Button) v.findViewById(R.id.login_help_submit);

    if (appLogo != null && config.getAppLogo() != null) {
      appLogo.setImageResource(config.getAppLogo());
    }

    submitButton.setOnClickListener(this);

    return v;
  }

  @Override
  public void onAttach(Activity activity) {
    super.onAttach(activity);

    // The hosting activity must provide both listeners; fail fast otherwise.
    // (Fixed typo in both messages: "implemement" -> "implement".)
    if (activity instanceof ParseOnLoadingListener) {
      onLoadingListener = (ParseOnLoadingListener) activity;
    } else {
      throw new IllegalArgumentException(
          "Activity must implement ParseOnLoadingListener");
    }

    if (activity instanceof ParseOnLoginHelpSuccessListener) {
      onLoginHelpSuccessListener = (ParseOnLoginHelpSuccessListener) activity;
    } else {
      throw new IllegalArgumentException(
          "Activity must implement ParseOnLoginHelpSuccessListener");
    }
  }

  @Override
  public void onClick(View v) {
    if (!emailSent) {
      // First tap: validate the address and request the password reset.
      String email = emailField.getText().toString();
      if (email.length() == 0) {
        showToast(R.string.com_parse_ui_no_email_toast);
      } else {
        loadingStart();
        ParseUser.requestPasswordResetInBackground(email,
            new RequestPasswordResetCallback() {
              @Override
              public void done(ParseException e) {
                if (isActivityDestroyed()) {
                  return;
                }
                loadingFinish();
                if (e == null) {
                  // Success: switch the UI into its "email sent" state.
                  instructionsTextView
                      .setText(R.string.com_parse_ui_login_help_email_sent);
                  emailField.setVisibility(View.INVISIBLE);
                  submitButton
                      .setText(R.string.com_parse_ui_login_help_login_again_button_label);
                  emailSent = true;
                } else {
                  debugLog(getString(R.string.com_parse_ui_login_warning_password_reset_failed) +
                      e.toString());
                  if (e.getCode() == ParseException.INVALID_EMAIL_ADDRESS ||
                      e.getCode() == ParseException.EMAIL_NOT_FOUND) {
                    showToast(R.string.com_parse_ui_invalid_email_toast);
                  } else {
                    showToast(R.string.com_parse_ui_login_help_submit_failed_unknown);
                  }
                }
              }
            });
      }
    } else {
      // Second tap (after the email went out): hand control back to the host.
      onLoginHelpSuccessListener.onLoginHelpSuccess();
    }
  }

  @Override
  protected String getLogTag() {
    return LOG_TAG;
  }
}
| {
"content_hash": "e74fa8ed421aae02904405d9841c75a0",
"timestamp": "",
"source": "github",
"line_count": 131,
"max_line_length": 97,
"avg_line_length": 33.030534351145036,
"alnum_prop": 0.6609660272706263,
"repo_name": "samwong1990/Peid",
"id": "42186c3c5be852c61b00bfc1abfd512e3f7d5ffe",
"size": "5435",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "Peid/ParseLoginUI/src/com/parse/ui/ParseLoginHelpFragment.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "217515"
},
{
"name": "Groovy",
"bytes": "4842"
},
{
"name": "Java",
"bytes": "1765059"
},
{
"name": "JavaScript",
"bytes": "136990"
}
],
"symlink_target": ""
} |
ACCEPTED
#### According to
Index Fungorum
#### Published in
Fungi hungarici no. 206 (1883)
#### Original name
Entyloma winteri Linh.
### Remarks
null | {
"content_hash": "c26d92562c597eb844ecc50c05cb60b0",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 30,
"avg_line_length": 11.76923076923077,
"alnum_prop": 0.7058823529411765,
"repo_name": "mdoering/backbone",
"id": "030292a64ae96f64a5a03a76fb68a4d8450bd251",
"size": "199",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "life/Fungi/Basidiomycota/Exobasidiomycetes/Entylomatales/Entylomataceae/Entyloma/Entyloma winteri/README.md",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
<?php
/**
 * This file is automatically @generated by {@link BuildMetadataPHPFromXml}.
 * Please don't modify it directly.
 */
// Short-number metadata for Russia ('RU'). Only the emergency and shortCode
// categories carry real patterns (0 followed by 1/2/3, or 112); every other
// category is marked 'NA' (not applicable).
return array (
 'generalDesc' =>
 array (
    'NationalNumberPattern' => '[01]\\d{1,2}',
    'PossibleNumberPattern' => '\\d{2,3}',
  ),
 'fixedLine' =>
 array (
    'NationalNumberPattern' => '[01]\\d{1,2}',
    'PossibleNumberPattern' => '\\d{2,3}',
  ),
 'mobile' =>
 array (
    'NationalNumberPattern' => '[01]\\d{1,2}',
    'PossibleNumberPattern' => '\\d{2,3}',
  ),
 'tollFree' =>
 array (
    'NationalNumberPattern' => 'NA',
    'PossibleNumberPattern' => 'NA',
  ),
 'premiumRate' =>
 array (
    'NationalNumberPattern' => 'NA',
    'PossibleNumberPattern' => 'NA',
  ),
 'sharedCost' =>
 array (
    'NationalNumberPattern' => 'NA',
    'PossibleNumberPattern' => 'NA',
  ),
 'personalNumber' =>
 array (
    'NationalNumberPattern' => 'NA',
    'PossibleNumberPattern' => 'NA',
  ),
 'voip' =>
 array (
    'NationalNumberPattern' => 'NA',
    'PossibleNumberPattern' => 'NA',
  ),
 'pager' =>
 array (
    'NationalNumberPattern' => 'NA',
    'PossibleNumberPattern' => 'NA',
  ),
 'uan' =>
 array (
    'NationalNumberPattern' => 'NA',
    'PossibleNumberPattern' => 'NA',
  ),
 // Emergency services: 01 (fire), 02 (police), 03 (ambulance) and the
 // pan-European 112.
 'emergency' =>
 array (
    'NationalNumberPattern' => '
0[123]|
112
',
    'PossibleNumberPattern' => '\\d{2,3}',
    'ExampleNumber' => '112',
  ),
 'voicemail' =>
 array (
    'NationalNumberPattern' => 'NA',
    'PossibleNumberPattern' => 'NA',
  ),
 // Short codes mirror the emergency numbers above.
 'shortCode' =>
 array (
    'NationalNumberPattern' => '
0[123]|
112
',
    'PossibleNumberPattern' => '\\d{2,3}',
    'ExampleNumber' => '112',
  ),
 'standardRate' =>
 array (
    'NationalNumberPattern' => 'NA',
    'PossibleNumberPattern' => 'NA',
  ),
 'carrierSpecific' =>
 array (
    'NationalNumberPattern' => 'NA',
    'PossibleNumberPattern' => 'NA',
  ),
 'noInternationalDialling' =>
 array (
    'NationalNumberPattern' => 'NA',
    'PossibleNumberPattern' => 'NA',
  ),
 'id' => 'RU',
 // Short-number metadata carries no real country code or prefix.
 'countryCode' => 0,
 'internationalPrefix' => '',
 'sameMobileAndFixedLinePattern' => true,
 'numberFormat' =>
 array (
  ),
 'intlNumberFormat' =>
 array (
  ),
 'mainCountryForCode' => false,
 'leadingZeroPossible' => false,
 'mobileNumberPortableRegion' => false,
);
| {
"content_hash": "be619886bd412aa6af4c2e752a36d0b5",
"timestamp": "",
"source": "github",
"line_count": 110,
"max_line_length": 76,
"avg_line_length": 21.51818181818182,
"alnum_prop": 0.5555555555555556,
"repo_name": "Xephi/libphonenumber-for-php",
"id": "d60af664044a943f0a08620343ab90a78015f93b",
"size": "2367",
"binary": false,
"copies": "18",
"ref": "refs/heads/master",
"path": "src/libphonenumber/data/ShortNumberMetadata_RU.php",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "PHP",
"bytes": "16430130"
}
],
"symlink_target": ""
} |
A JavaScript half-edge data structure for your 3D models.
# Usage with Three.js
```html
...
<script type="text/javascript" src="Halfedge.js"></script>
<script>
...
var geometry;
var heData;
geometry = new THREE.BoxGeometry( 1, 1, 1 );
heData = new HE_Datastructure( geometry.vertices, geometry.faces );
heData.build();
var faceOfInterestID = 0;
var result = heData.findAdjacentFacesToFace( faceOfInterestID );
for( var i = 0; i < result.length; i++ )
{
console.log("FaceID(" + i + "): " + result[i]);
}
...
</script>
```
| {
"content_hash": "cebd128a002c99ac6162d435ba1578bc",
"timestamp": "",
"source": "github",
"line_count": 31,
"max_line_length": 69,
"avg_line_length": 18.451612903225808,
"alnum_prop": 0.6223776223776224,
"repo_name": "Subimago/Halfedge.js",
"id": "acd7c66895788b7c42c9a0ea433f69d569d3716b",
"size": "586",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "README.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "7460"
}
],
"symlink_target": ""
} |
import json
import os
import shutil
from datetime import date, timedelta
import mock
from django.conf import settings
from django.core import management
from olympia import amo
from olympia.amo.tests import TestCase
from olympia.addons.models import Addon, Persona
from olympia.stats.management.commands import (
save_stats_to_file, serialize_stats)
from olympia.stats.management.commands.download_counts_from_file import is_valid_source # noqa
from olympia.stats.management.commands.update_counts_from_file import Command
from olympia.stats.models import (
DownloadCount, ThemeUpdateCount, UpdateCount, ThemeUserCount)
from olympia.zadmin.models import DownloadSource
hive_folder = os.path.join(settings.ROOT, 'src/olympia/stats/fixtures/files')
class FixturesFolderMixin(object):
    """Copy a source fixtures folder into a date-named folder around each
    test, and remove the date-named folder again afterwards.
    """

    # Subclasses must override both of these values.
    date = 'YYYY-MM-DD'
    source_folder = 'dummy'

    def clean_up_files(self):
        """Delete the date-named fixtures directory and its files, if any."""
        target = os.path.join(hive_folder, self.date)
        if not os.path.isdir(target):
            return
        for entry in os.listdir(target):
            os.unlink(os.path.join(target, entry))
        os.rmdir(target)

    def setUp(self):
        super(FixturesFolderMixin, self).setUp()
        self.clean_up_files()
        source = os.path.join(hive_folder, self.source_folder)
        destination = os.path.join(hive_folder, self.date)
        shutil.copytree(source, destination)

    def tearDown(self):
        self.clean_up_files()
        super(FixturesFolderMixin, self).tearDown()
class TestADICommand(FixturesFolderMixin, TestCase):
    """Tests for the ADI (Add-on Daily Information) management commands that
    import update/download counts from hive export files, plus unit tests for
    the helpers on the ``update_counts_from_file`` Command class.
    """
    fixtures = ('base/addon_3615', 'base/featured', 'addons/persona',
                'base/appversion.json')
    date = '2014-07-10'
    source_folder = 'src'

    def setUp(self):
        super(TestADICommand, self).setUp()
        self.command = Command()

    @mock.patch(
        'olympia.stats.management.commands.update_counts_from_file.'
        'save_stats_to_file')
    def test_update_counts_from_file(self, mock_save_stats_to_file):
        # End-to-end run of the command over the fixture files for self.date.
        management.call_command('update_counts_from_file', hive_folder,
                                date=self.date)
        assert UpdateCount.objects.all().count() == 1
        update_count = UpdateCount.objects.last()
        assert update_count.count == 5
        assert update_count.date == date(2014, 7, 10)
        assert update_count.versions == {u'3.8': 2, u'3.7': 3}
        assert update_count.statuses == {u'userEnabled': 5}
        application = u'{ec8030f7-c20a-464f-9b0e-13a3a9e97384}'
        assert update_count.applications[application] == {u'3.6': 18}
        assert update_count.oses == {u'WINNT': 5}
        assert update_count.locales == {u'en-us': 1, u'en-US': 4}
        # save_stats_to_file is called with a non-saved model.
        update_count.id = None
        mock_save_stats_to_file.assert_called_once_with(update_count)

    def test_update_version(self):
        # Initialize the known addons and their versions.
        self.command.addons_versions = {3615: ['3.5', '3.6']}
        uc = UpdateCount(addon_id=3615)
        self.command.update_version(uc, '3.6', 123)
        assert uc.versions == {'3.6': 123}
        # Test very long version:
        self.command.update_version(uc, '1' * 33, 1)
        assert uc.versions == {'3.6': 123, '1' * 32: 1}  # Trimmed.

    def test_update_status(self):
        uc = UpdateCount(addon_id=3615)
        self.command.update_status(uc, 'foobar', 123)  # Non-existent status.
        assert not uc.statuses
        self.command.update_status(uc, 'userEnabled', 123)
        assert uc.statuses == {'userEnabled': 123}

    def test_update_app(self):
        firefox_guid = '{ec8030f7-c20a-464f-9b0e-13a3a9e97384}'
        uc = UpdateCount(addon_id=3615)
        self.command.update_app(uc, 'foobar', '1.0', 123)  # Non-existent app.
        assert not uc.applications
        # Malformed versions.
        self.command.update_app(uc, firefox_guid, '3.0.1.2', 123)
        self.command.update_app(uc, firefox_guid, '3.0123', 123)
        self.command.update_app(uc, firefox_guid, '3.0c2', 123)
        self.command.update_app(uc, firefox_guid, 'a.b.c', 123)
        assert not uc.applications
        # Well formed versions.
        self.command.update_app(uc, firefox_guid, '1.0', 123)
        self.command.update_app(uc, firefox_guid, '1.0.1', 124)
        self.command.update_app(uc, firefox_guid, '1.0a1', 125)
        self.command.update_app(uc, firefox_guid, '1.0b2', 126)
        assert uc.applications == {firefox_guid: {
            '1.0': 123,
            '1.0.1': 124,
            '1.0a1': 125,
            '1.0b2': 126}}

    def test_update_os(self):
        uc = UpdateCount(addon_id=3615)
        self.command.update_os(uc, 'foobar', 123)  # Non-existent OS.
        assert not uc.oses
        self.command.update_os(uc, 'WINNT', 123)
        assert uc.oses == {'WINNT': 123}

    def test_update_locale(self):
        current_locales = [  # Taken from the language pack index.
            'ach', 'af', 'ak', 'an', 'ar', 'as', 'ast', 'ast-ES', 'az',
            'bb-BK', 'be', 'bg', 'bn-BD', 'bn-IN', 'br', 'bs', 'ca',
            'ca-valencia', 'cs', 'csb', 'cy', 'cy-GB', 'da', 'de', 'dsb', 'el',
            'en-GB', 'en-ZA', 'eo', 'es-AR', 'es-CL', 'es-ES', 'es-MX', 'et',
            'eu', 'fa', 'ff', 'fi', 'fj-FJ', 'fr', 'fur-IT', 'fy-NL', 'ga-IE',
            'gd', 'gl', 'gu-IN', 'he', 'hi', 'hi-IN', 'hr', 'hsb', 'hu',
            'hy-AM', 'id', 'is', 'it', 'ja', 'kk', 'km', 'kn', 'ko', 'ku',
            'lg', 'lij', 'lt', 'lv', 'mai', 'mg', 'mk', 'ml', 'mr', 'ms',
            'nb-NO', 'nl', 'nn-NO', 'nr', 'nso', 'or', 'pa-IN', 'pl', 'pt-BR',
            'pt-PT', 'rm', 'ro', 'ru', 'si', 'sk', 'sl', 'son', 'sq', 'sr',
            'ss', 'st', 'sv-SE', 'sw', 'sw-TZ', 'ta', 'ta-IN', 'ta-LK', 'te',
            'th', 'tn', 'tr', 'ts', 'uk', 've', 'vi', 'wa', 'wo-SN', 'xh',
            'zap-MX-diiste', 'zh-CN', 'zh-TW', 'zu']
        uc = UpdateCount(addon_id=3615)
        self.command.update_locale(uc, 'foobar', 123)  # Non-existent locale.
        assert not uc.locales
        for locale in current_locales:
            self.command.update_locale(uc, locale, 1)
        assert len(uc.locales) == len(current_locales)

    def test_trim_field(self):
        # trim_field drops least-used entries until the serialized dict fits
        # in the 2**16 - 1 byte database column.
        uc = UpdateCount(addon_id=3615, count=1, date='2015-01-11')
        self.command.trim_field(uc.versions)  # Empty field.
        assert not uc.versions
        uc.versions = {'3.6': 123, '3.7': 321}
        self.command.trim_field(uc.versions)  # Small enough to fit in the db.
        assert uc.versions == {'3.6': 123, '3.7': 321}  # Unchanged.
        very_long_key = 'x' * (2 ** 16)
        uc.versions[very_long_key] = 1
        self.command.trim_field(uc.versions)  # Too big, must be trimmed.
        assert uc.versions == {'3.6': 123, '3.7': 321}  # Keep the most used.
        uc.versions[very_long_key] = 1000  # Most used.
        self.command.trim_field(uc.versions)  # Too big, must be trimmed.
        # Nothing left: least used removed, but still too big, so all the keys
        # were removed.
        assert uc.versions == {}
        # Make sure we can store a very large field in the database.
        long_key = 'x' * 65528  # This makes the dict barely fit in the db.
        uc.versions[long_key] = 1
        assert len(json.dumps(uc.versions)) == (2 ** 16) - 1
        uc.save()
        uc = UpdateCount.objects.get(pk=uc.pk)  # Reload
        # Fits in the database, so no truncation.
        assert len(json.dumps(uc.versions)) == (2 ** 16) - 1

    @mock.patch(
        'olympia.stats.management.commands.download_counts_from_file.'
        'save_stats_to_file')
    def test_download_counts_from_file(self, mock_save_stats_to_file):
        # Create the necessary "valid download sources" entries.
        DownloadSource.objects.create(name='search', type='full')
        DownloadSource.objects.create(name='coll', type='prefix')
        management.call_command('download_counts_from_file', hive_folder,
                                date=self.date)
        assert DownloadCount.objects.all().count() == 1
        download_count = DownloadCount.objects.last()
        assert download_count.count == 2
        assert download_count.date == date(2014, 7, 10)
        assert download_count.sources == {u'search': 1, u'collection': 1}
        # save_stats_to_file is called with a non-saved model.
        download_count.id = None
        mock_save_stats_to_file.assert_called_once_with(download_count)

    @mock.patch('olympia.stats.management.commands.save_stats_to_file')
    def test_theme_update_counts_from_file(self, mock_save_stats_to_file):
        management.call_command('theme_update_counts_from_file', hive_folder,
                                date=self.date)
        assert ThemeUpdateCount.objects.all().count() == 2
        tuc1 = ThemeUpdateCount.objects.get(addon_id=3615)
        assert tuc1.count == 2
        # Persona 813 has addon id 15663: we need the count to be the sum of
        # the "old" request on the persona_id 813 (only the one with the source
        # "gp") and the "new" request on the addon_id 15663.
        tuc2 = ThemeUpdateCount.objects.get(addon_id=15663)
        assert tuc2.count == 15
        assert mock_save_stats_to_file.call_count == 2
        # save_stats_to_file is called with a non-saved model.
        tuc1.id = None
        tuc2.id = None
        mock_save_stats_to_file.assert_has_calls(
            [mock.call(tuc1), mock.call(tuc2)])

    def test_update_theme_popularity_movers(self):
        # Create ThemeUpdateCount entries for the persona 559 with addon_id
        # 15663 and the persona 575 with addon_id 15679 for the last 28 days.
        # We start from the previous day, as the theme_update_counts_from_*
        # scripts are gathering data for the day before.
        today = date.today()
        yesterday = today - timedelta(days=1)
        for i in range(28):
            d = yesterday - timedelta(days=i)
            ThemeUpdateCount.objects.create(addon_id=15663, count=i, date=d)
            ThemeUpdateCount.objects.create(addon_id=15679,
                                            count=i * 100, date=d)
        # Compute the popularity and movers.
        management.call_command('update_theme_popularity_movers')
        p1 = Persona.objects.get(pk=559)
        p2 = Persona.objects.get(pk=575)
        # The popularity is the average over the last 7 days, and as we created
        # entries with one more user per day in the past (or 100 more), the
        # calculation is "sum(range(7)) / 7" (or "sum(range(7)) * 100 / 7").
        assert p1.popularity == 3  # sum(range(7)) / 7
        assert p2.popularity == 300  # sum(range(7)) * 100 / 7
        # A ThemeUserCount row should have been created for each Persona with
        # today's date and the Persona popularity.
        t1 = ThemeUserCount.objects.get(addon_id=15663)
        t2 = ThemeUserCount.objects.get(addon_id=15679)
        assert t1.date == today
        assert t1.count == p1.popularity
        assert t2.date == today
        assert t2.count == p2.popularity
        # Three weeks avg (sum(range(21)) / 21) = 10 so (3 - 10) / 10.
        # The movers is computed with the following formula:
        # previous_3_weeks: the average over the 21 days before the last 7 days
        # movers: (popularity - previous_3_weeks) / previous_3_weeks
        # The calculation for the previous_3_weeks is:
        # previous_3_weeks: (sum(range(28) - sum(range(7))) * 100 / 21 == 1700.
        assert p1.movers == 0.0  # Because the popularity is <= 100.
        # We round the results to cope with floating point imprecision.
        assert round(p2.movers, 5) == round((300.0 - 1700) / 1700, 5)

    def test_is_valid_source(self):
        # A source is valid if it matches a "full" name exactly, or starts
        # with one of the "prefix" names.
        assert is_valid_source('foo',
                               fulls=['foo', 'bar'],
                               prefixes=['baz', 'cruux'])
        assert not is_valid_source('foob',
                                   fulls=['foo', 'bar'],
                                   prefixes=['baz', 'cruux'])
        assert is_valid_source('foobaz',
                               fulls=['foo', 'bar'],
                               prefixes=['baz', 'cruux'])
        assert not is_valid_source('ba',
                                   fulls=['foo', 'bar'],
                                   prefixes=['baz', 'cruux'])
class TestThemeADICommand(FixturesFolderMixin, TestCase):
    """Regression test for bug 1093699: importing update counts for a theme
    add-on from a hive export file.
    """
    date = '2014-11-06'
    fixtures = ['base/appversion.json']
    source_folder = '1093699'

    @mock.patch(
        'olympia.stats.management.commands.update_counts_from_file.'
        'save_stats_to_file')
    def test_update_counts_from_file_bug_1093699(self,
                                                 mock_save_stats_to_file):
        Addon.objects.create(guid='{fe9e9f88-42f0-40dc-970b-4b0e6b7a3d0b}',
                             type=amo.ADDON_THEME)
        management.call_command('update_counts_from_file', hive_folder,
                                date=self.date)
        assert UpdateCount.objects.all().count() == 1
        uc = UpdateCount.objects.last()
        assert uc.count == 1320
        # Bug fix: `date(2014, 11, 06)` used a leading-zero decimal literal,
        # which is a SyntaxError under Python 3 (same value as 6 in Python 2).
        assert uc.date == date(2014, 11, 6)
        assert (uc.versions ==
                {u'1.7.16': 1, u'userEnabled': 3, u'1.7.13': 2, u'1.7.11': 3,
                 u'1.6.0': 1, u'1.7.14': 1304, u'1.7.6': 6})
        assert (uc.statuses ==
                {u'Unknown': 3, u'userEnabled': 1259, u'userDisabled': 58})
        assert uc.oses == {u'WINNT': 1122, u'Darwin': 114, u'Linux': 84}
        assert uc.locales[u'es-ES'] == 20
        assert (uc.applications[u'{92650c4d-4b8e-4d2a-b7eb-24ecf4f6b63a}'] ==
                {u'2.0': 3})
        uc.id = None  # save_stats_to_file is called with a non-saved model.
        mock_save_stats_to_file.assert_called_once_with(uc)
def test_stats_from_model_theme_update_count():
    """serialize_stats() flattens a ThemeUpdateCount into plain JSON."""
    stat = ThemeUpdateCount(addon_id=321, date='2016-01-18', count=123)
    expected = {'addon': 321, 'count': 123, 'date': '2016-01-18'}
    assert json.loads(serialize_stats(stat)) == expected
def test_stats_from_model_update_count():
    """serialize_stats() preserves every breakdown dict on an UpdateCount."""
    stat = UpdateCount(
        addon_id=321, date='2016-01-18',
        count=123,
        versions={u'3.8': 2, u'3.7': 3},
        statuses={u'userEnabled': 5},
        applications={u'{ec8030f7-c20a-464f-9b0e-13a3a9e97384}':
                      {u'3.6': 18}},
        oses={u'WINNT': 5},
        locales={u'en-us': 1, u'en-US': 4})
    expected = {
        'addon': 321,
        'applications': {
            '{ec8030f7-c20a-464f-9b0e-13a3a9e97384}': {'3.6': 18}},
        'count': 123,
        'date': '2016-01-18',
        'locales': {'en-US': 4, 'en-us': 1},
        'oses': {'WINNT': 5},
        'statuses': {'userEnabled': 5},
        'versions': {'3.7': 3, '3.8': 2},
    }
    assert json.loads(serialize_stats(stat)) == expected
def test_stats_from_model_download_count():
    """serialize_stats() keeps the per-source breakdown on a DownloadCount."""
    stat = DownloadCount(
        addon_id=321, date='2016-01-18', count=123,
        sources={u'search': 1, u'collection': 1})
    expected = {
        'addon': 321,
        'count': 123,
        'date': '2016-01-18',
        'sources': {'collection': 1, 'search': 1},
    }
    assert json.loads(serialize_stats(stat)) == expected
@mock.patch('olympia.stats.management.commands.storage.save')
@mock.patch('olympia.stats.management.commands.ContentFile')
def test_save_stats_to_file(mock_ContentFile, mock_storage):
    """save_stats_to_file() writes to a per-addon, per-date JSON path."""
    mock_ContentFile.return_value = mock.sentinel.content
    tuc = ThemeUpdateCount(addon_id=321, date='2016-01-18', count=123)
    save_stats_to_file(tuc)
    expected_path = '321/2016/01/2016_01_18_themeupdatecount.json'
    mock_storage.assert_called_once_with(expected_path, mock.sentinel.content)
| {
"content_hash": "467adb1116ae6bb7d08c6c9d2d2b27ca",
"timestamp": "",
"source": "github",
"line_count": 358,
"max_line_length": 95,
"avg_line_length": 44.13966480446928,
"alnum_prop": 0.5801797240855587,
"repo_name": "jpetto/olympia",
"id": "cddc3e435062bc2c9ebad09ba2b998fde8b69f57",
"size": "15802",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/olympia/stats/tests/test_commands.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "ApacheConf",
"bytes": "249"
},
{
"name": "CSS",
"bytes": "665496"
},
{
"name": "HTML",
"bytes": "1606994"
},
{
"name": "JavaScript",
"bytes": "1315514"
},
{
"name": "Makefile",
"bytes": "4235"
},
{
"name": "PLSQL",
"bytes": "74"
},
{
"name": "Python",
"bytes": "4026490"
},
{
"name": "Shell",
"bytes": "9145"
},
{
"name": "Smarty",
"bytes": "1930"
}
],
"symlink_target": ""
} |
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package pl.almatron.subfun;
import java.sql.SQLException;
import org.testng.annotations.Test;
/**
*
* @author macvek
*/
public class UseDataSourceTest extends FunTestBase {
@Test
public void testSomeMethod() throws SQLException {
UseDataSource dataSource = context.getBean("useDataSource", UseDataSource.class);
dataSource.action();
}
}
| {
"content_hash": "77d05becac25cb32327d1baedf40ef56",
"timestamp": "",
"source": "github",
"line_count": 23,
"max_line_length": 89,
"avg_line_length": 24.565217391304348,
"alnum_prop": 0.7132743362831858,
"repo_name": "macvek/springfun",
"id": "48e662fb376fc8a20d6b96922dccaa4d886a678f",
"size": "565",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "subfun/src/test/java/pl/almatron/subfun/UseDataSourceTest.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "26799"
}
],
"symlink_target": ""
} |
from __future__ import print_function
import json
import unittest
import bleach
import doctest
import mock
import multiprocessing
import os
import re
import signal
import sqlalchemy
import subprocess
import tempfile
import warnings
from datetime import timedelta
from dateutil.relativedelta import relativedelta
from email.mime.application import MIMEApplication
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from freezegun import freeze_time
from numpy.testing import assert_array_almost_equal
from six.moves.urllib.parse import urlencode
from time import sleep
from airflow import configuration
from airflow.executors import SequentialExecutor
from airflow.models import Variable
configuration.conf.load_test_config()
from airflow import jobs, models, DAG, utils, macros, settings, exceptions
from airflow.models import BaseOperator
from airflow.operators.bash_operator import BashOperator
from airflow.operators.check_operator import CheckOperator, ValueCheckOperator
from airflow.operators.dagrun_operator import TriggerDagRunOperator
from airflow.operators.python_operator import PythonOperator
from airflow.operators.dummy_operator import DummyOperator
from airflow.hooks.base_hook import BaseHook
from airflow.hooks.sqlite_hook import SqliteHook
from airflow.bin import cli
from airflow.www import app as application
from airflow.settings import Session
from airflow.utils import timezone
from airflow.utils.timezone import datetime
from airflow.utils.state import State
from airflow.utils.dates import infer_time_unit, round_time, scale_time_units
from lxml import html
from airflow.exceptions import AirflowException
from airflow.configuration import AirflowConfigException, run_command
from jinja2.sandbox import SecurityError
from jinja2 import UndefinedError
import six
NUM_EXAMPLE_DAGS = 20
DEV_NULL = '/dev/null'
TEST_DAG_FOLDER = os.path.join(
os.path.dirname(os.path.realpath(__file__)), 'dags')
DEFAULT_DATE = datetime(2015, 1, 1)
DEFAULT_DATE_ISO = DEFAULT_DATE.isoformat()
DEFAULT_DATE_DS = DEFAULT_DATE_ISO[:10]
TEST_DAG_ID = 'unit_tests'
try:
import cPickle as pickle
except ImportError:
# Python 3
import pickle
def reset(dag_id=TEST_DAG_ID):
    """Delete every TaskInstance row belonging to ``dag_id``."""
    db_session = Session()
    db_session.query(models.TaskInstance).filter_by(dag_id=dag_id).delete()
    db_session.commit()
    db_session.close()

# Start the module's tests from a clean task-instance table.
reset()
class OperatorSubclass(BaseOperator):
    """
    An operator to test template substitution.

    ``some_templated_field`` is listed in ``template_fields`` so Airflow
    renders it with Jinja before execution.
    """
    template_fields = ['some_templated_field']

    def __init__(self, some_templated_field, *args, **kwargs):
        super(OperatorSubclass, self).__init__(*args, **kwargs)
        self.some_templated_field = some_templated_field

    # Bug fix: `execute` previously omitted the explicit `self` parameter and
    # only worked because the instance was swallowed by *args.
    def execute(self, *args, **kwargs):
        # Intentionally a no-op: these tests only exercise templating.
        pass
class CoreTest(unittest.TestCase):
default_scheduler_args = {"num_runs": 1}
    def setUp(self):
        # Load the test config and the bundled example DAGs so individual
        # tests can reference well-known tasks from example_bash_operator.
        configuration.conf.load_test_config()
        self.dagbag = models.DagBag(
            dag_folder=DEV_NULL, include_examples=True)
        self.args = {'owner': 'airflow', 'start_date': DEFAULT_DATE}
        self.dag = DAG(TEST_DAG_ID, default_args=self.args)
        self.dag_bash = self.dagbag.dags['example_bash_operator']
        # Handy references to three tasks of the example DAG.
        self.runme_0 = self.dag_bash.get_task('runme_0')
        self.run_after_loop = self.dag_bash.get_task('run_after_loop')
        self.run_this_last = self.dag_bash.get_task('run_this_last')
    def test_schedule_dag_no_previous_runs(self):
        """
        Tests scheduling a dag with no previous runs
        """
        dag = DAG(TEST_DAG_ID + 'test_schedule_dag_no_previous_runs')
        dag.add_task(models.BaseOperator(
            task_id="faketastic",
            owner='Also fake',
            start_date=datetime(2015, 1, 2, 0, 0)))

        dag_run = jobs.SchedulerJob(**self.default_scheduler_args).create_dag_run(dag)
        self.assertIsNotNone(dag_run)
        self.assertEqual(dag.dag_id, dag_run.dag_id)
        self.assertIsNotNone(dag_run.run_id)
        self.assertNotEqual('', dag_run.run_id)
        # The first run should be scheduled at the task's start_date.
        self.assertEqual(
            datetime(2015, 1, 2, 0, 0),
            dag_run.execution_date,
            msg='dag_run.execution_date did not match expectation: {0}'
            .format(dag_run.execution_date)
        )
        self.assertEqual(State.RUNNING, dag_run.state)
        self.assertFalse(dag_run.external_trigger)
        dag.clear()
def test_schedule_dag_fake_scheduled_previous(self):
"""
Test scheduling a dag where there is a prior DagRun
which has the same run_id as the next run should have
"""
delta = timedelta(hours=1)
dag = DAG(TEST_DAG_ID + 'test_schedule_dag_fake_scheduled_previous',
schedule_interval=delta,
start_date=DEFAULT_DATE)
dag.add_task(models.BaseOperator(
task_id="faketastic",
owner='Also fake',
start_date=DEFAULT_DATE))
scheduler = jobs.SchedulerJob(**self.default_scheduler_args)
dag.create_dagrun(run_id=models.DagRun.id_for_date(DEFAULT_DATE),
execution_date=DEFAULT_DATE,
state=State.SUCCESS,
external_trigger=True)
dag_run = scheduler.create_dag_run(dag)
self.assertIsNotNone(dag_run)
self.assertEqual(dag.dag_id, dag_run.dag_id)
self.assertIsNotNone(dag_run.run_id)
self.assertNotEqual('', dag_run.run_id)
self.assertEqual(
DEFAULT_DATE + delta,
dag_run.execution_date,
msg='dag_run.execution_date did not match expectation: {0}'
.format(dag_run.execution_date)
)
self.assertEqual(State.RUNNING, dag_run.state)
self.assertFalse(dag_run.external_trigger)
def test_schedule_dag_once(self):
"""
Tests scheduling a dag scheduled for @once - should be scheduled the first time
it is called, and not scheduled the second.
"""
dag = DAG(TEST_DAG_ID + 'test_schedule_dag_once')
dag.schedule_interval = '@once'
dag.add_task(models.BaseOperator(
task_id="faketastic",
owner='Also fake',
start_date=datetime(2015, 1, 2, 0, 0)))
dag_run = jobs.SchedulerJob(**self.default_scheduler_args).create_dag_run(dag)
dag_run2 = jobs.SchedulerJob(**self.default_scheduler_args).create_dag_run(dag)
self.assertIsNotNone(dag_run)
self.assertIsNone(dag_run2)
dag.clear()
def test_fractional_seconds(self):
"""
Tests if fractional seconds are stored in the database
"""
dag = DAG(TEST_DAG_ID + 'test_fractional_seconds')
dag.schedule_interval = '@once'
dag.add_task(models.BaseOperator(
task_id="faketastic",
owner='Also fake',
start_date=datetime(2015, 1, 2, 0, 0)))
start_date = timezone.utcnow()
run = dag.create_dagrun(
run_id='test_' + start_date.isoformat(),
execution_date=start_date,
start_date=start_date,
state=State.RUNNING,
external_trigger=False
)
run.refresh_from_db()
self.assertEqual(start_date, run.execution_date,
"dag run execution_date loses precision")
self.assertEqual(start_date, run.start_date,
"dag run start_date loses precision ")
def test_schedule_dag_start_end_dates(self):
"""
Tests that an attempt to schedule a task after the Dag's end_date
does not succeed.
"""
delta = timedelta(hours=1)
runs = 3
start_date = DEFAULT_DATE
end_date = start_date + (runs - 1) * delta
dag = DAG(TEST_DAG_ID + 'test_schedule_dag_start_end_dates',
start_date=start_date,
end_date=end_date,
schedule_interval=delta)
dag.add_task(models.BaseOperator(task_id='faketastic',
owner='Also fake'))
# Create and schedule the dag runs
dag_runs = []
scheduler = jobs.SchedulerJob(**self.default_scheduler_args)
for i in range(runs):
dag_runs.append(scheduler.create_dag_run(dag))
additional_dag_run = scheduler.create_dag_run(dag)
for dag_run in dag_runs:
self.assertIsNotNone(dag_run)
self.assertIsNone(additional_dag_run)
@freeze_time('2016-01-01')
def test_schedule_dag_no_end_date_up_to_today_only(self):
"""
Tests that a Dag created without an end_date can only be scheduled up
to and including the current datetime.
For example, if today is 2016-01-01 and we are scheduling from a
start_date of 2015-01-01, only jobs up to, but not including
2016-01-01 should be scheduled.
"""
session = settings.Session()
delta = timedelta(days=1)
start_date = DEFAULT_DATE
runs = 365
dag = DAG(TEST_DAG_ID + 'test_schedule_dag_no_end_date_up_to_today_only',
start_date=start_date,
schedule_interval=delta)
dag.add_task(models.BaseOperator(task_id='faketastic',
owner='Also fake'))
dag_runs = []
scheduler = jobs.SchedulerJob(**self.default_scheduler_args)
for i in range(runs):
dag_run = scheduler.create_dag_run(dag)
dag_runs.append(dag_run)
# Mark the DagRun as complete
dag_run.state = State.SUCCESS
session.merge(dag_run)
session.commit()
# Attempt to schedule an additional dag run (for 2016-01-01)
additional_dag_run = scheduler.create_dag_run(dag)
for dag_run in dag_runs:
self.assertIsNotNone(dag_run)
self.assertIsNone(additional_dag_run)
def test_confirm_unittest_mod(self):
self.assertTrue(configuration.conf.get('core', 'unit_test_mode'))
def test_pickling(self):
dp = self.dag.pickle()
self.assertEqual(dp.pickle.dag_id, self.dag.dag_id)
def test_rich_comparison_ops(self):
class DAGsubclass(DAG):
pass
dag_eq = DAG(TEST_DAG_ID, default_args=self.args)
dag_diff_load_time = DAG(TEST_DAG_ID, default_args=self.args)
dag_diff_name = DAG(TEST_DAG_ID + '_neq', default_args=self.args)
dag_subclass = DAGsubclass(TEST_DAG_ID, default_args=self.args)
dag_subclass_diff_name = DAGsubclass(
TEST_DAG_ID + '2', default_args=self.args)
for d in [dag_eq, dag_diff_name, dag_subclass, dag_subclass_diff_name]:
d.last_loaded = self.dag.last_loaded
# test identity equality
self.assertEqual(self.dag, self.dag)
# test dag (in)equality based on _comps
self.assertEqual(dag_eq, self.dag)
self.assertNotEqual(dag_diff_name, self.dag)
self.assertNotEqual(dag_diff_load_time, self.dag)
# test dag inequality based on type even if _comps happen to match
self.assertNotEqual(dag_subclass, self.dag)
# a dag should equal an unpickled version of itself
d = pickle.dumps(self.dag)
self.assertEqual(pickle.loads(d), self.dag)
# dags are ordered based on dag_id no matter what the type is
self.assertLess(self.dag, dag_diff_name)
self.assertGreater(self.dag, dag_diff_load_time)
self.assertLess(self.dag, dag_subclass_diff_name)
# greater than should have been created automatically by functools
self.assertGreater(dag_diff_name, self.dag)
# hashes are non-random and match equality
self.assertEqual(hash(self.dag), hash(self.dag))
self.assertEqual(hash(dag_eq), hash(self.dag))
self.assertNotEqual(hash(dag_diff_name), hash(self.dag))
self.assertNotEqual(hash(dag_subclass), hash(self.dag))
def test_check_operators(self):
conn_id = "sqlite_default"
captainHook = BaseHook.get_hook(conn_id=conn_id)
captainHook.run("CREATE TABLE operator_test_table (a, b)")
captainHook.run("insert into operator_test_table values (1,2)")
t = CheckOperator(
task_id='check',
sql="select count(*) from operator_test_table",
conn_id=conn_id,
dag=self.dag)
t.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
t = ValueCheckOperator(
task_id='value_check',
pass_value=95,
tolerance=0.1,
conn_id=conn_id,
sql="SELECT 100",
dag=self.dag)
t.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
captainHook.run("drop table operator_test_table")
def test_clear_api(self):
task = self.dag_bash.tasks[0]
task.clear(
start_date=DEFAULT_DATE, end_date=DEFAULT_DATE,
upstream=True, downstream=True)
ti = models.TaskInstance(task=task, execution_date=DEFAULT_DATE)
ti.are_dependents_done()
def test_illegal_args(self):
"""
Tests that Operators reject illegal arguments
"""
with warnings.catch_warnings(record=True) as w:
t = BashOperator(
task_id='test_illegal_args',
bash_command='echo success',
dag=self.dag,
illegal_argument_1234='hello?')
self.assertTrue(
issubclass(w[0].category, PendingDeprecationWarning))
self.assertIn(
'Invalid arguments were passed to BashOperator.',
w[0].message.args[0])
def test_bash_operator(self):
t = BashOperator(
task_id='test_bash_operator',
bash_command="echo success",
dag=self.dag)
t.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
def test_bash_operator_multi_byte_output(self):
t = BashOperator(
task_id='test_multi_byte_bash_operator',
bash_command=u"echo \u2600",
dag=self.dag,
output_encoding='utf-8')
t.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
def test_bash_operator_kill(self):
import psutil
sleep_time = "100%d" % os.getpid()
t = BashOperator(
task_id='test_bash_operator_kill',
execution_timeout=timedelta(seconds=1),
bash_command="/bin/bash -c 'sleep %s'" % sleep_time,
dag=self.dag)
self.assertRaises(
exceptions.AirflowTaskTimeout,
t.run,
start_date=DEFAULT_DATE, end_date=DEFAULT_DATE)
sleep(2)
pid = -1
for proc in psutil.process_iter():
if proc.cmdline() == ['sleep', sleep_time]:
pid = proc.pid
if pid != -1:
os.kill(pid, signal.SIGTERM)
self.fail("BashOperator's subprocess still running after stopping on timeout!")
def test_trigger_dagrun(self):
def trigga(context, obj):
if True:
return obj
t = TriggerDagRunOperator(
task_id='test_trigger_dagrun',
trigger_dag_id='example_bash_operator',
python_callable=trigga,
dag=self.dag)
t.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
def test_dryrun(self):
t = BashOperator(
task_id='test_dryrun',
bash_command="echo success",
dag=self.dag)
t.dry_run()
def test_sqlite(self):
import airflow.operators.sqlite_operator
t = airflow.operators.sqlite_operator.SqliteOperator(
task_id='time_sqlite',
sql="CREATE TABLE IF NOT EXISTS unitest (dummy VARCHAR(20))",
dag=self.dag)
t.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
def test_timeout(self):
t = PythonOperator(
task_id='test_timeout',
execution_timeout=timedelta(seconds=1),
python_callable=lambda: sleep(5),
dag=self.dag)
self.assertRaises(
exceptions.AirflowTaskTimeout,
t.run,
start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
def test_python_op(self):
def test_py_op(templates_dict, ds, **kwargs):
if not templates_dict['ds'] == ds:
raise Exception("failure")
t = PythonOperator(
task_id='test_py_op',
provide_context=True,
python_callable=test_py_op,
templates_dict={'ds': "{{ ds }}"},
dag=self.dag)
t.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
def test_complex_template(self):
def verify_templated_field(context):
self.assertEqual(context['ti'].task.some_templated_field['bar'][1],
context['ds'])
t = OperatorSubclass(
task_id='test_complex_template',
some_templated_field={
'foo': '123',
'bar': ['baz', '{{ ds }}']
},
dag=self.dag)
t.execute = verify_templated_field
t.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
def test_template_with_variable(self):
"""
Test the availability of variables in templates
"""
val = {
'test_value': 'a test value'
}
Variable.set("a_variable", val['test_value'])
def verify_templated_field(context):
self.assertEqual(context['ti'].task.some_templated_field,
val['test_value'])
t = OperatorSubclass(
task_id='test_complex_template',
some_templated_field='{{ var.value.a_variable }}',
dag=self.dag)
t.execute = verify_templated_field
t.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
def test_template_with_json_variable(self):
"""
Test the availability of variables (serialized as JSON) in templates
"""
val = {
'test_value': {'foo': 'bar', 'obj': {'v1': 'yes', 'v2': 'no'}}
}
Variable.set("a_variable", val['test_value'], serialize_json=True)
def verify_templated_field(context):
self.assertEqual(context['ti'].task.some_templated_field,
val['test_value']['obj']['v2'])
t = OperatorSubclass(
task_id='test_complex_template',
some_templated_field='{{ var.json.a_variable.obj.v2 }}',
dag=self.dag)
t.execute = verify_templated_field
t.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
def test_template_with_json_variable_as_value(self):
"""
Test the availability of variables (serialized as JSON) in templates, but
accessed as a value
"""
val = {
'test_value': {'foo': 'bar'}
}
Variable.set("a_variable", val['test_value'], serialize_json=True)
def verify_templated_field(context):
self.assertEqual(context['ti'].task.some_templated_field,
u'{"foo": "bar"}')
t = OperatorSubclass(
task_id='test_complex_template',
some_templated_field='{{ var.value.a_variable }}',
dag=self.dag)
t.execute = verify_templated_field
t.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
def test_template_non_bool(self):
"""
Test templates can handle objects with no sense of truthiness
"""
class NonBoolObject(object):
def __len__(self):
return NotImplemented
def __bool__(self):
return NotImplemented
t = OperatorSubclass(
task_id='test_bad_template_obj',
some_templated_field=NonBoolObject(),
dag=self.dag)
t.resolve_template_files()
def test_import_examples(self):
self.assertEqual(len(self.dagbag.dags), NUM_EXAMPLE_DAGS)
def test_local_task_job(self):
TI = models.TaskInstance
ti = TI(
task=self.runme_0, execution_date=DEFAULT_DATE)
job = jobs.LocalTaskJob(task_instance=ti, ignore_ti_state=True)
job.run()
def test_raw_job(self):
TI = models.TaskInstance
ti = TI(
task=self.runme_0, execution_date=DEFAULT_DATE)
ti.dag = self.dag_bash
ti.run(ignore_ti_state=True)
def test_doctests(self):
modules = [utils, macros]
for mod in modules:
failed, tests = doctest.testmod(mod)
if failed:
raise Exception("Failed a doctest")
def test_variable_set_get_round_trip(self):
Variable.set("tested_var_set_id", "Monday morning breakfast")
self.assertEqual("Monday morning breakfast", Variable.get("tested_var_set_id"))
def test_variable_set_get_round_trip_json(self):
value = {"a": 17, "b": 47}
Variable.set("tested_var_set_id", value, serialize_json=True)
self.assertEqual(value, Variable.get("tested_var_set_id", deserialize_json=True))
def test_get_non_existing_var_should_return_default(self):
default_value = "some default val"
self.assertEqual(default_value, Variable.get("thisIdDoesNotExist",
default_var=default_value))
def test_get_non_existing_var_should_not_deserialize_json_default(self):
default_value = "}{ this is a non JSON default }{"
self.assertEqual(default_value, Variable.get("thisIdDoesNotExist",
default_var=default_value,
deserialize_json=True))
def test_variable_setdefault_round_trip(self):
key = "tested_var_setdefault_1_id"
value = "Monday morning breakfast in Paris"
Variable.setdefault(key, value)
self.assertEqual(value, Variable.get(key))
def test_variable_setdefault_round_trip_json(self):
key = "tested_var_setdefault_2_id"
value = {"city": 'Paris', "Hapiness": True}
Variable.setdefault(key, value, deserialize_json=True)
self.assertEqual(value, Variable.get(key, deserialize_json=True))
def test_variable_setdefault_existing_json(self):
key = "tested_var_setdefault_2_id"
value = {"city": 'Paris', "Hapiness": True}
Variable.set(key, value, serialize_json=True)
val = Variable.setdefault(key, value, deserialize_json=True)
# Check the returned value, and the stored value are handled correctly.
self.assertEqual(value, val)
self.assertEqual(value, Variable.get(key, deserialize_json=True))
def test_parameterized_config_gen(self):
cfg = configuration.parameterized_config(configuration.DEFAULT_CONFIG)
# making sure some basic building blocks are present:
self.assertIn("[core]", cfg)
self.assertIn("dags_folder", cfg)
self.assertIn("sql_alchemy_conn", cfg)
self.assertIn("fernet_key", cfg)
# making sure replacement actually happened
self.assertNotIn("{AIRFLOW_HOME}", cfg)
self.assertNotIn("{FERNET_KEY}", cfg)
def test_config_use_original_when_original_and_fallback_are_present(self):
self.assertTrue(configuration.conf.has_option("core", "FERNET_KEY"))
self.assertFalse(configuration.conf.has_option("core", "FERNET_KEY_CMD"))
FERNET_KEY = configuration.conf.get('core', 'FERNET_KEY')
configuration.conf.set("core", "FERNET_KEY_CMD", "printf HELLO")
FALLBACK_FERNET_KEY = configuration.conf.get(
"core",
"FERNET_KEY"
)
self.assertEqual(FERNET_KEY, FALLBACK_FERNET_KEY)
# restore the conf back to the original state
configuration.conf.remove_option("core", "FERNET_KEY_CMD")
def test_config_throw_error_when_original_and_fallback_is_absent(self):
self.assertTrue(configuration.conf.has_option("core", "FERNET_KEY"))
self.assertFalse(configuration.conf.has_option("core", "FERNET_KEY_CMD"))
FERNET_KEY = configuration.conf.get("core", "FERNET_KEY")
configuration.conf.remove_option("core", "FERNET_KEY")
with self.assertRaises(AirflowConfigException) as cm:
configuration.conf.get("core", "FERNET_KEY")
exception = str(cm.exception)
message = "section/key [core/fernet_key] not found in config"
self.assertEqual(message, exception)
# restore the conf back to the original state
configuration.conf.set("core", "FERNET_KEY", FERNET_KEY)
self.assertTrue(configuration.conf.has_option("core", "FERNET_KEY"))
def test_config_override_original_when_non_empty_envvar_is_provided(self):
key = "AIRFLOW__CORE__FERNET_KEY"
value = "some value"
self.assertNotIn(key, os.environ)
os.environ[key] = value
FERNET_KEY = configuration.conf.get('core', 'FERNET_KEY')
self.assertEqual(value, FERNET_KEY)
# restore the envvar back to the original state
del os.environ[key]
def test_config_override_original_when_empty_envvar_is_provided(self):
key = "AIRFLOW__CORE__FERNET_KEY"
value = ""
self.assertNotIn(key, os.environ)
os.environ[key] = value
FERNET_KEY = configuration.conf.get('core', 'FERNET_KEY')
self.assertEqual(value, FERNET_KEY)
# restore the envvar back to the original state
del os.environ[key]
def test_round_time(self):
rt1 = round_time(datetime(2015, 1, 1, 6), timedelta(days=1))
self.assertEqual(datetime(2015, 1, 1, 0, 0), rt1)
rt2 = round_time(datetime(2015, 1, 2), relativedelta(months=1))
self.assertEqual(datetime(2015, 1, 1, 0, 0), rt2)
rt3 = round_time(datetime(2015, 9, 16, 0, 0), timedelta(1), datetime(
2015, 9, 14, 0, 0))
self.assertEqual(datetime(2015, 9, 16, 0, 0), rt3)
rt4 = round_time(datetime(2015, 9, 15, 0, 0), timedelta(1), datetime(
2015, 9, 14, 0, 0))
self.assertEqual(datetime(2015, 9, 15, 0, 0), rt4)
rt5 = round_time(datetime(2015, 9, 14, 0, 0), timedelta(1), datetime(
2015, 9, 14, 0, 0))
self.assertEqual(datetime(2015, 9, 14, 0, 0), rt5)
rt6 = round_time(datetime(2015, 9, 13, 0, 0), timedelta(1), datetime(
2015, 9, 14, 0, 0))
self.assertEqual(datetime(2015, 9, 14, 0, 0), rt6)
def test_infer_time_unit(self):
self.assertEqual('minutes', infer_time_unit([130, 5400, 10]))
self.assertEqual('seconds', infer_time_unit([110, 50, 10, 100]))
self.assertEqual('hours', infer_time_unit([100000, 50000, 10000, 20000]))
self.assertEqual('days', infer_time_unit([200000, 100000]))
def test_scale_time_units(self):
# use assert_almost_equal from numpy.testing since we are comparing
# floating point arrays
arr1 = scale_time_units([130, 5400, 10], 'minutes')
assert_array_almost_equal(arr1, [2.167, 90.0, 0.167], decimal=3)
arr2 = scale_time_units([110, 50, 10, 100], 'seconds')
assert_array_almost_equal(arr2, [110.0, 50.0, 10.0, 100.0], decimal=3)
arr3 = scale_time_units([100000, 50000, 10000, 20000], 'hours')
assert_array_almost_equal(arr3, [27.778, 13.889, 2.778, 5.556],
decimal=3)
arr4 = scale_time_units([200000, 100000], 'days')
assert_array_almost_equal(arr4, [2.315, 1.157], decimal=3)
def test_duplicate_dependencies(self):
regexp = "Dependency (.*)runme_0(.*)run_after_loop(.*) " \
"already registered"
with self.assertRaisesRegexp(AirflowException, regexp):
self.runme_0.set_downstream(self.run_after_loop)
with self.assertRaisesRegexp(AirflowException, regexp):
self.run_after_loop.set_upstream(self.runme_0)
def test_bad_trigger_rule(self):
with self.assertRaises(AirflowException):
DummyOperator(
task_id='test_bad_trigger',
trigger_rule="non_existant",
dag=self.dag)
def test_terminate_task(self):
"""If a task instance's db state get deleted, it should fail"""
TI = models.TaskInstance
dag = self.dagbag.dags.get('test_utils')
task = dag.task_dict.get('sleeps_forever')
ti = TI(task=task, execution_date=DEFAULT_DATE)
job = jobs.LocalTaskJob(
task_instance=ti, ignore_ti_state=True, executor=SequentialExecutor())
# Running task instance asynchronously
p = multiprocessing.Process(target=job.run)
p.start()
sleep(5)
settings.engine.dispose()
session = settings.Session()
ti.refresh_from_db(session=session)
# making sure it's actually running
self.assertEqual(State.RUNNING, ti.state)
ti = session.query(TI).filter_by(
dag_id=task.dag_id,
task_id=task.task_id,
execution_date=DEFAULT_DATE
).one()
# deleting the instance should result in a failure
session.delete(ti)
session.commit()
# waiting for the async task to finish
p.join()
# making sure that the task ended up as failed
ti.refresh_from_db(session=session)
self.assertEqual(State.FAILED, ti.state)
session.close()
def test_task_fail_duration(self):
"""If a task fails, the duration should be recorded in TaskFail"""
p = BashOperator(
task_id='pass_sleepy',
bash_command='sleep 3',
dag=self.dag)
f = BashOperator(
task_id='fail_sleepy',
bash_command='sleep 5',
execution_timeout=timedelta(seconds=3),
retry_delay=timedelta(seconds=0),
dag=self.dag)
session = settings.Session()
try:
p.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
except:
pass
try:
f.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
except:
pass
p_fails = session.query(models.TaskFail).filter_by(
task_id='pass_sleepy',
dag_id=self.dag.dag_id,
execution_date=DEFAULT_DATE).all()
f_fails = session.query(models.TaskFail).filter_by(
task_id='fail_sleepy',
dag_id=self.dag.dag_id,
execution_date=DEFAULT_DATE).all()
print(f_fails)
self.assertEqual(0, len(p_fails))
self.assertEqual(1, len(f_fails))
# C
self.assertGreaterEqual(sum([f.duration for f in f_fails]), 3)
def test_dag_stats(self):
"""Correctly sets/dirties/cleans rows of DagStat table"""
session = settings.Session()
session.query(models.DagRun).delete()
session.query(models.DagStat).delete()
session.commit()
models.DagStat.update([], session=session)
run1 = self.dag_bash.create_dagrun(
run_id="run1",
execution_date=DEFAULT_DATE,
state=State.RUNNING)
models.DagStat.update([self.dag_bash.dag_id], session=session)
qry = session.query(models.DagStat).all()
self.assertEqual(3, len(qry))
self.assertEqual(self.dag_bash.dag_id, qry[0].dag_id)
for stats in qry:
if stats.state == State.RUNNING:
self.assertEqual(stats.count, 1)
else:
self.assertEqual(stats.count, 0)
self.assertFalse(stats.dirty)
run2 = self.dag_bash.create_dagrun(
run_id="run2",
execution_date=DEFAULT_DATE + timedelta(days=1),
state=State.RUNNING)
models.DagStat.update([self.dag_bash.dag_id], session=session)
qry = session.query(models.DagStat).all()
self.assertEqual(3, len(qry))
self.assertEqual(self.dag_bash.dag_id, qry[0].dag_id)
for stats in qry:
if stats.state == State.RUNNING:
self.assertEqual(stats.count, 2)
else:
self.assertEqual(stats.count, 0)
self.assertFalse(stats.dirty)
session.query(models.DagRun).first().state = State.SUCCESS
session.commit()
models.DagStat.update([self.dag_bash.dag_id], session=session)
qry = session.query(models.DagStat).filter(models.DagStat.state == State.SUCCESS).all()
self.assertEqual(1, len(qry))
self.assertEqual(self.dag_bash.dag_id, qry[0].dag_id)
self.assertEqual(State.SUCCESS, qry[0].state)
self.assertEqual(1, qry[0].count)
self.assertFalse(qry[0].dirty)
qry = session.query(models.DagStat).filter(models.DagStat.state == State.RUNNING).all()
self.assertEqual(1, len(qry))
self.assertEqual(self.dag_bash.dag_id, qry[0].dag_id)
self.assertEqual(State.RUNNING, qry[0].state)
self.assertEqual(1, qry[0].count)
self.assertFalse(qry[0].dirty)
session.query(models.DagRun).delete()
session.query(models.DagStat).delete()
session.commit()
session.close()
def test_run_command(self):
if six.PY3:
write = r'sys.stdout.buffer.write("\u1000foo".encode("utf8"))'
else:
write = r'sys.stdout.write(u"\u1000foo".encode("utf8"))'
cmd = 'import sys; {0}; sys.stdout.flush()'.format(write)
self.assertEqual(run_command("python -c '{0}'".format(cmd)),
u'\u1000foo' if six.PY3 else 'foo')
self.assertEqual(run_command('echo "foo bar"'), u'foo bar\n')
self.assertRaises(AirflowConfigException, run_command, 'bash -c "exit 1"')
class CliTests(unittest.TestCase):
    @classmethod
    def setUpClass(cls):
        """Ensure a clean Pool/Variable state before any CLI test runs."""
        super(CliTests, cls).setUpClass()
        cls._cleanup()
    def setUp(self):
        """Build a CLI parser, a test Flask app, a DagBag and a DB session."""
        super(CliTests, self).setUp()
        configuration.load_test_config()
        app = application.create_app()
        app.config['TESTING'] = True
        self.parser = cli.CLIFactory.get_parser()
        self.dagbag = models.DagBag(dag_folder=DEV_NULL, include_examples=True)
        self.session = Session()
    def tearDown(self):
        """Wipe Pool/Variable rows created during the test."""
        self._cleanup(session=self.session)
        super(CliTests, self).tearDown()
@staticmethod
def _cleanup(session=None):
if session is None:
session = Session()
session.query(models.Pool).delete()
session.query(models.Variable).delete()
session.commit()
session.close()
def test_cli_list_dags(self):
args = self.parser.parse_args(['list_dags', '--report'])
cli.list_dags(args)
    def test_cli_create_user_random_password(self):
        """`create_user --use_random_password` succeeds without -p."""
        args = self.parser.parse_args([
            'create_user', '-u', 'test1', '-l', 'doe', '-f', 'jon',
            '-e', '[email protected]', '-r', 'Viewer', '--use_random_password'
        ])
        cli.create_user(args)
    def test_cli_create_user_supplied_password(self):
        """`create_user -p <password>` succeeds with an explicit password."""
        args = self.parser.parse_args([
            'create_user', '-u', 'test2', '-l', 'doe', '-f', 'jon',
            '-e', '[email protected]', '-r', 'Viewer', '-p', 'test'
        ])
        cli.create_user(args)
def test_cli_list_tasks(self):
for dag_id in self.dagbag.dags.keys():
args = self.parser.parse_args(['list_tasks', dag_id])
cli.list_tasks(args)
args = self.parser.parse_args([
'list_tasks', 'example_bash_operator', '--tree'])
cli.list_tasks(args)
@mock.patch("airflow.bin.cli.db_utils.initdb")
def test_cli_initdb(self, initdb_mock):
cli.initdb(self.parser.parse_args(['initdb']))
initdb_mock.assert_called_once_with(False)
@mock.patch("airflow.bin.cli.db_utils.resetdb")
def test_cli_resetdb(self, resetdb_mock):
cli.resetdb(self.parser.parse_args(['resetdb', '--yes']))
resetdb_mock.assert_called_once_with(False)
def test_cli_connections_list(self):
with mock.patch('sys.stdout',
new_callable=six.StringIO) as mock_stdout:
cli.connections(self.parser.parse_args(['connections', '--list']))
stdout = mock_stdout.getvalue()
conns = [[x.strip("'") for x in re.findall("'\w+'", line)[:2]]
for ii, line in enumerate(stdout.split('\n'))
if ii % 2 == 1]
conns = [conn for conn in conns if len(conn) > 0]
# Assert that some of the connections are present in the output as
# expected:
self.assertIn(['aws_default', 'aws'], conns)
self.assertIn(['beeline_default', 'beeline'], conns)
self.assertIn(['emr_default', 'emr'], conns)
self.assertIn(['mssql_default', 'mssql'], conns)
self.assertIn(['mysql_default', 'mysql'], conns)
self.assertIn(['postgres_default', 'postgres'], conns)
self.assertIn(['wasb_default', 'wasb'], conns)
self.assertIn(['segment_default', 'segment'], conns)
# Attempt to list connections with invalid cli args
with mock.patch('sys.stdout',
new_callable=six.StringIO) as mock_stdout:
cli.connections(self.parser.parse_args(
['connections', '--list', '--conn_id=fake', '--conn_uri=fake-uri',
'--conn_type=fake-type', '--conn_host=fake_host',
'--conn_login=fake_login', '--conn_password=fake_password',
'--conn_schema=fake_schema', '--conn_port=fake_port', '--conn_extra=fake_extra']))
stdout = mock_stdout.getvalue()
# Check list attempt stdout
lines = [l for l in stdout.split('\n') if len(l) > 0]
self.assertListEqual(lines, [
("\tThe following args are not compatible with the " +
"--list flag: ['conn_id', 'conn_uri', 'conn_extra', " +
"'conn_type', 'conn_host', 'conn_login', " +
"'conn_password', 'conn_schema', 'conn_port']"),
])
def test_cli_connections_list_redirect(self):
cmd = ['airflow', 'connections', '--list']
with tempfile.TemporaryFile() as fp:
p = subprocess.Popen(cmd, stdout=fp)
p.wait()
self.assertEqual(0, p.returncode)
    def test_cli_connections_add_delete(self):
        """End-to-end test of `connections --add` / `--delete`: adds six
        connections in various styles, checks the exact stdout for each step,
        verifies the DB rows, deletes them all, and exercises the error paths
        (duplicate add, missing conn_id/conn_uri, delete with bad flags).

        NOTE(review): the assertions depend on the exact CLI message format
        and on processing order, so the steps below must stay in sequence.
        """
        # Add connections:
        uri = 'postgresql://airflow:airflow@host:5432/airflow'
        with mock.patch('sys.stdout',
                        new_callable=six.StringIO) as mock_stdout:
            cli.connections(self.parser.parse_args(
                ['connections', '--add', '--conn_id=new1',
                 '--conn_uri=%s' % uri]))
            cli.connections(self.parser.parse_args(
                ['connections', '-a', '--conn_id=new2',
                 '--conn_uri=%s' % uri]))
            cli.connections(self.parser.parse_args(
                ['connections', '--add', '--conn_id=new3',
                 '--conn_uri=%s' % uri, '--conn_extra', "{'extra': 'yes'}"]))
            cli.connections(self.parser.parse_args(
                ['connections', '-a', '--conn_id=new4',
                 '--conn_uri=%s' % uri, '--conn_extra', "{'extra': 'yes'}"]))
            cli.connections(self.parser.parse_args(
                ['connections', '--add', '--conn_id=new5',
                 '--conn_type=hive_metastore', '--conn_login=airflow',
                 '--conn_password=airflow', '--conn_host=host',
                 '--conn_port=9083', '--conn_schema=airflow']))
            cli.connections(self.parser.parse_args(
                ['connections', '-a', '--conn_id=new6',
                 '--conn_uri', "", '--conn_type=google_cloud_platform', '--conn_extra', "{'extra': 'yes'}"]))
            stdout = mock_stdout.getvalue()

        # Check addition stdout
        lines = [l for l in stdout.split('\n') if len(l) > 0]
        self.assertListEqual(lines, [
            ("\tSuccessfully added `conn_id`=new1 : " +
             "postgresql://airflow:airflow@host:5432/airflow"),
            ("\tSuccessfully added `conn_id`=new2 : " +
             "postgresql://airflow:airflow@host:5432/airflow"),
            ("\tSuccessfully added `conn_id`=new3 : " +
             "postgresql://airflow:airflow@host:5432/airflow"),
            ("\tSuccessfully added `conn_id`=new4 : " +
             "postgresql://airflow:airflow@host:5432/airflow"),
            ("\tSuccessfully added `conn_id`=new5 : " +
             "hive_metastore://airflow:airflow@host:9083/airflow"),
            ("\tSuccessfully added `conn_id`=new6 : " +
             "google_cloud_platform://:@:")
        ])

        # Attempt to add duplicate
        with mock.patch('sys.stdout',
                        new_callable=six.StringIO) as mock_stdout:
            cli.connections(self.parser.parse_args(
                ['connections', '--add', '--conn_id=new1',
                 '--conn_uri=%s' % uri]))
            stdout = mock_stdout.getvalue()

        # Check stdout for addition attempt
        lines = [l for l in stdout.split('\n') if len(l) > 0]
        self.assertListEqual(lines, [
            "\tA connection with `conn_id`=new1 already exists",
        ])

        # Attempt to add without providing conn_id
        with mock.patch('sys.stdout',
                        new_callable=six.StringIO) as mock_stdout:
            cli.connections(self.parser.parse_args(
                ['connections', '--add', '--conn_uri=%s' % uri]))
            stdout = mock_stdout.getvalue()

        # Check stdout for addition attempt
        lines = [l for l in stdout.split('\n') if len(l) > 0]
        self.assertListEqual(lines, [
            ("\tThe following args are required to add a connection:" +
             " ['conn_id']"),
        ])

        # Attempt to add without providing conn_uri
        with mock.patch('sys.stdout',
                        new_callable=six.StringIO) as mock_stdout:
            cli.connections(self.parser.parse_args(
                ['connections', '--add', '--conn_id=new']))
            stdout = mock_stdout.getvalue()

        # Check stdout for addition attempt
        lines = [l for l in stdout.split('\n') if len(l) > 0]
        self.assertListEqual(lines, [
            ("\tThe following args are required to add a connection:" +
             " ['conn_uri or conn_type']"),
        ])

        # Prepare to add connections
        session = settings.Session()
        extra = {'new1': None,
                 'new2': None,
                 'new3': "{'extra': 'yes'}",
                 'new4': "{'extra': 'yes'}"}

        # Add connections
        for index in range(1, 6):
            conn_id = 'new%s' % index
            result = (session
                      .query(models.Connection)
                      .filter(models.Connection.conn_id == conn_id)
                      .first())
            result = (result.conn_id, result.conn_type, result.host,
                      result.port, result.get_extra())
            if conn_id in ['new1', 'new2', 'new3', 'new4']:
                self.assertEqual(result, (conn_id, 'postgres', 'host', 5432,
                                          extra[conn_id]))
            elif conn_id == 'new5':
                self.assertEqual(result, (conn_id, 'hive_metastore', 'host',
                                          9083, None))
            elif conn_id == 'new6':
                self.assertEqual(result, (conn_id, 'google_cloud_platform',
                                          None, None, "{'extra': 'yes'}"))

        # Delete connections
        with mock.patch('sys.stdout',
                        new_callable=six.StringIO) as mock_stdout:
            cli.connections(self.parser.parse_args(
                ['connections', '--delete', '--conn_id=new1']))
            cli.connections(self.parser.parse_args(
                ['connections', '--delete', '--conn_id=new2']))
            cli.connections(self.parser.parse_args(
                ['connections', '--delete', '--conn_id=new3']))
            cli.connections(self.parser.parse_args(
                ['connections', '--delete', '--conn_id=new4']))
            cli.connections(self.parser.parse_args(
                ['connections', '--delete', '--conn_id=new5']))
            cli.connections(self.parser.parse_args(
                ['connections', '--delete', '--conn_id=new6']))
            stdout = mock_stdout.getvalue()

        # Check deletion stdout
        lines = [l for l in stdout.split('\n') if len(l) > 0]
        self.assertListEqual(lines, [
            "\tSuccessfully deleted `conn_id`=new1",
            "\tSuccessfully deleted `conn_id`=new2",
            "\tSuccessfully deleted `conn_id`=new3",
            "\tSuccessfully deleted `conn_id`=new4",
            "\tSuccessfully deleted `conn_id`=new5",
            "\tSuccessfully deleted `conn_id`=new6"
        ])

        # Check deletions
        for index in range(1, 7):
            conn_id = 'new%s' % index
            result = (session.query(models.Connection)
                      .filter(models.Connection.conn_id == conn_id)
                      .first())

            self.assertTrue(result is None)

        # Attempt to delete a non-existing connnection
        with mock.patch('sys.stdout',
                        new_callable=six.StringIO) as mock_stdout:
            cli.connections(self.parser.parse_args(
                ['connections', '--delete', '--conn_id=fake']))
            stdout = mock_stdout.getvalue()

        # Check deletion attempt stdout
        lines = [l for l in stdout.split('\n') if len(l) > 0]
        self.assertListEqual(lines, [
            "\tDid not find a connection with `conn_id`=fake",
        ])

        # Attempt to delete with invalid cli args
        with mock.patch('sys.stdout',
                        new_callable=six.StringIO) as mock_stdout:
            cli.connections(self.parser.parse_args(
                ['connections', '--delete', '--conn_id=fake',
                 '--conn_uri=%s' % uri, '--conn_type=fake-type']))
            stdout = mock_stdout.getvalue()

        # Check deletion attempt stdout
        lines = [l for l in stdout.split('\n') if len(l) > 0]
        self.assertListEqual(lines, [
            ("\tThe following args are not compatible with the " +
             "--delete flag: ['conn_uri', 'conn_type']"),
        ])

        session.close()
def test_cli_test(self):
cli.test(self.parser.parse_args([
'test', 'example_bash_operator', 'runme_0',
DEFAULT_DATE.isoformat()]))
cli.test(self.parser.parse_args([
'test', 'example_bash_operator', 'runme_0', '--dry_run',
DEFAULT_DATE.isoformat()]))
def test_cli_test_with_params(self):
cli.test(self.parser.parse_args([
'test', 'example_passing_params_via_test_command', 'run_this',
'-tp', '{"foo":"bar"}', DEFAULT_DATE.isoformat()]))
cli.test(self.parser.parse_args([
'test', 'example_passing_params_via_test_command', 'also_run_this',
'-tp', '{"foo":"bar"}', DEFAULT_DATE.isoformat()]))
    def test_cli_run(self):
        """Smoke-test the ``run`` subcommand with a local (-l) task run."""
        cli.run(self.parser.parse_args([
            'run', 'example_bash_operator', 'runme_0', '-l',
            DEFAULT_DATE.isoformat()]))
    def test_task_state(self):
        """Smoke-test the ``task_state`` subcommand for a known task."""
        cli.task_state(self.parser.parse_args([
            'task_state', 'example_bash_operator', 'runme_0',
            DEFAULT_DATE.isoformat()]))
    def test_dag_state(self):
        """``dag_state`` returns None when printing the state of a DAG run."""
        self.assertEqual(None, cli.dag_state(self.parser.parse_args([
            'dag_state', 'example_bash_operator', DEFAULT_DATE.isoformat()])))
def test_pause(self):
args = self.parser.parse_args([
'pause', 'example_bash_operator'])
cli.pause(args)
self.assertIn(self.dagbag.dags['example_bash_operator'].is_paused, [True, 1])
args = self.parser.parse_args([
'unpause', 'example_bash_operator'])
cli.unpause(args)
self.assertIn(self.dagbag.dags['example_bash_operator'].is_paused, [False, 0])
    def test_subdag_clear(self):
        """``clear`` on a subdag DAG works with and without --exclude_subdags."""
        args = self.parser.parse_args([
            'clear', 'example_subdag_operator', '--no_confirm'])
        cli.clear(args)
        args = self.parser.parse_args([
            'clear', 'example_subdag_operator', '--no_confirm', '--exclude_subdags'])
        cli.clear(args)
    def test_get_dags(self):
        """get_dags resolves exact ids, regex matches, and errors on no match."""
        dags = cli.get_dags(self.parser.parse_args(['clear', 'example_subdag_operator', '-c']))
        self.assertEqual(len(dags), 1)
        # '-dx' enables regex matching, so 'subdag' matches several DAGs.
        dags = cli.get_dags(self.parser.parse_args(['clear', 'subdag', '-dx', '-c']))
        self.assertGreater(len(dags), 1)
        with self.assertRaises(AirflowException):
            cli.get_dags(self.parser.parse_args(['clear', 'foobar', '-dx', '-c']))
def test_backfill(self):
cli.backfill(self.parser.parse_args([
'backfill', 'example_bash_operator',
'-s', DEFAULT_DATE.isoformat()]))
cli.backfill(self.parser.parse_args([
'backfill', 'example_bash_operator', '-t', 'runme_0', '--dry_run',
'-s', DEFAULT_DATE.isoformat()]))
cli.backfill(self.parser.parse_args([
'backfill', 'example_bash_operator', '--dry_run',
'-s', DEFAULT_DATE.isoformat()]))
cli.backfill(self.parser.parse_args([
'backfill', 'example_bash_operator', '-l',
'-s', DEFAULT_DATE.isoformat()]))
def test_process_subdir_path_with_placeholder(self):
self.assertEqual(os.path.join(settings.DAGS_FOLDER, 'abc'), cli.process_subdir('DAGS_FOLDER/abc'))
    def test_trigger_dag(self):
        """trigger_dag accepts JSON conf and raises ValueError for bad conf."""
        cli.trigger_dag(self.parser.parse_args([
            'trigger_dag', 'example_bash_operator',
            '-c', '{"foo": "bar"}']))
        self.assertRaises(
            ValueError,
            cli.trigger_dag,
            self.parser.parse_args([
                'trigger_dag', 'example_bash_operator',
                '--run_id', 'trigger_dag_xxx',
                '-c', 'NOT JSON'])
        )
    def test_delete_dag(self):
        """delete_dag removes an existing DagModel row and raises for unknown ids."""
        DM = models.DagModel
        key = "my_dag_id"
        session = settings.Session()
        session.add(DM(dag_id=key))
        session.commit()
        cli.delete_dag(self.parser.parse_args([
            'delete_dag', key, '--yes']))
        self.assertEqual(session.query(DM).filter_by(dag_id=key).count(), 0)
        # Deleting a DAG that does not exist must fail loudly.
        self.assertRaises(
            AirflowException,
            cli.delete_dag,
            self.parser.parse_args([
                'delete_dag',
                'does_not_exist_dag',
                '--yes'])
        )
    def test_pool_create(self):
        """``pool -s`` creates a pool row."""
        cli.pool(self.parser.parse_args(['pool', '-s', 'foo', '1', 'test']))
        self.assertEqual(self.session.query(models.Pool).count(), 1)
    def test_pool_get(self):
        """``pool -g`` on an existing pool must not raise."""
        cli.pool(self.parser.parse_args(['pool', '-s', 'foo', '1', 'test']))
        try:
            cli.pool(self.parser.parse_args(['pool', '-g', 'foo']))
        except Exception as e:
            self.fail("The 'pool -g foo' command raised unexpectedly: %s" % e)
    def test_pool_delete(self):
        """``pool -x`` removes a previously created pool."""
        cli.pool(self.parser.parse_args(['pool', '-s', 'foo', '1', 'test']))
        cli.pool(self.parser.parse_args(['pool', '-x', 'foo']))
        self.assertEqual(self.session.query(models.Pool).count(), 0)
    def test_pool_no_args(self):
        """Bare ``pool`` (list mode) must not raise."""
        try:
            cli.pool(self.parser.parse_args(['pool']))
        except Exception as e:
            self.fail("The 'pool' command raised unexpectedly: %s" % e)
def test_variables(self):
# Checks if all subcommands are properly received
cli.variables(self.parser.parse_args([
'variables', '-s', 'foo', '{"foo":"bar"}']))
cli.variables(self.parser.parse_args([
'variables', '-g', 'foo']))
cli.variables(self.parser.parse_args([
'variables', '-g', 'baz', '-d', 'bar']))
cli.variables(self.parser.parse_args([
'variables']))
cli.variables(self.parser.parse_args([
'variables', '-x', 'bar']))
cli.variables(self.parser.parse_args([
'variables', '-i', DEV_NULL]))
cli.variables(self.parser.parse_args([
'variables', '-e', DEV_NULL]))
cli.variables(self.parser.parse_args([
'variables', '-s', 'bar', 'original']))
# First export
cli.variables(self.parser.parse_args([
'variables', '-e', 'variables1.json']))
first_exp = open('variables1.json', 'r')
cli.variables(self.parser.parse_args([
'variables', '-s', 'bar', 'updated']))
cli.variables(self.parser.parse_args([
'variables', '-s', 'foo', '{"foo":"oops"}']))
cli.variables(self.parser.parse_args([
'variables', '-x', 'foo']))
# First import
cli.variables(self.parser.parse_args([
'variables', '-i', 'variables1.json']))
self.assertEqual('original', models.Variable.get('bar'))
self.assertEqual('{"foo": "bar"}', models.Variable.get('foo'))
# Second export
cli.variables(self.parser.parse_args([
'variables', '-e', 'variables2.json']))
second_exp = open('variables2.json', 'r')
self.assertEqual(first_exp.read(), second_exp.read())
second_exp.close()
first_exp.close()
# Second import
cli.variables(self.parser.parse_args([
'variables', '-i', 'variables2.json']))
self.assertEqual('original', models.Variable.get('bar'))
self.assertEqual('{"foo": "bar"}', models.Variable.get('foo'))
os.remove('variables1.json')
os.remove('variables2.json')
def _wait_pidfile(self, pidfile):
while True:
try:
with open(pidfile) as f:
return int(f.read())
except:
sleep(1)
    def test_cli_webserver_foreground(self):
        """Foreground webserver terminates without leaving stray processes."""
        # Confirm that webserver hasn't been launched.
        # pgrep returns exit status 1 if no process matched.
        self.assertEqual(1, subprocess.Popen(["pgrep", "-c", "airflow"]).wait())
        self.assertEqual(1, subprocess.Popen(["pgrep", "-c", "gunicorn"]).wait())
        # Run webserver in foreground and terminate it.
        p = subprocess.Popen(["airflow", "webserver"])
        p.terminate()
        p.wait()
        # Assert that no process remains.
        self.assertEqual(1, subprocess.Popen(["pgrep", "-c", "airflow"]).wait())
        self.assertEqual(1, subprocess.Popen(["pgrep", "-c", "gunicorn"]).wait())
    @unittest.skipIf("TRAVIS" in os.environ and bool(os.environ["TRAVIS"]),
                     "Skipping test due to lack of required file permission")
    def test_cli_webserver_foreground_with_pid(self):
        """The --pid option makes the webserver write its PID file."""
        # Run webserver in foreground with --pid option
        pidfile = tempfile.mkstemp()[1]
        p = subprocess.Popen(["airflow", "webserver", "--pid", pidfile])
        # Check the file specified by --pid option exists
        self._wait_pidfile(pidfile)
        # Terminate webserver
        p.terminate()
        p.wait()
    @unittest.skipIf("TRAVIS" in os.environ and bool(os.environ["TRAVIS"]),
                     "Skipping test due to lack of required file permission")
    def test_cli_webserver_background(self):
        """Daemonized webserver (-D) starts, and killing its monitor stops it."""
        import psutil
        # Confirm that webserver hasn't been launched.
        self.assertEqual(1, subprocess.Popen(["pgrep", "-c", "airflow"]).wait())
        self.assertEqual(1, subprocess.Popen(["pgrep", "-c", "gunicorn"]).wait())
        # Run webserver in background.
        subprocess.Popen(["airflow", "webserver", "-D"])
        pidfile = cli.setup_locations("webserver")[0]
        self._wait_pidfile(pidfile)
        # Assert that gunicorn and its monitor are launched.
        self.assertEqual(0, subprocess.Popen(["pgrep", "-c", "airflow"]).wait())
        self.assertEqual(0, subprocess.Popen(["pgrep", "-c", "gunicorn"]).wait())
        # Terminate monitor process.
        pidfile = cli.setup_locations("webserver-monitor")[0]
        pid = self._wait_pidfile(pidfile)
        p = psutil.Process(pid)
        p.terminate()
        p.wait()
        # Assert that no process remains.
        self.assertEqual(1, subprocess.Popen(["pgrep", "-c", "airflow"]).wait())
        self.assertEqual(1, subprocess.Popen(["pgrep", "-c", "gunicorn"]).wait())
    # Patch get_num_workers_running to 0 so the gunicorn master looks dead,
    # forcing the webserver monitor down the timeout/shutdown path.
    @mock.patch("airflow.bin.cli.get_num_workers_running", return_value=0)
    def test_cli_webserver_shutdown_when_gunicorn_master_is_killed(self, _):
        """The webserver exits with code 1 when its gunicorn master dies."""
        # Shorten timeout so that this test doesn't take too long time
        configuration.conf.set("webserver", "web_server_master_timeout", "10")
        args = self.parser.parse_args(['webserver'])
        with self.assertRaises(SystemExit) as e:
            cli.webserver(args)
        self.assertEqual(e.exception.code, 1)
class SecurityTests(unittest.TestCase):
    """Security-focused UI tests: CSRF enforcement, XSS escaping, and
    protection of chart templates against remote code execution."""
    def setUp(self):
        """Build a test client with auth disabled and example DAGs loaded."""
        configuration.load_test_config()
        configuration.conf.set("webserver", "authenticate", "False")
        configuration.conf.set("webserver", "expose_config", "True")
        app = application.create_app()
        app.config['TESTING'] = True
        self.app = app.test_client()
        self.dagbag = models.DagBag(
            dag_folder=DEV_NULL, include_examples=True)
        self.dag_bash = self.dagbag.dags['example_bash_operator']
        self.runme_0 = self.dag_bash.get_task('runme_0')
    def get_csrf(self, response):
        """Extract the CSRF token value from a rendered form in ``response``."""
        tree = html.fromstring(response.data)
        form = tree.find('.//form')
        return form.find('.//input[@name="_csrf_token"]').value
    def test_csrf_rejection(self):
        """POST requests without a CSRF token must be rejected."""
        endpoints = ([
            "/admin/queryview/",
            "/admin/airflow/paused?dag_id=example_python_operator&is_paused=false",
        ])
        for endpoint in endpoints:
            response = self.app.post(endpoint)
            self.assertIn('CSRF token is missing', response.data.decode('utf-8'))
    def test_csrf_acceptance(self):
        """A POST carrying a freshly issued CSRF token succeeds."""
        response = self.app.get("/admin/queryview/")
        csrf = self.get_csrf(response)
        response = self.app.post("/admin/queryview/", data=dict(csrf_token=csrf))
        self.assertEqual(200, response.status_code)
    def test_xss(self):
        """Script tags injected via the query string must appear escaped."""
        try:
            self.app.get("/admin/airflow/tree?dag_id=<script>alert(123456)</script>")
        except:
            # exception is expected here since dag doesnt exist
            pass
        response = self.app.get("/admin/log", follow_redirects=True)
        # The log page must only contain the bleach-escaped form of the payload.
        self.assertIn(bleach.clean("<script>alert(123456)</script>"), response.data.decode('UTF-8'))
    def test_chart_data_template(self):
        """Protect chart_data from being able to do RCE."""
        session = settings.Session()
        Chart = models.Chart
        # Template injection attempts in the SQL and in the label fields.
        chart1 = Chart(
            label='insecure_chart',
            conn_id='airflow_db',
            chart_type='bar',
            sql="SELECT {{ ''.__class__.__mro__[1].__subclasses__() }}"
        )
        chart2 = Chart(
            label="{{ ''.__class__.__mro__[1].__subclasses__() }}",
            conn_id='airflow_db',
            chart_type='bar',
            sql="SELECT 1"
        )
        chart3 = Chart(
            label="{{ subprocess.check_output('ls') }}",
            conn_id='airflow_db',
            chart_type='bar',
            sql="SELECT 1"
        )
        session.add(chart1)
        session.add(chart2)
        session.add(chart3)
        session.commit()
        # Sandbox escapes must raise SecurityError from the template engine.
        chart1 = session.query(Chart).filter(Chart.label == 'insecure_chart').first()
        with self.assertRaises(SecurityError):
            self.app.get("/admin/airflow/chart_data?chart_id={}".format(chart1.id))
        chart2 = session.query(Chart).filter(
            Chart.label == "{{ ''.__class__.__mro__[1].__subclasses__() }}"
        ).first()
        with self.assertRaises(SecurityError):
            self.app.get("/admin/airflow/chart_data?chart_id={}".format(chart2.id))
        # 'subprocess' is not defined in the sandbox, so this is Undefined.
        chart3 = session.query(Chart).filter(
            Chart.label == "{{ subprocess.check_output('ls') }}"
        ).first()
        with self.assertRaises(UndefinedError):
            self.app.get("/admin/airflow/chart_data?chart_id={}".format(chart3.id))
    def tearDown(self):
        """Reset exposed config and clear DAG state created by the tests."""
        configuration.conf.set("webserver", "expose_config", "False")
        self.dag_bash.clear(start_date=DEFAULT_DATE, end_date=timezone.utcnow())
class WebUiTests(unittest.TestCase):
    """Broad smoke tests of the admin web UI endpoints using example DAGs."""
    def setUp(self):
        """Create a CSRF-disabled test client and seed running DagRuns."""
        configuration.load_test_config()
        configuration.conf.set("webserver", "authenticate", "False")
        configuration.conf.set("webserver", "expose_config", "True")
        app = application.create_app()
        app.config['TESTING'] = True
        # Disabling CSRF methods lets tests POST without fetching tokens.
        app.config['WTF_CSRF_METHODS'] = []
        self.app = app.test_client()
        self.dagbag = models.DagBag(include_examples=True)
        self.dag_bash = self.dagbag.dags['example_bash_operator']
        self.dag_python = self.dagbag.dags['example_python_operator']
        self.sub_dag = self.dagbag.dags['example_subdag_operator']
        self.runme_0 = self.dag_bash.get_task('runme_0')
        self.example_xcom = self.dagbag.dags['example_xcom']
        # Running DagRuns so run-dependent views have data to render.
        self.dagrun_python = self.dag_python.create_dagrun(
            run_id="test_{}".format(models.DagRun.id_for_date(timezone.utcnow())),
            execution_date=DEFAULT_DATE,
            start_date=timezone.utcnow(),
            state=State.RUNNING
        )
        self.sub_dag.create_dagrun(
            run_id="test_{}".format(models.DagRun.id_for_date(timezone.utcnow())),
            execution_date=DEFAULT_DATE,
            start_date=timezone.utcnow(),
            state=State.RUNNING
        )
        self.example_xcom.create_dagrun(
            run_id="test_{}".format(models.DagRun.id_for_date(timezone.utcnow())),
            execution_date=DEFAULT_DATE,
            start_date=timezone.utcnow(),
            state=State.RUNNING
        )
    def test_index(self):
        """The DAGs index lists example DAGs and links to the last run."""
        response = self.app.get('/', follow_redirects=True)
        resp_html = response.data.decode('utf-8')
        self.assertIn("DAGs", resp_html)
        self.assertIn("example_bash_operator", resp_html)
        # The HTML should contain data for the last-run. A link to the specific run,
        # and the text of the date.
        url = "/admin/airflow/graph?" + urlencode({
            "dag_id": self.dag_python.dag_id,
            "execution_date": self.dagrun_python.execution_date,
        }).replace("&", "&amp;")
        self.assertIn(url, resp_html)
        self.assertIn(
            self.dagrun_python.execution_date.strftime("%Y-%m-%d %H:%M"),
            resp_html)
    def test_query(self):
        """The ad-hoc query view renders and executes a simple query."""
        response = self.app.get('/admin/queryview/')
        self.assertIn("Ad Hoc Query", response.data.decode('utf-8'))
        response = self.app.post(
            "/admin/queryview/", data=dict(
                conn_id="airflow_db",
                sql="SELECT+COUNT%281%29+as+TEST+FROM+task_instance"))
        self.assertIn("TEST", response.data.decode('utf-8'))
    def test_health(self):
        """The health endpoint reports a healthy server."""
        response = self.app.get('/health')
        self.assertIn('The server is healthy!', response.data.decode('utf-8'))
    def test_noaccess(self):
        """The noaccess page shows the access-denied message."""
        response = self.app.get('/admin/airflow/noaccess')
        self.assertIn("You don't seem to have access.", response.data.decode('utf-8'))
    def test_pickle_info(self):
        """pickle_info returns a JSON payload."""
        response = self.app.get('/admin/airflow/pickle_info')
        self.assertIn('{', response.data.decode('utf-8'))
    def test_dag_views(self):
        """Smoke-test the per-DAG views (graph, tree, stats, task actions)."""
        response = self.app.get(
            '/admin/airflow/graph?dag_id=example_bash_operator')
        self.assertIn("runme_0", response.data.decode('utf-8'))
        # confirm that the graph page loads when execution_date is blank
        response = self.app.get(
            '/admin/airflow/graph?dag_id=example_bash_operator&execution_date=')
        self.assertIn("runme_0", response.data.decode('utf-8'))
        response = self.app.get(
            '/admin/airflow/tree?num_runs=25&dag_id=example_bash_operator')
        self.assertIn("runme_0", response.data.decode('utf-8'))
        response = self.app.get(
            '/admin/airflow/duration?days=30&dag_id=example_bash_operator')
        self.assertIn("example_bash_operator", response.data.decode('utf-8'))
        response = self.app.get(
            '/admin/airflow/tries?days=30&dag_id=example_bash_operator')
        self.assertIn("example_bash_operator", response.data.decode('utf-8'))
        response = self.app.get(
            '/admin/airflow/landing_times?'
            'days=30&dag_id=example_python_operator')
        self.assertIn("example_python_operator", response.data.decode('utf-8'))
        response = self.app.get(
            '/admin/airflow/landing_times?'
            'days=30&dag_id=example_xcom')
        self.assertIn("example_xcom", response.data.decode('utf-8'))
        response = self.app.get(
            '/admin/airflow/gantt?dag_id=example_bash_operator')
        self.assertIn("example_bash_operator", response.data.decode('utf-8'))
        response = self.app.get(
            '/admin/airflow/code?dag_id=example_bash_operator')
        self.assertIn("example_bash_operator", response.data.decode('utf-8'))
        response = self.app.get(
            '/admin/airflow/blocked')
        response = self.app.get(
            '/admin/configurationview/')
        self.assertIn("Airflow Configuration", response.data.decode('utf-8'))
        self.assertIn("Running Configuration", response.data.decode('utf-8'))
        response = self.app.get(
            '/admin/airflow/rendered?'
            'task_id=runme_1&dag_id=example_bash_operator&'
            'execution_date={}'.format(DEFAULT_DATE_ISO))
        self.assertIn("example_bash_operator", response.data.decode('utf-8'))
        response = self.app.get(
            '/admin/airflow/log?task_id=run_this_last&'
            'dag_id=example_bash_operator&execution_date={}'
            ''.format(DEFAULT_DATE_ISO))
        self.assertIn("run_this_last", response.data.decode('utf-8'))
        response = self.app.get(
            '/admin/airflow/task?'
            'task_id=runme_0&dag_id=example_bash_operator&'
            'execution_date={}'.format(DEFAULT_DATE_DS))
        self.assertIn("Attributes", response.data.decode('utf-8'))
        response = self.app.get(
            '/admin/airflow/dag_stats')
        self.assertIn("example_bash_operator", response.data.decode('utf-8'))
        response = self.app.get(
            '/admin/airflow/task_stats')
        self.assertIn("example_bash_operator", response.data.decode('utf-8'))
        # Marking a task as successful asks for confirmation first.
        url = (
            "/admin/airflow/success?task_id=print_the_context&"
            "dag_id=example_python_operator&upstream=false&downstream=false&"
            "future=false&past=false&execution_date={}&"
            "origin=/admin".format(DEFAULT_DATE_DS))
        response = self.app.get(url)
        self.assertIn("Wait a minute", response.data.decode('utf-8'))
        response = self.app.get(url + "&confirmed=true")
        response = self.app.get(
            '/admin/airflow/clear?task_id=print_the_context&'
            'dag_id=example_python_operator&future=true&past=false&'
            'upstream=true&downstream=false&'
            'execution_date={}&'
            'origin=/admin'.format(DEFAULT_DATE_DS))
        self.assertIn("Wait a minute", response.data.decode('utf-8'))
        # Success on a subdag task lists all the affected subdag tasks.
        url = (
            "/admin/airflow/success?task_id=section-1&"
            "dag_id=example_subdag_operator&upstream=true&downstream=true&"
            "future=false&past=false&execution_date={}&"
            "origin=/admin".format(DEFAULT_DATE_DS))
        response = self.app.get(url)
        self.assertIn("Wait a minute", response.data.decode('utf-8'))
        self.assertIn("section-1-task-1", response.data.decode('utf-8'))
        self.assertIn("section-1-task-2", response.data.decode('utf-8'))
        self.assertIn("section-1-task-3", response.data.decode('utf-8'))
        self.assertIn("section-1-task-4", response.data.decode('utf-8'))
        self.assertIn("section-1-task-5", response.data.decode('utf-8'))
        response = self.app.get(url + "&confirmed=true")
        url = (
            "/admin/airflow/clear?task_id=print_the_context&"
            "dag_id=example_python_operator&future=false&past=false&"
            "upstream=false&downstream=true&"
            "execution_date={}&"
            "origin=/admin".format(DEFAULT_DATE_DS))
        response = self.app.get(url)
        self.assertIn("Wait a minute", response.data.decode('utf-8'))
        response = self.app.get(url + "&confirmed=true")
        url = (
            "/admin/airflow/run?task_id=runme_0&"
            "dag_id=example_bash_operator&ignore_all_deps=false&ignore_ti_state=true&"
            "ignore_task_deps=true&execution_date={}&"
            "origin=/admin".format(DEFAULT_DATE_DS))
        response = self.app.get(url)
        response = self.app.get(
            "/admin/airflow/refresh?dag_id=example_bash_operator")
        response = self.app.get("/admin/airflow/refresh_all")
        response = self.app.post(
            "/admin/airflow/paused?"
            "dag_id=example_python_operator&is_paused=false")
        self.assertIn("OK", response.data.decode('utf-8'))
        response = self.app.get("/admin/xcom", follow_redirects=True)
        self.assertIn("Xcoms", response.data.decode('utf-8'))
    def test_charts(self):
        """Chart, chart_data and dag_details views render for a known chart."""
        session = Session()
        chart_label = "Airflow task instance by type"
        chart = session.query(
            models.Chart).filter(models.Chart.label == chart_label).first()
        chart_id = chart.id
        session.close()
        response = self.app.get(
            '/admin/airflow/chart'
            '?chart_id={}&iteration_no=1'.format(chart_id))
        self.assertIn("Airflow task instance by type", response.data.decode('utf-8'))
        response = self.app.get(
            '/admin/airflow/chart_data'
            '?chart_id={}&iteration_no=1'.format(chart_id))
        self.assertIn("example", response.data.decode('utf-8'))
        response = self.app.get(
            '/admin/airflow/dag_details?dag_id=example_branch_operator')
        self.assertIn("run_this_first", response.data.decode('utf-8'))
    def test_fetch_task_instance(self):
        """The task_instances JSON endpoint lists tasks of a DAG run."""
        url = (
            "/admin/airflow/object/task_instances?"
            "dag_id=example_python_operator&"
            "execution_date={}".format(DEFAULT_DATE_DS))
        response = self.app.get(url)
        self.assertIn("print_the_context", response.data.decode('utf-8'))
    def tearDown(self):
        """Reset config and purge DagRuns/TaskInstances created in setUp."""
        configuration.conf.set("webserver", "expose_config", "False")
        self.dag_bash.clear(start_date=DEFAULT_DATE, end_date=timezone.utcnow())
        session = Session()
        session.query(models.DagRun).delete()
        session.query(models.TaskInstance).delete()
        session.commit()
        session.close()
class SecureModeWebUiTests(unittest.TestCase):
    """With ``core.secure_mode`` enabled, the ad-hoc query and chart admin
    views must not be registered (they 404)."""

    def setUp(self):
        """Build a test client with auth off and secure_mode on."""
        configuration.load_test_config()
        configuration.conf.set("webserver", "authenticate", "False")
        configuration.conf.set("core", "secure_mode", "True")
        app = application.create_app()
        app.config['TESTING'] = True
        self.app = app.test_client()

    def test_query(self):
        """The ad-hoc query view is absent in secure mode."""
        response = self.app.get('/admin/queryview/')
        self.assertEqual(response.status_code, 404)

    def test_charts(self):
        """The chart admin view is absent in secure mode."""
        response = self.app.get('/admin/chart/')
        self.assertEqual(response.status_code, 404)

    def tearDown(self):
        # Use the same lower-case option name that setUp sets. configparser
        # normalizes option names case-insensitively, but matching the names
        # avoids relying on that and keeps the pair grep-able.
        configuration.conf.remove_option("core", "secure_mode")
class WebPasswordAuthTest(unittest.TestCase):
    """Login/logout flow against the password auth backend."""

    def setUp(self):
        """Enable password auth and create a known user to log in as."""
        configuration.conf.set("webserver", "authenticate", "True")
        configuration.conf.set("webserver", "auth_backend", "airflow.contrib.auth.backends.password_auth")
        app = application.create_app()
        app.config['TESTING'] = True
        self.app = app.test_client()
        from airflow.contrib.auth.backends.password_auth import PasswordUser
        session = Session()
        user = models.User()
        password_user = PasswordUser(user)
        password_user.username = 'airflow_passwordauth'
        password_user.password = 'password'
        # NOTE: a leftover debug print of password_user._password (the stored
        # hash) was removed here — it leaked the hash into test output.
        session.add(password_user)
        session.commit()
        session.close()

    def get_csrf(self, response):
        """Extract the CSRF token value from the login form."""
        tree = html.fromstring(response.data)
        form = tree.find('.//form')
        return form.find('.//input[@name="_csrf_token"]').value

    def login(self, username, password):
        """Submit the login form with a valid CSRF token."""
        response = self.app.get('/admin/airflow/login')
        csrf_token = self.get_csrf(response)
        return self.app.post('/admin/airflow/login', data=dict(
            username=username,
            password=password,
            csrf_token=csrf_token
        ), follow_redirects=True)

    def logout(self):
        """Hit the logout endpoint, following redirects to the login page."""
        return self.app.get('/admin/airflow/logout', follow_redirects=True)

    def test_login_logout_password_auth(self):
        """Wrong user, wrong password, correct login, then logout."""
        self.assertTrue(configuration.conf.getboolean('webserver', 'authenticate'))
        response = self.login('user1', 'whatever')
        self.assertIn('Incorrect login details', response.data.decode('utf-8'))
        response = self.login('airflow_passwordauth', 'wrongpassword')
        self.assertIn('Incorrect login details', response.data.decode('utf-8'))
        response = self.login('airflow_passwordauth', 'password')
        self.assertIn('Data Profiling', response.data.decode('utf-8'))
        response = self.logout()
        self.assertIn('form-signin', response.data.decode('utf-8'))

    def test_unauthorized_password_auth(self):
        """Unauthenticated page access redirects (302) to login."""
        response = self.app.get("/admin/airflow/landing_times")
        self.assertEqual(response.status_code, 302)

    def tearDown(self):
        """Remove the test user and turn authentication back off."""
        configuration.load_test_config()
        session = Session()
        session.query(models.User).delete()
        session.commit()
        session.close()
        configuration.conf.set("webserver", "authenticate", "False")
class WebLdapAuthTest(unittest.TestCase):
    """Login/logout and filter behavior against the LDAP auth backend.

    Requires a test LDAP server on localhost:3890 with the fixture users
    (user1, dataprofiler, superuser) loaded.
    """
    def setUp(self):
        """Enable LDAP auth and point it at the local test LDAP server."""
        configuration.conf.set("webserver", "authenticate", "True")
        configuration.conf.set("webserver", "auth_backend", "airflow.contrib.auth.backends.ldap_auth")
        try:
            configuration.conf.add_section("ldap")
        except:
            # Section may already exist from a previous test; best-effort add.
            pass
        configuration.conf.set("ldap", "uri", "ldap://localhost:3890")
        configuration.conf.set("ldap", "user_filter", "objectClass=*")
        configuration.conf.set("ldap", "user_name_attr", "uid")
        configuration.conf.set("ldap", "bind_user", "cn=Manager,dc=example,dc=com")
        configuration.conf.set("ldap", "bind_password", "insecure")
        configuration.conf.set("ldap", "basedn", "dc=example,dc=com")
        configuration.conf.set("ldap", "cacert", "")
        app = application.create_app()
        app.config['TESTING'] = True
        self.app = app.test_client()
    def get_csrf(self, response):
        """Extract the CSRF token value from the login form."""
        tree = html.fromstring(response.data)
        form = tree.find('.//form')
        return form.find('.//input[@name="_csrf_token"]').value
    def login(self, username, password):
        """Submit the login form with a valid CSRF token."""
        response = self.app.get('/admin/airflow/login')
        csrf_token = self.get_csrf(response)
        return self.app.post('/admin/airflow/login', data=dict(
            username=username,
            password=password,
            csrf_token=csrf_token
        ), follow_redirects=True)
    def logout(self):
        """Hit the logout endpoint, following redirects to the login page."""
        return self.app.get('/admin/airflow/logout', follow_redirects=True)
    def test_login_logout_ldap(self):
        """Bad password, bad user, good login, then logout."""
        self.assertTrue(configuration.conf.getboolean('webserver', 'authenticate'))
        response = self.login('user1', 'userx')
        self.assertIn('Incorrect login details', response.data.decode('utf-8'))
        response = self.login('userz', 'user1')
        self.assertIn('Incorrect login details', response.data.decode('utf-8'))
        response = self.login('user1', 'user1')
        self.assertIn('Data Profiling', response.data.decode('utf-8'))
        response = self.logout()
        self.assertIn('form-signin', response.data.decode('utf-8'))
    def test_unauthorized(self):
        """Unauthenticated page access redirects (302) to login."""
        response = self.app.get("/admin/airflow/landing_times")
        self.assertEqual(response.status_code, 302)
    def test_no_filter(self):
        """Without filters every user gets full menus (superuser behavior)."""
        response = self.login('user1', 'user1')
        self.assertIn('Data Profiling', response.data.decode('utf-8'))
        self.assertIn('Connections', response.data.decode('utf-8'))
    def test_with_filters(self):
        """superuser/data-profiler filters gate the corresponding menus."""
        configuration.conf.set('ldap', 'superuser_filter',
                               'description=superuser')
        configuration.conf.set('ldap', 'data_profiler_filter',
                               'description=dataprofiler')
        response = self.login('dataprofiler', 'dataprofiler')
        self.assertIn('Data Profiling', response.data.decode('utf-8'))
        response = self.login('superuser', 'superuser')
        self.assertIn('Connections', response.data.decode('utf-8'))
    def tearDown(self):
        """Remove test users and turn authentication back off."""
        configuration.load_test_config()
        session = Session()
        session.query(models.User).delete()
        session.commit()
        session.close()
        configuration.conf.set("webserver", "authenticate", "False")
class LdapGroupTest(unittest.TestCase):
    """Verifies LDAP group resolution for users via the LDAP auth backend.

    Requires a test LDAP server on localhost:3890 with the fixture users.
    """
    def setUp(self):
        """Enable LDAP auth and point it at the local test LDAP server."""
        configuration.conf.set("webserver", "authenticate", "True")
        configuration.conf.set("webserver", "auth_backend", "airflow.contrib.auth.backends.ldap_auth")
        try:
            configuration.conf.add_section("ldap")
        except:
            # Section may already exist from a previous test; best-effort add.
            pass
        configuration.conf.set("ldap", "uri", "ldap://localhost:3890")
        configuration.conf.set("ldap", "user_filter", "objectClass=*")
        configuration.conf.set("ldap", "user_name_attr", "uid")
        configuration.conf.set("ldap", "bind_user", "cn=Manager,dc=example,dc=com")
        configuration.conf.set("ldap", "bind_password", "insecure")
        configuration.conf.set("ldap", "basedn", "dc=example,dc=com")
        configuration.conf.set("ldap", "cacert", "")
    def test_group_belonging(self):
        """Each fixture user resolves to exactly its expected LDAP groups."""
        from airflow.contrib.auth.backends.ldap_auth import LdapUser
        users = {"user1": ["group1", "group3"],
                 "user2": ["group2"]
                 }
        for user in users:
            mu = models.User(username=user,
                             is_superuser=False)
            auth = LdapUser(mu)
            self.assertEqual(set(users[user]), set(auth.ldap_groups))
    def tearDown(self):
        """Restore test config and turn authentication back off."""
        configuration.load_test_config()
        configuration.conf.set("webserver", "authenticate", "False")
class FakeWebHDFSHook(object):
    """Test double for the WebHDFS hook: echoes back whatever it is given."""

    def __init__(self, conn_id):
        self.conn_id = conn_id

    def get_conn(self):
        """Return the stored connection id in place of a real connection."""
        return self.conn_id

    def check_for_path(self, hdfs_path):
        """Report the path itself (truthy for any non-empty path)."""
        return hdfs_path
class FakeSnakeBiteClientException(Exception):
    # Raised by FakeSnakeBiteClient.ls() for paths the fake does not know.
    pass
class FakeSnakeBiteClient(object):
    """In-memory stand-in for a snakebite HDFS client.

    ``ls`` answers a fixed set of canned paths used by the HDFS sensor tests
    and raises :class:`FakeSnakeBiteClientException` for anything else.
    """

    def __init__(self):
        self.started = True

    @staticmethod
    def _file_stat(path, length=0):
        # Canned stat dictionary for a regular file entry.
        return {
            'group': u'supergroup',
            'permission': 420,
            'file_type': 'f',
            'access_time': 1481122343796,
            'block_replication': 3,
            'modification_time': 1481122343862,
            'length': length,
            'blocksize': 134217728,
            'owner': u'hdfs',
            'path': path,
        }

    @staticmethod
    def _dir_stat(path):
        # Canned stat dictionary for a directory entry.
        return {
            'group': u'supergroup',
            'permission': 493,
            'file_type': 'd',
            'access_time': 0,
            'block_replication': 0,
            'modification_time': 1481132141540,
            'length': 0,
            'blocksize': 0,
            'owner': u'hdfs',
            'path': path,
        }

    def ls(self, path, include_toplevel=False):
        """
        the fake snakebite client

        :param path: the array of path to test
        :param include_toplevel: to return the toplevel directory info
        :return: a list for path for the matching queries
        """
        target = path[0]
        if target == '/datadirectory/empty_directory':
            if not include_toplevel:
                return []
            return [self._dir_stat('/datadirectory/empty_directory')]
        if target == '/datadirectory/datafile':
            return [self._file_stat('/datadirectory/datafile')]
        if target == '/datadirectory/not_empty_directory':
            entries = [self._file_stat(
                '/datadirectory/not_empty_directory/test_file')]
            if include_toplevel:
                # NOTE: mirrors the original fixture, whose toplevel entry
                # carries the 'empty_directory' path.
                entries.insert(
                    0, self._dir_stat('/datadirectory/empty_directory'))
            return entries
        if target == '/datadirectory/regex_dir':
            names = ('test1file', 'test2file', 'test3file',
                     'copying_file_1.txt._COPYING_',
                     'copying_file_3.txt.sftp')
            return [self._file_stat('/datadirectory/regex_dir/' + name,
                                    length=12582912)
                    for name in names]
        # Unknown paths (incl. /datadirectory/not_existing_file_or_directory)
        raise FakeSnakeBiteClientException
class FakeHDFSHook(object):
    """Test double for HDFSHook, handing out a FakeSnakeBiteClient."""

    def __init__(self, conn_id=None):
        self.conn_id = conn_id

    def get_conn(self):
        """Build and return a fresh fake snakebite client."""
        return FakeSnakeBiteClient()
class ConnectionTest(unittest.TestCase):
    """Connection resolution: env-var URIs, DB rows, and precedence rules."""
    def setUp(self):
        """Init the DB and export two connection URIs via env vars."""
        configuration.load_test_config()
        utils.db.initdb()
        os.environ['AIRFLOW_CONN_TEST_URI'] = (
            'postgres://username:[email protected]:5432/the_database')
        os.environ['AIRFLOW_CONN_TEST_URI_NO_CREDS'] = (
            'postgres://ec2.compute.com/the_database')
    def tearDown(self):
        """Remove the connection env vars this class may have set."""
        env_vars = ['AIRFLOW_CONN_TEST_URI', 'AIRFLOW_CONN_AIRFLOW_DB']
        for ev in env_vars:
            if ev in os.environ:
                del os.environ[ev]
    def test_using_env_var(self):
        """A full URI env var yields host/schema/login/password/port."""
        c = SqliteHook.get_connection(conn_id='test_uri')
        self.assertEqual('ec2.compute.com', c.host)
        self.assertEqual('the_database', c.schema)
        self.assertEqual('username', c.login)
        self.assertEqual('password', c.password)
        self.assertEqual(5432, c.port)
    def test_using_unix_socket_env_var(self):
        """A URI without credentials leaves login/password/port as None."""
        c = SqliteHook.get_connection(conn_id='test_uri_no_creds')
        self.assertEqual('ec2.compute.com', c.host)
        self.assertEqual('the_database', c.schema)
        self.assertIsNone(c.login)
        self.assertIsNone(c.password)
        self.assertIsNone(c.port)
    def test_param_setup(self):
        """Connections built from keyword params expose those params."""
        c = models.Connection(conn_id='local_mysql', conn_type='mysql',
                              host='localhost', login='airflow',
                              password='airflow', schema='airflow')
        self.assertEqual('localhost', c.host)
        self.assertEqual('airflow', c.schema)
        self.assertEqual('airflow', c.login)
        self.assertEqual('airflow', c.password)
        self.assertIsNone(c.port)
    def test_env_var_priority(self):
        """An AIRFLOW_CONN_* env var overrides the DB connection row."""
        c = SqliteHook.get_connection(conn_id='airflow_db')
        self.assertNotEqual('ec2.compute.com', c.host)
        os.environ['AIRFLOW_CONN_AIRFLOW_DB'] = \
            'postgres://username:[email protected]:5432/the_database'
        c = SqliteHook.get_connection(conn_id='airflow_db')
        self.assertEqual('ec2.compute.com', c.host)
        self.assertEqual('the_database', c.schema)
        self.assertEqual('username', c.login)
        self.assertEqual('password', c.password)
        self.assertEqual(5432, c.port)
        del os.environ['AIRFLOW_CONN_AIRFLOW_DB']
    def test_dbapi_get_uri(self):
        """DbApiHook.get_uri reconstructs the URI the connection came from."""
        conn = BaseHook.get_connection(conn_id='test_uri')
        hook = conn.get_hook()
        self.assertEqual('postgres://username:[email protected]:5432/the_database', hook.get_uri())
        conn2 = BaseHook.get_connection(conn_id='test_uri_no_creds')
        hook2 = conn2.get_hook()
        self.assertEqual('postgres://ec2.compute.com/the_database', hook2.get_uri())
    def test_dbapi_get_sqlalchemy_engine(self):
        """get_sqlalchemy_engine yields an Engine bound to the conn URI."""
        conn = BaseHook.get_connection(conn_id='test_uri')
        hook = conn.get_hook()
        engine = hook.get_sqlalchemy_engine()
        self.assertIsInstance(engine, sqlalchemy.engine.Engine)
        self.assertEqual('postgres://username:[email protected]:5432/the_database', str(engine.url))
    def test_get_connections_env_var(self):
        """get_connections returns a single parsed env-var connection."""
        conns = SqliteHook.get_connections(conn_id='test_uri')
        assert len(conns) == 1
        assert conns[0].host == 'ec2.compute.com'
        assert conns[0].schema == 'the_database'
        assert conns[0].login == 'username'
        assert conns[0].password == 'password'
        assert conns[0].port == 5432
    def test_get_connections_db(self):
        """get_connections falls back to the DB row when no env var is set."""
        conns = BaseHook.get_connections(conn_id='airflow_db')
        assert len(conns) == 1
        assert conns[0].host == 'localhost'
        assert conns[0].schema == 'airflow'
        assert conns[0].login == 'root'
class WebHDFSHookTest(unittest.TestCase):
    """Smoke tests for constructing WebHDFSHook."""

    def setUp(self):
        configuration.load_test_config()

    def test_simple_init(self):
        """A bare hook starts out with no proxy user configured."""
        from airflow.hooks.webhdfs_hook import WebHDFSHook
        hook = WebHDFSHook()
        self.assertIsNone(hook.proxy_user)

    def test_init_proxy_user(self):
        """The proxy_user keyword is stored on the hook verbatim."""
        from airflow.hooks.webhdfs_hook import WebHDFSHook
        hook = WebHDFSHook(proxy_user='someone')
        self.assertEqual('someone', hook.proxy_user)
# snakebite (and therefore HDFSHook) is an optional dependency; when it is
# not installed HDFSHook is set to None so the tests below are skipped.
try:
    from airflow.hooks.hdfs_hook import HDFSHook
    import snakebite
except ImportError:
    HDFSHook = None
@unittest.skipIf(HDFSHook is None,
                 "Skipping test because HDFSHook is not installed")
class HDFSHookTest(unittest.TestCase):
    """Tests for HDFSHook client construction (snakebite-backed)."""

    def setUp(self):
        configuration.load_test_config()
        # Provide a default HDFS connection through the environment.
        os.environ['AIRFLOW_CONN_HDFS_DEFAULT'] = ('hdfs://localhost:8020')

    def test_get_client(self):
        """A plain connection yields a snakebite Client carrying the proxy user."""
        client = HDFSHook(proxy_user='foo').get_conn()
        self.assertIsInstance(client, snakebite.client.Client)
        self.assertEqual('localhost', client.host)
        self.assertEqual(8020, client.port)
        self.assertEqual('foo', client.service.channel.effective_user)

    @mock.patch('airflow.hooks.hdfs_hook.AutoConfigClient')
    @mock.patch('airflow.hooks.hdfs_hook.HDFSHook.get_connections')
    def test_get_autoconfig_client(self, mock_get_connections,
                                   MockAutoConfigClient):
        """extra {'autoconfig': true} on the connection selects AutoConfigClient."""
        c = models.Connection(conn_id='hdfs', conn_type='hdfs',
                              host='localhost', port=8020, login='foo',
                              extra=json.dumps({'autoconfig': True}))
        mock_get_connections.return_value = [c]
        HDFSHook(hdfs_conn_id='hdfs').get_conn()
        MockAutoConfigClient.assert_called_once_with(effective_user='foo',
                                                     use_sasl=False)

    @mock.patch('airflow.hooks.hdfs_hook.AutoConfigClient')
    def test_get_autoconfig_client_no_conn(self, MockAutoConfigClient):
        """autoconfig=True with a missing connection still builds an AutoConfigClient."""
        HDFSHook(hdfs_conn_id='hdfs_missing', autoconfig=True).get_conn()
        MockAutoConfigClient.assert_called_once_with(effective_user=None,
                                                     use_sasl=False)

    @mock.patch('airflow.hooks.hdfs_hook.HDFSHook.get_connections')
    def test_get_ha_client(self, mock_get_connections):
        """Two namenode connections yield a high-availability HAClient."""
        c1 = models.Connection(conn_id='hdfs_default', conn_type='hdfs',
                               host='localhost', port=8020)
        c2 = models.Connection(conn_id='hdfs_default', conn_type='hdfs',
                               host='localhost2', port=8020)
        mock_get_connections.return_value = [c1, c2]
        client = HDFSHook().get_conn()
        self.assertIsInstance(client, snakebite.client.HAClient)
# HttpHook is optional as well; None makes the HttpHookTest class skip itself.
try:
    from airflow.hooks.http_hook import HttpHook
except ImportError:
    HttpHook = None
@unittest.skipIf(HttpHook is None,
                 "Skipping test because HttpHook is not installed")
class HttpHookTest(unittest.TestCase):
    """Checks that HttpHook derives base_url from the Connection record."""

    def setUp(self):
        configuration.load_test_config()

    @mock.patch('airflow.hooks.http_hook.HttpHook.get_connection')
    def test_http_connection(self, mock_get_connection):
        """schema == 'http' produces an http:// base URL."""
        mock_get_connection.return_value = models.Connection(
            conn_id='http_default', conn_type='http',
            host='localhost', schema='http')
        hook = HttpHook()
        hook.get_conn({})
        self.assertEqual(hook.base_url, 'http://localhost')

    @mock.patch('airflow.hooks.http_hook.HttpHook.get_connection')
    def test_https_connection(self, mock_get_connection):
        """schema == 'https' produces an https:// base URL."""
        mock_get_connection.return_value = models.Connection(
            conn_id='http_default', conn_type='http',
            host='localhost', schema='https')
        hook = HttpHook()
        hook.get_conn({})
        self.assertEqual(hook.base_url, 'https://localhost')

    @mock.patch('airflow.hooks.http_hook.HttpHook.get_connection')
    def test_host_encoded_http_connection(self, mock_get_connection):
        """A host already carrying http:// is used verbatim."""
        mock_get_connection.return_value = models.Connection(
            conn_id='http_default', conn_type='http',
            host='http://localhost')
        hook = HttpHook()
        hook.get_conn({})
        self.assertEqual(hook.base_url, 'http://localhost')

    @mock.patch('airflow.hooks.http_hook.HttpHook.get_connection')
    def test_host_encoded_https_connection(self, mock_get_connection):
        """A host already carrying https:// is used verbatim."""
        mock_get_connection.return_value = models.Connection(
            conn_id='http_default', conn_type='http',
            host='https://localhost')
        hook = HttpHook()
        hook.get_conn({})
        self.assertEqual(hook.base_url, 'https://localhost')
# Module-level mock used as a pluggable e-mail backend; EmailTest points the
# EMAIL_BACKEND option at 'tests.core.send_email_test' to exercise it.
send_email_test = mock.Mock()
class EmailTest(unittest.TestCase):
    """Tests selection of the e-mail backend in utils.email.send_email."""

    def setUp(self):
        # Make sure no custom backend is configured before each test.
        configuration.conf.remove_option('email', 'EMAIL_BACKEND')

    @mock.patch('airflow.utils.email.send_email')
    def test_default_backend(self, mock_send_email):
        """Without EMAIL_BACKEND set, send_email itself is the backend used."""
        res = utils.email.send_email('to', 'subject', 'content')
        mock_send_email.assert_called_with('to', 'subject', 'content')
        self.assertEqual(mock_send_email.return_value, res)

    @mock.patch('airflow.utils.email.send_email_smtp')
    def test_custom_backend(self, mock_send_email):
        """A configured EMAIL_BACKEND is called instead of the SMTP backend."""
        configuration.conf.set('email', 'EMAIL_BACKEND', 'tests.core.send_email_test')
        utils.email.send_email('to', 'subject', 'content')
        # The module-level mock above receives the full default argument set.
        send_email_test.assert_called_with(
            'to', 'subject', 'content', files=None, dryrun=False,
            cc=None, bcc=None, mime_charset='us-ascii', mime_subtype='mixed')
        self.assertFalse(mock_send_email.called)
class EmailSmtpTest(unittest.TestCase):
    """Tests for the SMTP backend: send_email_smtp and send_MIME_email."""

    def setUp(self):
        # Default to the plain-SMTP (non-SSL) code path.
        configuration.conf.set('smtp', 'SMTP_SSL', 'False')

    @mock.patch('airflow.utils.email.send_MIME_email')
    def test_send_smtp(self, mock_send_mime):
        """Message carries subject/from/to plus the file attachment."""
        attachment = tempfile.NamedTemporaryFile()
        attachment.write(b'attachment')
        attachment.seek(0)
        utils.email.send_email_smtp('to', 'subject', 'content', files=[attachment.name])
        self.assertTrue(mock_send_mime.called)
        call_args = mock_send_mime.call_args[0]
        self.assertEqual(configuration.conf.get('smtp', 'SMTP_MAIL_FROM'), call_args[0])
        self.assertEqual(['to'], call_args[1])
        msg = call_args[2]
        self.assertEqual('subject', msg['Subject'])
        self.assertEqual(configuration.conf.get('smtp', 'SMTP_MAIL_FROM'), msg['From'])
        # Payload is [body, attachment].
        self.assertEqual(2, len(msg.get_payload()))
        filename = u'attachment; filename="' + os.path.basename(attachment.name) + '"'
        self.assertEqual(filename, msg.get_payload()[-1].get(u'Content-Disposition'))
        mimeapp = MIMEApplication('attachment')
        self.assertEqual(mimeapp.get_payload(), msg.get_payload()[-1].get_payload())

    @mock.patch('airflow.utils.email.send_MIME_email')
    def test_send_smtp_with_multibyte_content(self, mock_send_mime):
        """UTF-8 body content survives MIME encoding."""
        utils.email.send_email_smtp('to', 'subject', '🔥', mime_charset='utf-8')
        self.assertTrue(mock_send_mime.called)
        call_args = mock_send_mime.call_args[0]
        msg = call_args[2]
        mimetext = MIMEText('🔥', 'mixed', 'utf-8')
        self.assertEqual(mimetext.get_payload(), msg.get_payload()[0].get_payload())

    @mock.patch('airflow.utils.email.send_MIME_email')
    def test_send_bcc_smtp(self, mock_send_mime):
        """cc/bcc recipients are included in the envelope recipient list."""
        attachment = tempfile.NamedTemporaryFile()
        attachment.write(b'attachment')
        attachment.seek(0)
        utils.email.send_email_smtp('to', 'subject', 'content', files=[attachment.name], cc='cc', bcc='bcc')
        self.assertTrue(mock_send_mime.called)
        call_args = mock_send_mime.call_args[0]
        self.assertEqual(configuration.conf.get('smtp', 'SMTP_MAIL_FROM'), call_args[0])
        self.assertEqual(['to', 'cc', 'bcc'], call_args[1])
        msg = call_args[2]
        self.assertEqual('subject', msg['Subject'])
        self.assertEqual(configuration.conf.get('smtp', 'SMTP_MAIL_FROM'), msg['From'])
        self.assertEqual(2, len(msg.get_payload()))
        self.assertEqual(u'attachment; filename="' + os.path.basename(attachment.name) + '"',
                         msg.get_payload()[-1].get(u'Content-Disposition'))
        mimeapp = MIMEApplication('attachment')
        self.assertEqual(mimeapp.get_payload(), msg.get_payload()[-1].get_payload())

    @mock.patch('smtplib.SMTP_SSL')
    @mock.patch('smtplib.SMTP')
    def test_send_mime(self, mock_smtp, mock_smtp_ssl):
        """Plain SMTP path: connect, STARTTLS, login, sendmail, quit."""
        mock_smtp.return_value = mock.Mock()
        mock_smtp_ssl.return_value = mock.Mock()
        msg = MIMEMultipart()
        utils.email.send_MIME_email('from', 'to', msg, dryrun=False)
        mock_smtp.assert_called_with(
            configuration.conf.get('smtp', 'SMTP_HOST'),
            configuration.conf.getint('smtp', 'SMTP_PORT'),
        )
        self.assertTrue(mock_smtp.return_value.starttls.called)
        mock_smtp.return_value.login.assert_called_with(
            configuration.conf.get('smtp', 'SMTP_USER'),
            configuration.conf.get('smtp', 'SMTP_PASSWORD'),
        )
        mock_smtp.return_value.sendmail.assert_called_with('from', 'to', msg.as_string())
        self.assertTrue(mock_smtp.return_value.quit.called)

    @mock.patch('smtplib.SMTP_SSL')
    @mock.patch('smtplib.SMTP')
    def test_send_mime_ssl(self, mock_smtp, mock_smtp_ssl):
        """With SMTP_SSL=True, smtplib.SMTP_SSL is used instead of SMTP."""
        configuration.conf.set('smtp', 'SMTP_SSL', 'True')
        mock_smtp.return_value = mock.Mock()
        mock_smtp_ssl.return_value = mock.Mock()
        utils.email.send_MIME_email('from', 'to', MIMEMultipart(), dryrun=False)
        self.assertFalse(mock_smtp.called)
        mock_smtp_ssl.assert_called_with(
            configuration.conf.get('smtp', 'SMTP_HOST'),
            configuration.conf.getint('smtp', 'SMTP_PORT'),
        )

    @mock.patch('smtplib.SMTP_SSL')
    @mock.patch('smtplib.SMTP')
    def test_send_mime_noauth(self, mock_smtp, mock_smtp_ssl):
        """Without SMTP_USER/SMTP_PASSWORD configured, no login is attempted."""
        configuration.conf.remove_option('smtp', 'SMTP_USER')
        configuration.conf.remove_option('smtp', 'SMTP_PASSWORD')
        mock_smtp.return_value = mock.Mock()
        mock_smtp_ssl.return_value = mock.Mock()
        utils.email.send_MIME_email('from', 'to', MIMEMultipart(), dryrun=False)
        self.assertFalse(mock_smtp_ssl.called)
        mock_smtp.assert_called_with(
            configuration.conf.get('smtp', 'SMTP_HOST'),
            configuration.conf.getint('smtp', 'SMTP_PORT'),
        )
        self.assertFalse(mock_smtp.login.called)

    @mock.patch('smtplib.SMTP_SSL')
    @mock.patch('smtplib.SMTP')
    def test_send_mime_dryrun(self, mock_smtp, mock_smtp_ssl):
        """dryrun=True must not open any SMTP connection at all."""
        utils.email.send_MIME_email('from', 'to', MIMEMultipart(), dryrun=True)
        self.assertFalse(mock_smtp.called)
        self.assertFalse(mock_smtp_ssl.called)
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| {
"content_hash": "aea6cde9003fa4a1cdb298bb432f0c9a",
"timestamp": "",
"source": "github",
"line_count": 2506,
"max_line_length": 109,
"avg_line_length": 39.89385474860335,
"alnum_prop": 0.5872526856982816,
"repo_name": "RealImpactAnalytics/airflow",
"id": "ce32482d04f2a45c9d8f5a4364310cf508aca4f2",
"size": "100792",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/core.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "109698"
},
{
"name": "HTML",
"bytes": "270710"
},
{
"name": "JavaScript",
"bytes": "1988427"
},
{
"name": "Mako",
"bytes": "1284"
},
{
"name": "Python",
"bytes": "3765458"
},
{
"name": "Shell",
"bytes": "46923"
}
],
"symlink_target": ""
} |
<?php
namespace Magento\Braintree\Block\Customer\PayPal;
use Magento\Braintree\Gateway\Config\PayPal\Config;
use Magento\Braintree\Model\Ui\PayPal\ConfigProvider;
use Magento\Framework\View\Element\Template;
use Magento\Vault\Api\Data\PaymentTokenInterface;
use Magento\Vault\Block\AbstractTokenRenderer;
/**
 * Renders stored Braintree PayPal vault payment tokens.
 */
class VaultTokenRenderer extends AbstractTokenRenderer
{
    /**
     * Braintree PayPal gateway configuration.
     *
     * @var Config
     */
    private $config;

    /**
     * @param Template\Context $context
     * @param Config $config
     * @param array $data
     */
    public function __construct(
        Template\Context $context,
        Config $config,
        array $data = []
    ) {
        parent::__construct($context, $data);
        $this->config = $config;
    }

    /**
     * @inheritdoc
     */
    public function getIconUrl()
    {
        $icon = $this->config->getPayPalIcon();

        return $icon['url'];
    }

    /**
     * @inheritdoc
     */
    public function getIconHeight()
    {
        $icon = $this->config->getPayPalIcon();

        return $icon['height'];
    }

    /**
     * @inheritdoc
     */
    public function getIconWidth()
    {
        $icon = $this->config->getPayPalIcon();

        return $icon['width'];
    }

    /**
     * Can render specified token
     *
     * @param PaymentTokenInterface $token
     * @return boolean
     */
    public function canRender(PaymentTokenInterface $token)
    {
        $isPayPalToken = $token->getPaymentMethodCode() === ConfigProvider::PAYPAL_CODE;

        return $isPayPalToken;
    }

    /**
     * Get email of PayPal payer
     *
     * @return string
     */
    public function getPayerEmail()
    {
        $details = $this->getTokenDetails();

        return $details['payerEmail'];
    }
}
| {
"content_hash": "eb2ab086808db0cc3adcd3debe1ff509",
"timestamp": "",
"source": "github",
"line_count": 73,
"max_line_length": 78,
"avg_line_length": 21.027397260273972,
"alnum_prop": 0.6104234527687297,
"repo_name": "j-froehlich/magento2_wk",
"id": "33458b399e7956b2ca77f0e192ee701b9945d0bd",
"size": "1643",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "vendor/magento/module-braintree/Block/Customer/PayPal/VaultTokenRenderer.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ApacheConf",
"bytes": "13636"
},
{
"name": "CSS",
"bytes": "2076720"
},
{
"name": "HTML",
"bytes": "6151072"
},
{
"name": "JavaScript",
"bytes": "2488727"
},
{
"name": "PHP",
"bytes": "12466046"
},
{
"name": "Shell",
"bytes": "6088"
},
{
"name": "XSLT",
"bytes": "19979"
}
],
"symlink_target": ""
} |
cask "backblaze" do
  version "8.0.1.588"
  # The download URL is unversioned, so there is no stable checksum to pin.
  sha256 :no_check

  url "https://secure.backblaze.com/mac/install_backblaze.dmg"
  name "Backblaze"
  desc "Data backup and storage service"
  homepage "https://backblaze.com/"

  # Track the mac version field of Backblaze's client-version feed.
  livecheck do
    url "https://secure.backblaze.com/api/clientversion.xml"
    regex(/mac[._-]version=.*?(\d+(?:\.\d+)+)/i)
  end

  auto_updates true

  # The DMG ships an installer app rather than the app bundle itself.
  installer manual: "Backblaze Installer.app"

  uninstall launchctl: [
    "com.backblaze.bzbmenu",
    "com.backblaze.bzserv",
  ],
            delete: [
    "#{appdir}/Backblaze.app",
    "/Library/Logs/DiagnosticReports/bzbmenu_*.*_resource.diag",
    "/Library/PreferencePanes/BackblazeBackup.prefPane",
  ]

  zap trash: [
    "/Library/Backblaze.bzpkg",
    "~/Library/Application Support/com.apple.sharedfilelist/com.apple.LSSharedFileList.ApplicationRecentDocuments/com.backblaze.*.sfl*",
    "~/Library/Logs/BackblazeGUIInstaller",
    "~/Library/Preferences/com.backblaze.bzbmenu.plist",
  ]
end
| {
"content_hash": "7bbd7287cf7172c30d1ec71f6fea190f",
"timestamp": "",
"source": "github",
"line_count": 35,
"max_line_length": 136,
"avg_line_length": 29.285714285714285,
"alnum_prop": 0.6585365853658537,
"repo_name": "a1russell/homebrew-cask",
"id": "eaa464d0c4da389c0f0d910a79880eb81d8b64ca",
"size": "1025",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "Casks/backblaze.rb",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Dockerfile",
"bytes": "249"
},
{
"name": "Python",
"bytes": "3630"
},
{
"name": "Ruby",
"bytes": "2917557"
},
{
"name": "Shell",
"bytes": "32035"
}
],
"symlink_target": ""
} |
let CopyWebpackPlugin = require('copy-webpack-plugin')
let path = require('path')
// Webpack 1.x build configuration (modulesDirectories is a webpack-1 option):
// bundles src/entryPoint.js (prefixed with babel-polyfill) into dist/bundle.js
// and copies the static index.html into the output directory.
module.exports = {
  entry: ['babel-polyfill', './src/entryPoint.js'],
  output: {
    path: './dist',
    filename: 'bundle.js'
  },
  module: {
    loaders: [
      {
        test: /\.css$/,
        loader: 'style!css'
      },
      {
        test: /\.scss$/,
        loader: 'style!css!sass'
      },
      {
        test: /\.es6$/,
        exclude: /node_modules/,
        loader: 'babel-loader',
        query: {
          presets: ['es2015'] // shorthand for 'babel-loader?presets[]=es2015'. Only works when there's a single loader
        }
      },
      {
        // Legacy jQuery plugins expect jQuery/$ as globals and `this` === window.
        test: [
          /web_modules\/chosen\/.+\.(jsx|js)$/,
          /web_modules\/datetime-picker\/bootstrap-datetimepicker.js/,
          /node_modules\/moment\/locale/,
          /web_modules\/bootstrap.js/
        ],
        loader: 'imports?jQuery=jquery,$=jquery,this=>window'
      },
      {
        // App sources that both use jQuery globals and need ES2015 transpiling.
        test: [
          /src\/js\/view-logic\.es6$/,
          /src\/js\/windows\.es6$/,
          /src\/js\/animations\.es6$/,
          /main\.es6$/
        ],
        loaders: ['imports?jQuery=jquery,$=jquery,this=>window', 'babel-loader?presets[]=es2015']
      },
      {
        // test: /node_modules\/bootstrap\/dist\/js\/.+\.(jsx|js)$/,
        test: /.+\.(jsx|js)$/,
        // http://reactkungfu.com/2015/10/integrating-jquery-chosen-with-webpack-using-imports-loader/
        loader: 'babel-loader!imports?jQuery=jquery,$=jquery,this=>window'
      },
      {
        // Hash image filenames and run them through image-webpack optimisation.
        test: /\.(jpe?g|png|gif|svg)$/i,
        loaders: [
          'file?hash=sha512&digest=hex&name=[hash].[ext]',
          'image-webpack?bypassOnDebug&optimizationLevel=7&interlaced=false'
        ]
      },
      // Font handling: inline small woff files, reference the rest by URL.
      { test: /\.eot(\?v=\d+\.\d+\.\d+)?$/, loader: 'file' },
      { test: /\.(woff|woff2)$/, loader:'url?prefix=font/&limit=5000' },
      { test: /\.ttf(\?v=\d+\.\d+\.\d+)?$/, loader: 'url?limit=10000&mimetype=application/octet-stream' },
      { test: /\.svg(\?v=\d+\.\d+\.\d+)?$/, loader: 'url?limit=10000&mimetype=image/svg+xml' }
    ]
  },
  plugins: [
    new CopyWebpackPlugin([
      {from: 'src/index.html', to:'index.html'}
    ])
  ],
  resolve: {
    //places to look for dependencies
    //https://webpack.github.io/docs/configuration.html#resolve-modulesdirectories
    modulesDirectories: ['node_modules', 'web_modules', 'src/js'],
    // lets us specify what kind of file types we can process without specifically giving them a file extension
    // require('./logger') instead of require('./logger.es6')
    extensions: ['', '.js', '.es6', '.css', '.scss', '.ts']
  }
} | {
"content_hash": "9ff3ce2c9994406f49ed39ba83f9cbea",
"timestamp": "",
"source": "github",
"line_count": 80,
"max_line_length": 119,
"avg_line_length": 33.0625,
"alnum_prop": 0.5364839319470699,
"repo_name": "EliasCole3/game-723",
"id": "458254db4ec589a2ec454f74d7aa63410e3a5ea9",
"size": "2645",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "webpack.config.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "14923"
},
{
"name": "HTML",
"bytes": "2142"
},
{
"name": "JavaScript",
"bytes": "156285"
}
],
"symlink_target": ""
} |
<!DOCTYPE html>
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<meta charset="utf-8" />
<title>statsmodels.sandbox.stats.multicomp.MultiComparison.allpairtest — statsmodels v0.10.1 documentation</title>
<link rel="stylesheet" href="../_static/nature.css" type="text/css" />
<link rel="stylesheet" href="../_static/pygments.css" type="text/css" />
<link rel="stylesheet" type="text/css" href="../_static/graphviz.css" />
<script type="text/javascript" id="documentation_options" data-url_root="../" src="../_static/documentation_options.js"></script>
<script type="text/javascript" src="../_static/jquery.js"></script>
<script type="text/javascript" src="../_static/underscore.js"></script>
<script type="text/javascript" src="../_static/doctools.js"></script>
<script type="text/javascript" src="../_static/language_data.js"></script>
<script async="async" type="text/javascript" src="https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.5/latest.js?config=TeX-AMS-MML_HTMLorMML"></script>
<link rel="shortcut icon" href="../_static/statsmodels_hybi_favico.ico"/>
<link rel="author" title="About these documents" href="../about.html" />
<link rel="index" title="Index" href="../genindex.html" />
<link rel="search" title="Search" href="../search.html" />
<link rel="next" title="statsmodels.sandbox.stats.multicomp.MultiComparison.getranks" href="statsmodels.sandbox.stats.multicomp.MultiComparison.getranks.html" />
<link rel="prev" title="statsmodels.sandbox.stats.multicomp.MultiComparison" href="statsmodels.sandbox.stats.multicomp.MultiComparison.html" />
<link rel="stylesheet" href="../_static/examples.css" type="text/css" />
<link rel="stylesheet" href="../_static/facebox.css" type="text/css" />
<script type="text/javascript" src="../_static/scripts.js">
</script>
<script type="text/javascript" src="../_static/facebox.js">
</script>
<script type="text/javascript">
$.facebox.settings.closeImage = "../_static/closelabel.png"
$.facebox.settings.loadingImage = "../_static/loading.gif"
</script>
<script>
$(document).ready(function() {
$.getJSON("../../versions.json", function(versions) {
var dropdown = document.createElement("div");
dropdown.className = "dropdown";
var button = document.createElement("button");
button.className = "dropbtn";
button.innerHTML = "Other Versions";
var content = document.createElement("div");
content.className = "dropdown-content";
dropdown.appendChild(button);
dropdown.appendChild(content);
$(".header").prepend(dropdown);
for (var i = 0; i < versions.length; i++) {
if (versions[i].substring(0, 1) == "v") {
versions[i] = [versions[i], versions[i].substring(1)];
} else {
versions[i] = [versions[i], versions[i]];
};
};
for (var i = 0; i < versions.length; i++) {
var a = document.createElement("a");
a.innerHTML = versions[i][1];
a.href = "../../" + versions[i][0] + "/index.html";
a.title = versions[i][1];
$(".dropdown-content").append(a);
};
});
});
</script>
</head><body>
<div class="headerwrap">
<div class = "header">
<a href = "../index.html">
<img src="../_static/statsmodels_hybi_banner.png" alt="Logo"
style="padding-left: 15px"/></a>
</div>
</div>
<div class="related" role="navigation" aria-label="related navigation">
<h3>Navigation</h3>
<ul>
<li class="right" style="margin-right: 10px">
<a href="../genindex.html" title="General Index"
accesskey="I">index</a></li>
<li class="right" >
<a href="../py-modindex.html" title="Python Module Index"
>modules</a> |</li>
<li class="right" >
<a href="statsmodels.sandbox.stats.multicomp.MultiComparison.getranks.html" title="statsmodels.sandbox.stats.multicomp.MultiComparison.getranks"
accesskey="N">next</a> |</li>
<li class="right" >
<a href="statsmodels.sandbox.stats.multicomp.MultiComparison.html" title="statsmodels.sandbox.stats.multicomp.MultiComparison"
accesskey="P">previous</a> |</li>
<li><a href ="../install.html">Install</a></li> |
<li><a href="https://groups.google.com/forum/?hl=en#!forum/pystatsmodels">Support</a></li> |
<li><a href="https://github.com/statsmodels/statsmodels/issues">Bugs</a></li> |
<li><a href="../dev/index.html">Develop</a></li> |
<li><a href="../examples/index.html">Examples</a></li> |
<li><a href="../faq.html">FAQ</a></li> |
<li class="nav-item nav-item-1"><a href="../stats.html" >Statistics <code class="xref py py-mod docutils literal notranslate"><span class="pre">stats</span></code></a> |</li>
<li class="nav-item nav-item-2"><a href="statsmodels.sandbox.stats.multicomp.MultiComparison.html" accesskey="U">statsmodels.sandbox.stats.multicomp.MultiComparison</a> |</li>
</ul>
</div>
<div class="document">
<div class="documentwrapper">
<div class="bodywrapper">
<div class="body" role="main">
<div class="section" id="statsmodels-sandbox-stats-multicomp-multicomparison-allpairtest">
<h1>statsmodels.sandbox.stats.multicomp.MultiComparison.allpairtest<a class="headerlink" href="#statsmodels-sandbox-stats-multicomp-multicomparison-allpairtest" title="Permalink to this headline">¶</a></h1>
<p>method</p>
<dl class="method">
<dt id="statsmodels.sandbox.stats.multicomp.MultiComparison.allpairtest">
<code class="sig-prename descclassname">MultiComparison.</code><code class="sig-name descname">allpairtest</code><span class="sig-paren">(</span><em class="sig-param">testfunc</em>, <em class="sig-param">alpha=0.05</em>, <em class="sig-param">method='bonf'</em>, <em class="sig-param">pvalidx=1</em><span class="sig-paren">)</span><a class="reference internal" href="../_modules/statsmodels/sandbox/stats/multicomp.html#MultiComparison.allpairtest"><span class="viewcode-link">[source]</span></a><a class="headerlink" href="#statsmodels.sandbox.stats.multicomp.MultiComparison.allpairtest" title="Permalink to this definition">¶</a></dt>
<dd><p>run a pairwise test on all pairs with multiple test correction</p>
<p>The statistical test given in testfunc is calculated for all pairs
and the p-values are adjusted by methods in multipletests. The p-value
correction is generic and based only on the p-values, and does not
take any special structure of the hypotheses into account.</p>
<dl class="field-list simple">
<dt class="field-odd">Parameters</dt>
<dd class="field-odd"><dl class="simple">
<dt><strong>testfunc</strong><span class="classifier">function</span></dt><dd><p>A test function for two (independent) samples. It is assumed that
the return value on position pvalidx is the p-value.</p>
</dd>
<dt><strong>alpha</strong><span class="classifier">float</span></dt><dd><p>familywise error rate</p>
</dd>
<dt><strong>method</strong><span class="classifier">string</span></dt><dd><p>This specifies the method for the p-value correction. Any method
of multipletests is possible.</p>
</dd>
<dt><strong>pvalidx</strong><span class="classifier">int (default: 1)</span></dt><dd><p>position of the p-value in the return of testfunc</p>
</dd>
</dl>
</dd>
<dt class="field-even">Returns</dt>
<dd class="field-even"><dl class="simple">
<dt><strong>sumtab</strong><span class="classifier">SimpleTable instance</span></dt><dd><p>summary table for printing</p>
</dd>
<dt><strong>errors: TODO: check if this is still wrong, I think it’s fixed.</strong></dt><dd></dd>
<dt><strong>results from multipletests are in different order</strong></dt><dd></dd>
<dt><strong>pval_corrected can be larger than 1 ???</strong></dt><dd></dd>
</dl>
</dd>
</dl>
</dd></dl>
</div>
</div>
</div>
</div>
<div class="sphinxsidebar" role="navigation" aria-label="main navigation">
<div class="sphinxsidebarwrapper">
<h4>Previous topic</h4>
<p class="topless"><a href="statsmodels.sandbox.stats.multicomp.MultiComparison.html"
title="previous chapter">statsmodels.sandbox.stats.multicomp.MultiComparison</a></p>
<h4>Next topic</h4>
<p class="topless"><a href="statsmodels.sandbox.stats.multicomp.MultiComparison.getranks.html"
title="next chapter">statsmodels.sandbox.stats.multicomp.MultiComparison.getranks</a></p>
<div role="note" aria-label="source link">
<h3>This Page</h3>
<ul class="this-page-menu">
<li><a href="../_sources/generated/statsmodels.sandbox.stats.multicomp.MultiComparison.allpairtest.rst.txt"
rel="nofollow">Show Source</a></li>
</ul>
</div>
<div id="searchbox" style="display: none" role="search">
<h3 id="searchlabel">Quick search</h3>
<div class="searchformwrapper">
<form class="search" action="../search.html" method="get">
<input type="text" name="q" aria-labelledby="searchlabel" />
<input type="submit" value="Go" />
</form>
</div>
</div>
<script type="text/javascript">$('#searchbox').show(0);</script>
</div>
</div>
<div class="clearer"></div>
</div>
<div class="footer" role="contentinfo">
© Copyright 2009-2018, Josef Perktold, Skipper Seabold, Jonathan Taylor, statsmodels-developers.
Created using <a href="http://sphinx-doc.org/">Sphinx</a> 2.1.2.
</div>
</body>
</html> | {
"content_hash": "f8bfedfb43f7580119747c2f5dd0d1fa",
"timestamp": "",
"source": "github",
"line_count": 193,
"max_line_length": 637,
"avg_line_length": 49.19170984455958,
"alnum_prop": 0.6642089740888982,
"repo_name": "statsmodels/statsmodels.github.io",
"id": "222e55b1bb2751f9668588e1255410affa14d05c",
"size": "9500",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "v0.10.1/generated/statsmodels.sandbox.stats.multicomp.MultiComparison.allpairtest.html",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
#include <zephyr.h>
#include <flash.h>
#include <device.h>
#include <stdio.h>
/* Offset between pages */
#define FLASH_TEST_OFFSET 0x40000
/* Size in bytes of one flash page on this target */
#define FLASH_PAGE_SIZE 4096
/* Arbitrary word patterns written to flash and read back for verification */
#define TEST_DATA_WORD_0 0x1122
#define TEST_DATA_WORD_1 0xaabb
#define TEST_DATA_WORD_2 0xabcd
#define TEST_DATA_WORD_3 0x1234
/*
 * Erases `size` bytes of flash at `offset` and prints the result.
 * Mirrors the result reporting of the original inline code exactly.
 */
static void test_flash_erase(struct device *flash_dev, uint32_t offset,
			     uint32_t size)
{
	if (flash_erase(flash_dev, offset, size) != 0) {
		printf("  Flash erase failed!\n");
	} else {
		printf("  Flash erase succeeded!\n");
	}
}

/*
 * Writes each word of `buf` at FLASH_TEST_OFFSET + (i << 2) + byte_offset,
 * reads it back and prints whether the values match.  `byte_offset` lets the
 * caller force non-word-aligned accesses (Test 6 uses 1).
 *
 * Returns 0 on success.  On a write/read error it returns -1 and -- like the
 * original inline code -- deliberately leaves write protection disabled.
 */
static int test_flash_write_read(struct device *flash_dev,
				 const uint32_t *buf, uint32_t count,
				 uint32_t byte_offset)
{
	uint32_t buf_word = 0;
	uint32_t i, offset;

	flash_write_protection_set(flash_dev, false);
	for (i = 0; i < count; i++) {
		offset = FLASH_TEST_OFFSET + (i << 2) + byte_offset;
		printf("  Attempted to write %x at 0x%x\n", buf[i], offset);
		if (flash_write(flash_dev, offset, &buf[i],
				sizeof(uint32_t)) != 0) {
			printf("  Flash write failed!\n");
			return -1;
		}
		printf("  Attempted to read 0x%x\n", offset);
		if (flash_read(flash_dev, offset, &buf_word,
			       sizeof(uint32_t)) != 0) {
			printf("  Flash read failed!\n");
			return -1;
		}
		printf("  Data read: %x\n", buf_word);
		if (buf[i] == buf_word) {
			printf("  Data read matches data written. Good!\n");
		} else {
			printf("  Data read does not match data written!\n");
		}
	}
	flash_write_protection_set(flash_dev, true);

	return 0;
}

/*
 * Exercises the nRF5 flash driver: alternating erase and write/read-back
 * passes, including one pass of non-word-aligned writes.  The three original
 * near-identical loops are factored into test_flash_write_read() above; the
 * printed output is unchanged.
 */
void main(void)
{
	struct device *flash_dev;
	uint32_t buf_array_1[4] = { TEST_DATA_WORD_0, TEST_DATA_WORD_1,
				    TEST_DATA_WORD_2, TEST_DATA_WORD_3 };
	uint32_t buf_array_2[4] = { TEST_DATA_WORD_3, TEST_DATA_WORD_1,
				    TEST_DATA_WORD_2, TEST_DATA_WORD_0 };
	uint32_t buf_array_3[8] = { TEST_DATA_WORD_0, TEST_DATA_WORD_1,
				    TEST_DATA_WORD_2, TEST_DATA_WORD_3,
				    TEST_DATA_WORD_0, TEST_DATA_WORD_1,
				    TEST_DATA_WORD_2, TEST_DATA_WORD_3 };
	uint32_t offset;

	printf("\nNordic nRF5 Flash Testing\n");
	printf("=========================\n");

	flash_dev = device_get_binding(CONFIG_SOC_FLASH_NRF5_DEV_NAME);
	if (!flash_dev) {
		printf("Nordic nRF5 flash driver was not found!\n");
		return;
	}

	printf("\nTest 1: Flash erase page at 0x%x\n", FLASH_TEST_OFFSET);
	test_flash_erase(flash_dev, FLASH_TEST_OFFSET, FLASH_PAGE_SIZE);

	printf("\nTest 2: Flash write (word array 1)\n");
	if (test_flash_write_read(flash_dev, buf_array_1,
				  ARRAY_SIZE(buf_array_1), 0) != 0) {
		return;
	}

	offset = FLASH_TEST_OFFSET - FLASH_PAGE_SIZE * 2;
	printf("\nTest 3: Flash erase (4 pages at 0x%x)\n", offset);
	test_flash_erase(flash_dev, offset, FLASH_PAGE_SIZE * 4);

	printf("\nTest 4: Flash write (word array 2)\n");
	if (test_flash_write_read(flash_dev, buf_array_2,
				  ARRAY_SIZE(buf_array_2), 0) != 0) {
		return;
	}

	printf("\nTest 5: Flash erase page at 0x%x\n", FLASH_TEST_OFFSET);
	test_flash_erase(flash_dev, FLASH_TEST_OFFSET, FLASH_PAGE_SIZE);

	/* byte_offset = 1 forces the writes off word alignment. */
	printf("\nTest 6: Non-word aligned write (word array 3)\n");
	if (test_flash_write_read(flash_dev, buf_array_3,
				  ARRAY_SIZE(buf_array_3), 1) != 0) {
		return;
	}
}
| {
"content_hash": "3a5033909109133e746af5680dc05ec0",
"timestamp": "",
"source": "github",
"line_count": 139,
"max_line_length": 71,
"avg_line_length": 31.158273381294965,
"alnum_prop": 0.6157931193719696,
"repo_name": "tidyjiang8/zephyr-doc",
"id": "e956eec54b72383ebd2cfce3bd1b026295b7d681",
"size": "4457",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "samples/drivers/soc_flash_nrf5/src/main.c",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "320163"
},
{
"name": "C",
"bytes": "9322795"
},
{
"name": "C++",
"bytes": "170106"
},
{
"name": "Lex",
"bytes": "11299"
},
{
"name": "Makefile",
"bytes": "102165"
},
{
"name": "Objective-C",
"bytes": "11870"
},
{
"name": "Perl",
"bytes": "83479"
},
{
"name": "Prolog",
"bytes": "185732"
},
{
"name": "Python",
"bytes": "181376"
},
{
"name": "Shell",
"bytes": "44243"
},
{
"name": "Verilog",
"bytes": "1449"
},
{
"name": "Yacc",
"bytes": "15396"
}
],
"symlink_target": ""
} |
<?php
/*TODO: Check the usage of call-now component, or we are using #55 from features.php #2652*/

// Renders the click-to-call phone link markup into the AMP template when the
// "call now" option is enabled in the Redux options panel.
add_action('amp_call_button','amp_call_button_html_output');
function amp_call_button_html_output(){
    global $redux_builder_amp;
    // Option values come from the plugin's Redux options panel; the phone
    // number is escaped for the href attribute via esc_attr().
    if ( $redux_builder_amp['ampforwp-callnow-button'] ) { ?>
<div class="amp-phone">
<a href="tel:<?php echo esc_attr($redux_builder_amp['enable-amp-call-numberfield']); ?>"></a>
</div> <?php
    }
}
// Inject the call-now button CSS into the AMP template's inline style
// output (hook priority 11).
add_action('amp_post_template_css','amp_callnow_styles',11);
// Prints raw CSS when the call-now option is enabled; the a:before border
// trick draws the handset glyph in the configured colour.
// NOTE(review): the colour value is echoed into CSS unescaped — assumed to
// come from the Redux colour picker; verify it cannot carry user input.
function amp_callnow_styles(){
	global $redux_builder_amp; if ($redux_builder_amp['ampforwp-callnow-button']) { ?>
	.amp-phone{ position:relative }
	.amp-phone a:before { content: ""; display:inline-block; width: 4px; height: 8px; border-width: 6px 0 6px 3px; border-style: solid; border-color:<?php echo $redux_builder_amp['amp-opt-color-rgba-colorscheme-call']['color']; ?>; background: transparent; transform: rotate(-30deg); box-sizing: initial; border-top-left-radius: 3px 5px; border-bottom-left-radius: 3px 5px; }
<?php }} ?> | {
"content_hash": "6151b6fcf5da3d608b0f9ccb528819dd",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 371,
"avg_line_length": 60.35294117647059,
"alnum_prop": 0.6988304093567251,
"repo_name": "nicholasgriffintn/Accelerated-Mobile-Pages",
"id": "5f87858a624a40c9e42718615110263aab7b7f84",
"size": "1026",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "components/call-now/call-now.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "120727"
},
{
"name": "JavaScript",
"bytes": "326140"
},
{
"name": "PHP",
"bytes": "1787669"
}
],
"symlink_target": ""
} |
/* TEMPLATE GENERATED TESTCASE FILE
Filename: CWE401_Memory_Leak__int64_t_malloc_63a.c
Label Definition File: CWE401_Memory_Leak.c.label.xml
Template File: sources-sinks-63a.tmpl.c
*/
/*
* @description
* CWE: 401 Memory Leak
* BadSource: malloc Allocate data using malloc()
* GoodSource: Allocate data on the stack
* Sinks:
* GoodSink: call free() on data
* BadSink : no deallocation of data
* Flow Variant: 63 Data flow: pointer to data passed from one function to another in different source files
*
* */
#include "std_testcase.h"
#include <wchar.h>
#ifndef OMITBAD
/* bad function declaration */
/* The sink lives in the companion 63b source file (flow variant 63:
 * pointer-to-pointer passed across translation units); per the header
 * comment above, the bad sink performs no deallocation. */
void CWE401_Memory_Leak__int64_t_malloc_63b_badSink(int64_t * * dataPtr);
/* Bad variant: the heap block allocated here is intentionally never freed
 * — this is the CWE-401 memory leak this generated testcase exhibits.
 * Do NOT "fix" it; analysis tools are expected to flag it. */
void CWE401_Memory_Leak__int64_t_malloc_63_bad()
{
    int64_t * data;
    data = NULL;
    /* POTENTIAL FLAW: Allocate memory on the heap */
    data = (int64_t *)malloc(100*sizeof(int64_t));
    if (data == NULL) {exit(-1);}
    /* Initialize and make use of data */
    data[0] = 5LL;
    printLongLongLine(data[0]);
    /* Hand the pointer to the bad sink, which does not free it. */
    CWE401_Memory_Leak__int64_t_malloc_63b_badSink(&data);
}
#endif /* OMITBAD */
#ifndef OMITGOOD
/* goodG2B uses the GoodSource with the BadSink */
void CWE401_Memory_Leak__int64_t_malloc_63b_goodG2BSink(int64_t * * data);
/* GoodSource/BadSink: stack (ALLOCA) memory needs no free(), so the
 * non-deallocating sink in the 63b file is harmless here. */
static void goodG2B()
{
    int64_t * data;
    data = NULL;
    /* FIX: Use memory allocated on the stack with ALLOCA */
    data = (int64_t *)ALLOCA(100*sizeof(int64_t));
    /* Initialize and make use of data */
    data[0] = 5LL;
    printLongLongLine(data[0]);
    CWE401_Memory_Leak__int64_t_malloc_63b_goodG2BSink(&data);
}
/* goodB2G uses the BadSource with the GoodSink */
void CWE401_Memory_Leak__int64_t_malloc_63b_goodB2GSink(int64_t * * data);
/* BadSource/GoodSink: same heap allocation as the bad variant, but per the
 * header comment the good sink in 63b calls free() on the data. */
static void goodB2G()
{
    int64_t * data;
    data = NULL;
    /* POTENTIAL FLAW: Allocate memory on the heap */
    data = (int64_t *)malloc(100*sizeof(int64_t));
    if (data == NULL) {exit(-1);}
    /* Initialize and make use of data */
    data[0] = 5LL;
    printLongLongLine(data[0]);
    CWE401_Memory_Leak__int64_t_malloc_63b_goodB2GSink(&data);
}
/* Entry point exercising both good flow variants. */
void CWE401_Memory_Leak__int64_t_malloc_63_good()
{
    goodG2B();
    goodB2G();
}
#endif /* OMITGOOD */
/* Below is the main(). It is only used when building this testcase on
its own for testing or for building a binary to use in testing binary
analysis tools. It is not used when compiling all the testcases as one
application, which is how source code analysis tools are tested. */
#ifdef INCLUDEMAIN
int main(int argc, char * argv[])
{
    /* seed randomness */
    /* (template boilerplate — no rand() use is visible in this file) */
    srand( (unsigned)time(NULL) );
#ifndef OMITGOOD
    printLine("Calling good()...");
    CWE401_Memory_Leak__int64_t_malloc_63_good();
    printLine("Finished good()");
#endif /* OMITGOOD */
#ifndef OMITBAD
    printLine("Calling bad()...");
    CWE401_Memory_Leak__int64_t_malloc_63_bad();
    printLine("Finished bad()");
#endif /* OMITBAD */
    return 0;
}
#endif
| {
"content_hash": "379e7a1e49d94de7ba95e156fd78f87d",
"timestamp": "",
"source": "github",
"line_count": 107,
"max_line_length": 108,
"avg_line_length": 28.205607476635514,
"alnum_prop": 0.6454605699138503,
"repo_name": "JianpingZeng/xcc",
"id": "ac28c0e7e115616ad8e2efdfb81c8836769bf6f7",
"size": "3018",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "xcc/test/juliet/testcases/CWE401_Memory_Leak/s01/CWE401_Memory_Leak__int64_t_malloc_63a.c",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
# Balena base image: Alpine 3.12 "run" variant for Raspberry Pi (ARMv6).
FROM balenalib/raspberry-pi-alpine:3.12-run
# Versions installed by the RUN steps below.
ENV NODE_VERSION 17.6.0
ENV YARN_VERSION 1.22.4
# Install dependencies
# Runtime libraries Node needs; libssl1.0 is tried first and falls back to
# libssl1.1 depending on what this Alpine release ships.
RUN apk add --no-cache libgcc libstdc++ libuv \
	&& apk add --no-cache libssl1.0 || apk add --no-cache libssl1.1
# Fetch Node (resin-packages armv6hf build, sha256-checked) and Yarn
# (GPG-verified against the key imported below), then clean up.
RUN buildDeps='curl' \
	&& set -x \
	&& for key in \
	6A010C5166006599AA17F08146C2130DFD2497F5 \
	; do \
		gpg --keyserver pgp.mit.edu --recv-keys "$key" || \
		gpg --keyserver keyserver.pgp.com --recv-keys "$key" || \
		gpg --keyserver keyserver.ubuntu.com --recv-keys "$key" ; \
	done \
	&& apk add --no-cache $buildDeps \
	&& curl -SLO "http://resin-packages.s3.amazonaws.com/node/v$NODE_VERSION/node-v$NODE_VERSION-linux-alpine-armv6hf.tar.gz" \
	&& echo "3867faa9de7afc2a11805a8c057aadeda4cede886d651972e6ee1daa29fdf719 node-v$NODE_VERSION-linux-alpine-armv6hf.tar.gz" | sha256sum -c - \
	&& tar -xzf "node-v$NODE_VERSION-linux-alpine-armv6hf.tar.gz" -C /usr/local --strip-components=1 \
	&& rm "node-v$NODE_VERSION-linux-alpine-armv6hf.tar.gz" \
	&& curl -fSLO --compressed "https://yarnpkg.com/downloads/$YARN_VERSION/yarn-v$YARN_VERSION.tar.gz" \
	&& curl -fSLO --compressed "https://yarnpkg.com/downloads/$YARN_VERSION/yarn-v$YARN_VERSION.tar.gz.asc" \
	&& gpg --batch --verify yarn-v$YARN_VERSION.tar.gz.asc yarn-v$YARN_VERSION.tar.gz \
	&& mkdir -p /opt/yarn \
	&& tar -xzf yarn-v$YARN_VERSION.tar.gz -C /opt/yarn --strip-components=1 \
	&& ln -s /opt/yarn/bin/yarn /usr/local/bin/yarn \
	&& ln -s /opt/yarn/bin/yarn /usr/local/bin/yarnpkg \
	&& rm yarn-v$YARN_VERSION.tar.gz.asc yarn-v$YARN_VERSION.tar.gz \
	&& npm config set unsafe-perm true -g --unsafe-perm \
	&& rm -rf /tmp/*
# Placeholder CMD: images derived from this one are expected to set their own.
CMD ["echo","'No CMD command was set in Dockerfile! Details about CMD command could be found in Dockerfile Guide section in our Docs. Here's the link: https://balena.io/docs"]
# Run balena's Node smoke test against the installed stack, then remove it.
RUN curl -SLO "https://raw.githubusercontent.com/balena-io-library/base-images/8accad6af708fca7271c5c65f18a86782e19f877/scripts/assets/tests/[email protected]" \
	&& echo "Running test-stack@node" \
	&& chmod +x [email protected] \
	&& bash [email protected] \
	&& rm -rf [email protected]
RUN [ ! -d /.balena/messages ] && mkdir -p /.balena/messages; echo $'Here are a few details about this Docker image (For more information please visit https://www.balena.io/docs/reference/base-images/base-images/): \nArchitecture: ARM v6 \nOS: Alpine Linux 3.12 \nVariant: run variant \nDefault variable(s): UDEV=off \nThe following software stack is preinstalled: \nNode.js v17.6.0, Yarn v1.22.4 \nExtra features: \n- Easy way to install packages with `install_packages <package-name>` command \n- Run anywhere with cross-build feature (for ARM only) \n- Keep the container idling with `balena-idle` command \n- Show base image details with `balena-info` command' > /.balena/messages/image-info
RUN echo $'#!/bin/bash\nbalena-info\nbusybox ln -sf /bin/busybox /bin/sh\n/bin/sh "$@"' > /bin/sh-shim \
&& chmod +x /bin/sh-shim \
&& ln -f /bin/sh /bin/sh.real \
&& ln -f /bin/sh-shim /bin/sh | {
"content_hash": "39c00db9ab52c03d89326ade24e9cf0d",
"timestamp": "",
"source": "github",
"line_count": 48,
"max_line_length": 696,
"avg_line_length": 62.458333333333336,
"alnum_prop": 0.7068045363575717,
"repo_name": "resin-io-library/base-images",
"id": "3bb13e744ce0c6e3ffbd607f219105c87ff6dc4d",
"size": "3019",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "balena-base-images/node/raspberry-pi/alpine/3.12/17.6.0/run/Dockerfile",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "71234697"
},
{
"name": "JavaScript",
"bytes": "13096"
},
{
"name": "Shell",
"bytes": "12051936"
},
{
"name": "Smarty",
"bytes": "59789"
}
],
"symlink_target": ""
} |
#include <UtH/Platform/Android/FileManager.hpp>
#elif defined(UTH_SYSTEM_WINDOWS) || defined(UTH_SYSTEM_LINUX)
#include <UtH/Platform/Common/FileManager.hpp>
#else
#error No filereader for such platform
#endif | {
"content_hash": "1e859a90a0025b99c3b8f05fbf53fadf",
"timestamp": "",
"source": "github",
"line_count": 6,
"max_line_length": 62,
"avg_line_length": 35.333333333333336,
"alnum_prop": 0.7830188679245284,
"repo_name": "TeamZilla/Destroyer",
"id": "634354cb5762e3f1a25a06bf7cd4b8c03489a433",
"size": "287",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "include/UtH/Platform/FileManager.hpp",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "3090006"
},
{
"name": "C++",
"bytes": "1035127"
},
{
"name": "Java",
"bytes": "23939"
},
{
"name": "Makefile",
"bytes": "5649"
},
{
"name": "Objective-C",
"bytes": "30799"
},
{
"name": "Shell",
"bytes": "467"
}
],
"symlink_target": ""
} |
/**
* @ignore
* generate proxy drag object,
* @author [email protected]
*/
var DD = require('dd'),
Base = require('base');
var DDM = DD.DDM,
PROXY_EVENT = '.-ks-proxy' + (+new Date());
/**
* @extends KISSY.Base
* @class KISSY.DD.Plugin.Proxy
* Proxy plugin to provide abilities for draggable tp create a proxy drag node,
* instead of dragging the original node.
*/
module.exports = Base.extend({
    pluginId: 'dd/plugin/proxy',
    /**
     * Make the attached draggable drag a proxy node instead of its own
     * node. Listeners are bound in a per-plugin event namespace
     * (PROXY_EVENT) so pluginDestructor can detach them cleanly.
     * @param {KISSY.DD.Draggable} drag
     * @private
     */
    pluginInitializer: function (drag) {
        var self = this;
        // dragstart: lazily build (or reuse) the proxy node, overlay it on
        // the original node, and swap it in as the draggable's 'node'.
        function start() {
            var node = self.get('node'),
                dragNode = drag.get('node');
            // cache proxy node
            if (!self.get('proxyNode')) {
                if (typeof node === 'function') {
                    node = node(drag);
                    node.addClass('ks-dd-proxy');
                    self.set('proxyNode', node);
                }
            } else {
                node = self.get('proxyNode');
            }
            node.show();
            dragNode.parent().append(node);
            DDM.cacheWH(node);
            node.offset(dragNode.offset());
            // Keep a handle on the real node and substitute the proxy for
            // the duration of the drag.
            drag.setInternal('dragNode', dragNode);
            drag.setInternal('node', node);
        }
        // dragend: optionally move the real node to where the proxy ended
        // up, destroy or hide the proxy, and restore the real node.
        function end() {
            var node = self.get('proxyNode'),
                dragNode = drag.get('dragNode');
            if (self.get('moveOnEnd')) {
                dragNode.offset(node.offset());
            }
            if (self.get('destroyOnEnd')) {
                node.remove();
                self.set('proxyNode', 0);
            } else {
                node.hide();
            }
            drag.setInternal('node', dragNode);
        }
        drag.on('dragstart' + PROXY_EVENT, start)
            .on('dragend' + PROXY_EVENT, end);
    },
    /**
     * Detach this plugin's namespaced listeners from the draggable.
     * @param {KISSY.DD.Draggable} drag
     * @private
     */
    pluginDestructor: function (drag) {
        drag.detach(PROXY_EVENT);
    }
}, {
    ATTRS: {
        /**
         * How to obtain the proxy node; called with the draggable.
         * Defaults to a deep clone of the draggable's node.
         * @cfg {Function} node
         */
        /**
         * @ignore
         */
        node: {
            value: function (drag) {
                return drag.get('node').clone(true);
            }
        },
        /**
         * Destroy the proxy node at the end of each drag.
         * Default false (the proxy is hidden and reused).
         * @cfg {Boolean} destroyOnEnd
         */
        /**
         * @ignore
         */
        destroyOnEnd: {
            value: false
        },
        /**
         * Move the original node to the proxy's final position at the
         * end of the drag. Default true.
         * @cfg {Boolean} moveOnEnd
         */
        /**
         * @ignore
         */
        moveOnEnd: {
            value: true
        },
        /**
         * Current proxy node.
         * @type {KISSY.Node}
         * @property proxyNode
         */
        /**
         * @ignore
         */
        proxyNode: {}
    }
}); | {
"content_hash": "4df96427475af83050aeedde25810c23",
"timestamp": "",
"source": "github",
"line_count": 127,
"max_line_length": 79,
"avg_line_length": 24.77952755905512,
"alnum_prop": 0.4534477279949158,
"repo_name": "eaglesjava/kissy",
"id": "07967ff1fa97153ccb83c0e5860830fdf6486eeb",
"size": "3147",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/dd/plugin/proxy/src/proxy.js",
"mode": "33188",
"license": "mit",
"language": [],
"symlink_target": ""
} |
from __future__ import absolute_import, print_function, unicode_literals
from builtins import dict, str
import logging
from indra.sources.cwms.processor import CWMSProcessor
from indra.sources.cwms.rdf_processor import CWMSRDFProcessor
from indra.sources.trips import client
logger = logging.getLogger(__name__)
def process_text(text, save_xml='cwms_output.xml'):
    """Processes text using the CWMS web service.

    Parameters
    ----------
    text : str
        Text to process
    save_xml : Optional[str]
        File name in which to dump the extracted (second) EKB XML.
        If None or empty, nothing is saved. Default: cwms_output.xml

    Returns
    -------
    cp : indra.sources.cwms.CWMSProcessor
        A CWMSProcessor, which contains a list of INDRA statements in its
        statements attribute.

    Raises
    ------
    ValueError
        If the service response does not contain two EKB sections.
    """
    xml = client.send_query(text, 'cwmsreader')
    # There are actually two EKBs in the xml document. Extract the second.
    first_end = xml.find('</ekb>')  # End of first EKB
    second_start = xml.find('<ekb', first_end)  # Start of second EKB
    second_end = xml.find('</ekb>', second_start)  # End of second EKB
    if first_end == -1 or second_start == -1 or second_end == -1:
        # Fail loudly instead of silently slicing a garbage substring when
        # the expected markers are missing (str.find returns -1 on miss).
        raise ValueError('Expected two EKB sections in the CWMS output.')
    second_ekb = xml[second_start:second_end + len('</ekb>')]  # second EKB
    if save_xml:
        # Write bytes so the UTF-8 encoding is explicit and uniform.
        with open(save_xml, 'wb') as fh:
            fh.write(second_ekb.encode('utf-8'))
    return process_ekb(second_ekb)
def process_ekb_file(fname):
    """Process an EKB file produced by CWMS into INDRA statements.

    Parameters
    ----------
    fname : str
        Path to the EKB file to process.

    Returns
    -------
    cp : indra.sources.cwms.CWMSProcessor
        A CWMSProcessor whose statements attribute holds the extracted
        INDRA statements.
    """
    # Read the raw EKB XML and delegate to the string-based entry point.
    with open(fname, 'rb') as fh:
        ekb_xml = fh.read().decode('utf-8')
    return process_ekb(ekb_xml)
def process_ekb(ekb_str):
    """Process an EKB string produced by CWMS into INDRA statements.

    Parameters
    ----------
    ekb_str : str
        EKB string to process

    Returns
    -------
    cp : indra.sources.cwms.CWMSProcessor
        A CWMSProcessor whose statements attribute holds the extracted
        INDRA statements.
    """
    processor = CWMSProcessor(ekb_str)
    # Run each extraction pass in order; all of them append to
    # processor.statements.
    for extract in (processor.extract_causal_relations,
                    processor.extract_correlations,
                    processor.extract_events):
        extract()
    return processor
def process_rdf_file(text, rdf_filename):
    """Process CWMS's RDF output for the given sentence.

    Parameters
    ----------
    text : str
        Sentence to process
    rdf_filename : str
        The RDF filename to process

    Returns
    -------
    cp : indra.sources.cwms.CWMSRDFProcessor
        A CWMSRDFProcessor instance, which contains a list of INDRA
        Statements as its statements attribute.
    """
    # The processor does all extraction in its constructor.
    return CWMSRDFProcessor(text, rdf_filename)
| {
"content_hash": "5711357b391cde73cbfd17cab65ded7d",
"timestamp": "",
"source": "github",
"line_count": 98,
"max_line_length": 75,
"avg_line_length": 28.091836734693878,
"alnum_prop": 0.6465673810388667,
"repo_name": "pvtodorov/indra",
"id": "0fb3e0a9f9dfc6f14a98f2ac13c717572cc63868",
"size": "2753",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "indra/sources/cwms/api.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "CSS",
"bytes": "169"
},
{
"name": "HTML",
"bytes": "17236"
},
{
"name": "JavaScript",
"bytes": "72960"
},
{
"name": "Python",
"bytes": "2660313"
},
{
"name": "Shell",
"bytes": "381"
}
],
"symlink_target": ""
} |
package com.amazonaws.services.cloudsearchv2.model.transform;
import javax.annotation.Generated;
import com.amazonaws.SdkClientException;
import com.amazonaws.Request;
import com.amazonaws.DefaultRequest;
import com.amazonaws.http.HttpMethodName;
import com.amazonaws.services.cloudsearchv2.model.*;
import com.amazonaws.transform.Marshaller;
import com.amazonaws.util.StringUtils;
/**
 * Marshals an {@code UpdateDomainEndpointOptionsRequest} into an AWS Query
 * protocol {@code Request} for the Amazon CloudSearch 2013-01-01 API.
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class UpdateDomainEndpointOptionsRequestMarshaller implements
        Marshaller<Request<UpdateDomainEndpointOptionsRequest>, UpdateDomainEndpointOptionsRequest> {

    public Request<UpdateDomainEndpointOptionsRequest> marshall(UpdateDomainEndpointOptionsRequest updateDomainEndpointOptionsRequest) {

        if (updateDomainEndpointOptionsRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }

        // Target the CloudSearch Query API over HTTP POST.
        Request<UpdateDomainEndpointOptionsRequest> request = new DefaultRequest<UpdateDomainEndpointOptionsRequest>(updateDomainEndpointOptionsRequest,
                "AmazonCloudSearchv2");
        request.addParameter("Action", "UpdateDomainEndpointOptions");
        request.addParameter("Version", "2013-01-01");
        request.setHttpMethod(HttpMethodName.POST);

        // Only non-null members are serialized as query parameters.
        String domainName = updateDomainEndpointOptionsRequest.getDomainName();
        if (domainName != null) {
            request.addParameter("DomainName", StringUtils.fromString(domainName));
        }

        DomainEndpointOptions endpointOptions = updateDomainEndpointOptionsRequest.getDomainEndpointOptions();
        if (endpointOptions != null) {
            Boolean enforceHttps = endpointOptions.getEnforceHTTPS();
            if (enforceHttps != null) {
                request.addParameter("DomainEndpointOptions.EnforceHTTPS", StringUtils.fromBoolean(enforceHttps));
            }
            String tlsSecurityPolicy = endpointOptions.getTLSSecurityPolicy();
            if (tlsSecurityPolicy != null) {
                request.addParameter("DomainEndpointOptions.TLSSecurityPolicy", StringUtils.fromString(tlsSecurityPolicy));
            }
        }

        return request;
    }

}
| {
"content_hash": "89ef79acb90b15520c43343593157d26",
"timestamp": "",
"source": "github",
"line_count": 55,
"max_line_length": 154,
"avg_line_length": 41.27272727272727,
"alnum_prop": 0.7352422907488987,
"repo_name": "aws/aws-sdk-java",
"id": "2d6e647cebf03123ed84989ec88b506b5a7ca28c",
"size": "2850",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "aws-java-sdk-cloudsearch/src/main/java/com/amazonaws/services/cloudsearchv2/model/transform/UpdateDomainEndpointOptionsRequestMarshaller.java",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
(function( $ ) {
$.fn.hasScrollBar = function() {
return this.get(0).scrollHeight > this.height();
};
$.fn.lionbars = function(options) {
options = options || {};
autohide = options.autohide;
// Flags
var timeout,
HDragging=false,
VDragging=false,
activeScroll=0,
activeWrap=0,
eventX,
eventY,
mouseX,
mouseY,
currentRatio,
initPos,
scrollValue,
hideTimeoutSet=false,
vEventFired = false,
hEventFired = false;
// Initialization
var elements = $(this),
id = 0,
vScrollWidth=0, hScrollWidth=0,
addHScroll=false, addVScroll=false,
paddingTop=0, paddingLeft=0, paddingBottom=0, paddingRight=0,
borderTop=0, borderRight=0, borderBottom=0, borderLeft=0,
scrollHeight=0, scrollWidth=0, offsetWidth=0, offsetHeight=0, clientWidth=0, clientHeight=0,
vRatio=0, hRatio=0,
vSliderHeight=0, hSliderHeight=0,
vLbHeight=0, hLbHeight=0;
// Main Loop
mainLoop();
function mainLoop() {
for (var i=0; elements[i] !== undefined; i++) {
if (needScrollbars(elements[i]) && !$(elements[i]).hasClass('nolionbars')) {
// add the element to the main array
target = elements[i];
// get some values before the element is wrapped
getDimentions(target);
// wrap the element
wrap(target, addVScroll, addHScroll);
// hide the default scrollbar
hideScrollbars(target, addVScroll, addHScroll);
// Calculate the size of the scrollbars
reduceScrollbarsWidthHeight(target);
setSlidersHeight(target);
// Set variables needed to calculate scroll speed, etc.
setScrollRatios(target);
// Set events
setEvents(target);
// prepare for next element
resetVars();
}
}
}
// Set document events
$(document).mousemove(function(e) {
if (VDragging) {
mouseY = e.pageY;
activeWrap.scrollTop((initPos + mouseY - eventY) * Math.abs(currentRatio));
}
if (HDragging) {
mouseX = e.pageX;
activeWrap.scrollLeft((initPos + mouseX - eventX) * Math.abs(currentRatio));
}
});
$(document).mouseup(function(e) {
if (VDragging) {
VDragging = false;
}
if (HDragging) {
HDragging = false;
}
});
// Core functions
function setEvents(elem) {
var el = $(elem);
if (addVScroll || addHScroll) {
el.find('.lb-wrap').scroll(function(e) {
el.find('.lb-v-scrollbar-slider').css({ "top" : -$(this).scrollTop()/el.attr('vratio') });
el.find('.lb-h-scrollbar-slider').css({ "left" : -$(this).scrollLeft()/el.attr('hratio') });
if (el.find('.lb-v-scrollbar').height() == (parseInt(el.find('.lb-v-scrollbar-slider').css('top')) + el.find('.lb-v-scrollbar-slider').height())
&& typeof(options.reachedBottom) == 'function'
&& !vEventFired
) {
vEventFired = true;
var self = $(this);
options.reachedBottom.apply($(this).children('.lb-content'), [function () {
getDimentions($(self).parent(), {
height: $(self).children('.lb-content').get(0).scrollHeight,
width: $(self).children('.lb-content').get(0).scrollWidth
});
// Calculate the size of the scrollbars
reduceScrollbarsWidthHeight($(self).parent());
setSlidersHeight($(self).parent());
// Set variables needed to calculate scroll speed, etc.
setScrollRatios($(self).parent());
// prepare for next element
resetVars();
vEventFired = false;
}]);
}
if (el.find('.lb-h-scrollbar').width() == (parseInt(el.find('.lb-h-scrollbar-slider').css('left')) + el.find('.lb-h-scrollbar-slider').width())
&& typeof(options.reachedRight) == 'function'
&& !hEventFired
) {
hEventFired = true;
var self = $(this);
options.reachedRight.apply($(this).children('.lb-content'), [function () {
getDimentions($(self).parent(), {
height: $(self).children('.lb-content').get(0).scrollHeight,
width: $(self).children('.lb-content').get(0).scrollWidth
});
// Calculate the size of the scrollbars
reduceScrollbarsWidthHeight($(self).parent());
setSlidersHeight($(self).parent());
// Set variables needed to calculate scroll speed, etc.
setScrollRatios($(self).parent());
// prepare for next element
resetVars();
hEventFired = false;
}]);
}
if (autohide) {
el.find('.lb-v-scrollbar, .lb-h-scrollbar').fadeIn(150);
clearTimeout(timeout);
timeout = setTimeout(function() {
el.find('.lb-v-scrollbar, .lb-h-scrollbar').fadeOut(150);
}, 2000);
}
});
}
if (addVScroll) {
el.find('.lb-v-scrollbar-slider').mousedown(function(e) {
eventY = e.pageY;
VDragging = true;
activeScroll = $(this);
activeWrap = el.find('.lb-wrap');
currentRatio = activeWrap.parent().attr('vratio');
initPos = activeScroll.position().top;
return false;
});
el.find('.lb-v-scrollbar').mousedown(function(e) {
if (!$(e.target).hasClass('lb-v-scrollbar-slider')) {
el.find('.lb-wrap').scrollTop((e.pageY - $(this).offset().top) * Math.abs(el.attr('vratio')) - $(this).find('.lb-v-scrollbar-slider').height()/2);
}
return false;
});
}
if (addHScroll) {
el.find('.lb-h-scrollbar-slider').mousedown(function(e) {
eventX = e.pageX;
HDragging = true;
activeScroll = $(this);
activeWrap = el.find('.lb-wrap');
currentRatio = activeWrap.parent().attr('hratio');
initPos = activeScroll.position().left;
return false;
});
el.find('.lb-h-scrollbar').mousedown(function(e) {
if (!$(e.target).hasClass('lb-h-scrollbar-slider')) {
el.find('.lb-wrap').scrollLeft((e.pageX - $(this).offset().left) * Math.abs(el.attr('hratio')) - $(this).find('.lb-h-scrollbar-slider').width()/2);
}
return false;
});
}
if ((addVScroll || addHScroll) && autohide) {
el.find('.lb-v-scrollbar, .lb-h-scrollbar').hide();
el.hover(function() {
}, function() {
el.find('.lb-v-scrollbar, .lb-h-scrollbar').fadeOut(150);
});
}
}
function setScrollRatios(elem) {
vRatio = (offsetHeight - $(elem).find('.lb-wrap').get(0).scrollHeight - borderTop - borderBottom)/(vLbHeight - vSliderHeight);
hRatio = (offsetWidth - $(elem).find('.lb-wrap').get(0).scrollWidth - borderLeft - borderRight)/(hLbHeight - hSliderHeight);
var el = $(elem);
el.attr('vratio', vRatio);
el.attr('hratio', hRatio);
}
function setSlidersHeight(elem) {
var el = $(elem);
var hmin, hmax, gap;
if (el.find('.lb-v-scrollbar').length != 0) {
hmin = 20;
gap = offsetHeight - el.find('.lb-v-scrollbar').height();
hmax = offsetHeight - gap - hmin;
vSliderHeight = Math.round((offsetHeight*hmax)/scrollHeight);
vSliderHeight = (vSliderHeight < hmin) ? hmin : vSliderHeight;
}
if (el.find('.lb-h-scrollbar').length != 0) {
hmin = 20;
gap = offsetWidth - el.find('.lb-h-scrollbar').width();
hmax = offsetWidth - gap - hmin;
hSliderHeight = Math.round((offsetWidth*hmax)/scrollWidth);
hSliderHeight = (hSliderHeight < hmin) ? hmin : hSliderHeight;
}
el.find('.lb-v-scrollbar-slider').css({ "height" : vSliderHeight });
el.find('.lb-h-scrollbar-slider').css({ "width" : hSliderHeight });
}
function resetVars() {
vScrollWidth = 0;
hScrollWidth = 0;
addHScroll=false;
addVScroll=false;
paddingTop = 0;
paddingLeft = 0;
paddingBottom = 0;
paddingRight = 0;
borderTop = 0;
borderLeft = 0;
borderBottom = 0;
borderRight = 0;
scrollHeight = 0;
scrollWidth = 0;
offsetWidth = 0;
offsetHeight = 0;
clientWidth = 0;
clientHeight = 0;
// vRatio = 0;
// hRatio = 0;
vSliderHeight = 0;
hSliderHeight = 0;
vLbHeight = 0;
hLbHeight = 0;
}
function reduceScrollbarsWidthHeight(elem) {
var el = $(elem);
if (addVScroll && addHScroll) {
vLbHeight = el.height()-12;
hLbHeight = el.width()-12;
el.find('.lb-v-scrollbar').css({ "height" : vLbHeight });
el.find('.lb-h-scrollbar').css({ "width" : hLbHeight });
} else {
vLbHeight = el.height()-4;
hLbHeight = el.width()-4;
el.find('.lb-v-scrollbar').css({ "height" : vLbHeight });
el.find('.lb-h-scrollbar').css({ "width" : hLbHeight });
}
}
function hideScrollbars(elem, vscroll, hscroll) {
var el = $(elem);
if (vscroll || hscroll) {
el.css({ "overflow" : 'hidden' });
movePadding(el, el.find('.lb-wrap'));
resizeMainBox(el);
resizeInnerWrap(el, el.find('.lb-wrap'));
}
}
function resizeMainBox(elem) {
var el = $(elem);
el.css({ "width" : el.width() + paddingLeft + paddingRight, "height" : el.height() + paddingTop + paddingBottom });
}
function movePadding(from, to) {
var fromEl = $(from);
var toEl = $(to);
fromEl.css({ "padding" : 0 });
toEl.css({
"padding-top" : paddingTop+'px',
"padding-left" : paddingLeft+'px',
"padding-bottom" : paddingBottom+'px',
"padding-right" : paddingRight+'px'
});
}
function resizeInnerWrap(main, child) {
var mainEl = $(main);
var childEl = $(child);
mainEl.css({ "position" : 'relative' });
childEl.css({
"width" : mainEl.width()+vScrollWidth - paddingLeft - paddingRight,
"height" : mainEl.height()+hScrollWidth - paddingTop - paddingBottom
});
}
function setVScrollbarWidth(elem) {
var el = $(elem);
el.css({ "overflow" : 'auto' });
vScrollWidth = offsetWidth - clientWidth - borderLeft - borderRight;
el.css({ "overflow" : 'hidden' });
}
function setHScrollbarWidth(elem) {
var el = $(elem);
el.css({ "overflow" : 'auto' });
hScrollWidth = offsetHeight - clientHeight - borderTop - borderBottom;
el.css({ "overflow" : 'hidden' });
}
function wrap(elem, vscroll, hscroll) {
var el = $(elem);
var elemId = el.attr('id');
var wrap = 0;
if (elemId !== undefined) {
el.wrapInner('<div class="lb-wrap" id="lb-wrap-'+id+'-'+elemId+'"></div>');
wrap = $('#lb-wrap-'+id+'-'+elemId);
} else {
el.wrapInner('<div class="lb-wrap" id="lb-wrap-'+id+'"></div>');
wrap = $('#lb-wrap-'+id);
}
wrap.wrapInner('<div class="lb-content"></div>');
if (vscroll) {
el.prepend('<div class="lb-v-scrollbar"></div>');
el.find('.lb-v-scrollbar').append('<div class="lb-v-scrollbar-slider"></div>');
}
if (hscroll) {
el.prepend('<div class="lb-h-scrollbar"></div>');
el.find('.lb-h-scrollbar').append('<div class="lb-h-scrollbar-slider"></div>');
}
// preparation for the next element
id = id + 1;
}
function needScrollbars(elem) {
var el = $(elem);
addVScroll = false;
addHScroll = false;
getPadding(el);
getBorders(el);
el.css({ "overflow" : 'hidden' });
// check for vertical scrollbars
if (el.get(0).scrollHeight > el.get(0).clientHeight) {
addVScroll = true;
// setVScrollbarWidth(el);
}
// check for horizontal scrollbars
if (el.get(0).scrollWidth > el.get(0).clientWidth) {
addHScroll = true;
// setHScrollbarWidth(el);
}
el.css({ "overflow" : 'auto' });
if (addVScroll || addHScroll) {
return true;
}
}
function getPadding(elem) {
var el = $(elem);
paddingTop = parseInt(el.css('padding-top').replace('px', ''));
paddingLeft = parseInt(el.css('padding-left').replace('px', ''));
paddingBottom = parseInt(el.css('padding-bottom').replace('px', ''));
paddingRight = parseInt(el.css('padding-right').replace('px', ''));
}
function getBorders(elem) {
var el = $(elem);
borderTop = parseInt(el.css('border-top-width').replace('px', ''));
borderRight = parseInt(el.css('border-right-width').replace('px', ''));
borderBottom = parseInt(el.css('border-bottom-width').replace('px', ''));
borderLeft = parseInt(el.css('border-left-width').replace('px', ''));
}
function getDimentions(elem, scroll) {
var el = $(elem).get(0);
scrollHeight = (typeof(scroll) != 'undefined') ? scroll.height : el.scrollHeight;
scrollWidth = (typeof(scroll) != 'undefined') ? scroll.width : el.scrollWidth;
clientHeight = el.clientHeight;
clientWidth = el.clientWidth;
offsetHeight = el.offsetHeight;
offsetWidth = el.offsetWidth;
setVScrollbarWidth($(elem));
setHScrollbarWidth($(elem));
}
return this.each(function() {
//var $this = $(this);
});
};
})( jQuery ); | {
"content_hash": "6d5b64f28a9b37fe3a2c96bd475963be",
"timestamp": "",
"source": "github",
"line_count": 417,
"max_line_length": 153,
"avg_line_length": 30.314148681055155,
"alnum_prop": 0.596313582786172,
"repo_name": "rip-projects/judge",
"id": "125ee4f628df0e1daf43ef4baa15c6648215ed1d",
"size": "12641",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "themes/default/js/jquery.lionbars.0.3.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "129497"
},
{
"name": "HTML",
"bytes": "1280672"
},
{
"name": "JavaScript",
"bytes": "152239"
},
{
"name": "PHP",
"bytes": "2059358"
},
{
"name": "Shell",
"bytes": "346"
}
],
"symlink_target": ""
} |
using System.Diagnostics;
namespace System.Linq.Parallel
{
/// <summary>
/// This enumerator merges multiple input channels into a single output stream. The merging process just
/// goes from left-to-right, enumerating each channel in succession in its entirety.
/// Assumptions:
/// Before enumerating this object, all producers for all channels must have finished enqueuing new
/// elements.
/// </summary>
/// <typeparam name="T"></typeparam>
internal sealed class SynchronousChannelMergeEnumerator<T> : MergeEnumerator<T>
{
private readonly SynchronousChannel<T>[] _channels; // The channel array we will enumerate, from left-to-right.
private int _channelIndex; // The current channel index. This moves through the array as we enumerate.
private T _currentElement; // The last element remembered during enumeration.
//-----------------------------------------------------------------------------------
// Instantiates a new enumerator for a set of channels.
//
internal SynchronousChannelMergeEnumerator(
QueryTaskGroupState taskGroupState, SynchronousChannel<T>[] channels) : base(taskGroupState)
{
Debug.Assert(channels != null);
#if DEBUG
foreach (SynchronousChannel<T> c in channels) Debug.Assert(c != null);
#endif
_channels = channels;
_channelIndex = -1;
}
//-----------------------------------------------------------------------------------
// Retrieves the current element.
//
// Notes:
// This throws if we haven't begun enumerating or have gone past the end of the
// data source.
//
public override T Current
{
get
{
// If we're at the beginning or the end of the array, it's invalid to be
// retrieving the current element. We throw.
if (_channelIndex == -1 || _channelIndex == _channels.Length)
{
throw new InvalidOperationException(SR.PLINQ_CommonEnumerator_Current_NotStarted);
}
return _currentElement;
}
}
//-----------------------------------------------------------------------------------
// Positions the enumerator over the next element. This includes merging as we
// enumerate, by just incrementing indexes, etc.
//
// Return Value:
// True if there's a current element, false if we've reached the end.
//
public override bool MoveNext()
{
Debug.Assert(_channels != null);
// If we're at the start, initialize the index.
if (_channelIndex == -1)
{
_channelIndex = 0;
}
// If the index has reached the end, we bail.
while (_channelIndex != _channels.Length)
{
SynchronousChannel<T> current = _channels[_channelIndex];
Debug.Assert(current != null);
if (current.Count == 0)
{
// We're done with this channel, move on to the next one. We don't
// have to check that it's "done" since this is a synchronous consumer.
_channelIndex++;
}
else
{
// Remember the "current" element and return.
_currentElement = current.Dequeue();
return true;
}
}
TraceHelpers.TraceInfo("[timing]: {0}: Completed the merge", DateTime.Now.Ticks);
// If we got this far, it means we've exhausted our channels.
Debug.Assert(_channelIndex == _channels.Length);
return false;
}
}
}
| {
"content_hash": "e77edbba53b435fc1cd697ac68cecca3",
"timestamp": "",
"source": "github",
"line_count": 104,
"max_line_length": 119,
"avg_line_length": 37.75,
"alnum_prop": 0.5155374426897605,
"repo_name": "ViktorHofer/corefx",
"id": "ba2f1c2ee6e8eb5cfecba65d5b54bbfaf348779d",
"size": "4357",
"binary": false,
"copies": "15",
"ref": "refs/heads/master",
"path": "src/System.Linq.Parallel/src/System/Linq/Parallel/Merging/SynchronousChannelMergeEnumerator.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "1C Enterprise",
"bytes": "280724"
},
{
"name": "ASP",
"bytes": "1687"
},
{
"name": "Batchfile",
"bytes": "11027"
},
{
"name": "C",
"bytes": "3803475"
},
{
"name": "C#",
"bytes": "181020278"
},
{
"name": "C++",
"bytes": "1521"
},
{
"name": "CMake",
"bytes": "79434"
},
{
"name": "DIGITAL Command Language",
"bytes": "26402"
},
{
"name": "HTML",
"bytes": "653"
},
{
"name": "Makefile",
"bytes": "13780"
},
{
"name": "OpenEdge ABL",
"bytes": "137969"
},
{
"name": "Perl",
"bytes": "3895"
},
{
"name": "PowerShell",
"bytes": "192578"
},
{
"name": "Python",
"bytes": "1535"
},
{
"name": "Roff",
"bytes": "9422"
},
{
"name": "Shell",
"bytes": "131531"
},
{
"name": "TSQL",
"bytes": "96941"
},
{
"name": "Visual Basic",
"bytes": "2135320"
},
{
"name": "XSLT",
"bytes": "514720"
}
],
"symlink_target": ""
} |
class Vote < ActiveRecord::Base
  belongs_to :user
  belongs_to :story

  # A vote is only valid when it references a user and a story and
  # carries a value.
  validates :user, presence: true
  validates :story, presence: true
  validates :value, presence: true

  # A given user may vote on a given story at most once.
  validates :user, uniqueness: { scope: :story }
end
| {
"content_hash": "66c0a0bcf82afbee71da5fa410c1dc64",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 49,
"avg_line_length": 25,
"alnum_prop": 0.7028571428571428,
"repo_name": "przemekszyszka/news",
"id": "302b7213643156d5e77271628f4b99197272aa88",
"size": "175",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/models/vote.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Ruby",
"bytes": "41343"
}
],
"symlink_target": ""
} |
using Microsoft.Practices.Unity;
using Prism.Unity;
using Modules.Views;
using System.Windows;
using Prism.Modularity;
namespace Modules
{
class Bootstrapper : UnityBootstrapper
{
protected override DependencyObject CreateShell()
{
return Container.Resolve<MainWindow>();
}
protected override void InitializeShell()
{
Application.Current.MainWindow.Show();
}
protected override IModuleCatalog CreateModuleCatalog()
{
return new ConfigurationModuleCatalog();
}
}
}
| {
"content_hash": "ee0fb260c31de50be752b463a349c1a5",
"timestamp": "",
"source": "github",
"line_count": 26,
"max_line_length": 63,
"avg_line_length": 22.615384615384617,
"alnum_prop": 0.641156462585034,
"repo_name": "jurajvt/Prism-Samples-Wpf",
"id": "7b58ca32255b9e9fd97fff1bb3777f0463d9d7c0",
"size": "590",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "7-Modules - AppConfig/Modules/Bootstrapper.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C#",
"bytes": "253315"
}
],
"symlink_target": ""
} |
namespace blink {
class Element;
class TimingFunction;
class QualifiedName;
// Static-only helpers that map Web Animations keyframe attribute names onto
// CSS properties / presentation attributes / SVG attributes, and parse
// timing-function strings.
class CORE_EXPORT AnimationInputHelpers {
    STATIC_ONLY(AnimationInputHelpers);
public:
    static CSSPropertyID keyframeAttributeToCSSProperty(const String&, const Document&);
    static CSSPropertyID keyframeAttributeToPresentationAttribute(const String&, const Element&);
    // NOTE(review): presumably returns null when the name is not a supported
    // SVG attribute — confirm against the definition.
    static const QualifiedName* keyframeAttributeToSVGAttribute(const String&, Element&);
    // NOTE(review): presumably returns null on parse failure — confirm against
    // the definition.
    static PassRefPtr<TimingFunction> parseTimingFunction(const String&, Document*);
};
} // namespace blink
#endif // AnimationInputHelpers_h
| {
"content_hash": "a8e5b62e4ec605d92cf50444e34b98ad",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 97,
"avg_line_length": 32.77777777777778,
"alnum_prop": 0.8016949152542373,
"repo_name": "was4444/chromium.src",
"id": "7237abc32df5ab6f20d90e6943d02d2368f66a5f",
"size": "911",
"binary": false,
"copies": "1",
"ref": "refs/heads/nw15",
"path": "third_party/WebKit/Source/core/animation/AnimationInputHelpers.h",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
import os
import sys
import knowledgebase
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
# The version is sourced from the package itself so it is defined in
# exactly one place.
version = knowledgebase.__version__

# `python setup.py publish` — build and upload sdist + wheel, then exit.
if sys.argv[-1] == 'publish':
    try:
        import wheel  # only needed so bdist_wheel below can succeed
    except ImportError:
        print('Wheel library missing. Please run "pip install wheel"')
        sys.exit()
    os.system('python setup.py sdist upload')
    os.system('python setup.py bdist_wheel upload')
    sys.exit()

# `python setup.py tag` — tag the current version in git and push tags.
if sys.argv[-1] == 'tag':
    print("Tagging the version on github:")
    os.system("git tag -a %s -m 'version %s'" % (version, version))
    os.system("git push --tags")
    sys.exit()
# Long description shown on PyPI: README followed by the changelog (with the
# reST changelog marker stripped). Context managers are used so the file
# handles are closed deterministically instead of relying on GC (the original
# `open(...).read()` calls leaked the handles until collection).
with open('README.rst') as f:
    readme = f.read()
with open('HISTORY.rst') as f:
    history = f.read().replace('.. :changelog:', '')

install_requires = []
with open('requirements.txt') as f:
    install_requires = f.read().splitlines()
# Package metadata; `install_requires` comes from requirements.txt above.
setup(
    name='django-knowledgebase',
    version=version,
    description="""A knowledgebase made with Django""",
    long_description=readme + '\n\n' + history,
    author='Julio Marquez',
    author_email='[email protected]',
    url='https://github.com/bazzite/django-knowledgebase',
    packages=[
        'knowledgebase',
    ],
    include_package_data=True,
    install_requires=install_requires,
    license="Apache License 2.0",
    zip_safe=False,
    keywords='django-knowledgebase',
    classifiers=[
        'Development Status :: 4 - Beta',
        'Framework :: Django',
        'Framework :: Django :: 1.8',
        'Framework :: Django :: 1.9',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: Apache Software License',
        'Natural Language :: English',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
    ],
)
| {
"content_hash": "edbd85d5c2542468469d0fe233fa2852",
"timestamp": "",
"source": "github",
"line_count": 68,
"max_line_length": 70,
"avg_line_length": 28.88235294117647,
"alnum_prop": 0.6186354378818737,
"repo_name": "bazzite/django-knowledgebase",
"id": "376ae219d7658a8d874ecd840a6c7d2b2b93c805",
"size": "2011",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "setup.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "19519"
},
{
"name": "JavaScript",
"bytes": "1197"
},
{
"name": "Makefile",
"bytes": "1259"
},
{
"name": "Python",
"bytes": "47357"
}
],
"symlink_target": ""
} |
/**
 * @author mrdoob / http://mrdoob.com/
 */

THREE.MaterialExporter = function () {};

THREE.MaterialExporter.prototype = {

	constructor: THREE.MaterialExporter,

	// Serializes a THREE material into a plain object (metadata format 4.0).
	parse: function ( material ) {

		var output = {
			metadata: {
				version: 4.0,
				type: 'material',
				generator: 'MaterialExporter'
			}
		};

		// Only emit a name when one has been assigned.
		if ( material.name !== "" ) output.name = material.name;

		// Properties shared by every supported material type. Called after the
		// type-specific fields so the key insertion order matches the legacy output.
		function addCommon() {

			output.opacity = material.opacity;
			output.transparent = material.transparent;
			output.wireframe = material.wireframe;

		}

		if ( material instanceof THREE.MeshBasicMaterial ) {

			output.type = 'MeshBasicMaterial';
			output.color = material.color.getHex();
			addCommon();

		} else if ( material instanceof THREE.MeshLambertMaterial ) {

			output.type = 'MeshLambertMaterial';
			output.color = material.color.getHex();
			output.ambient = material.ambient.getHex();
			output.emissive = material.emissive.getHex();
			addCommon();

		} else if ( material instanceof THREE.MeshPhongMaterial ) {

			output.type = 'MeshPhongMaterial';
			output.color = material.color.getHex();
			output.ambient = material.ambient.getHex();
			output.emissive = material.emissive.getHex();
			output.specular = material.specular.getHex();
			output.shininess = material.shininess;
			addCommon();

		} else if ( material instanceof THREE.MeshNormalMaterial ) {

			output.type = 'MeshNormalMaterial';
			addCommon();

		} else if ( material instanceof THREE.MeshDepthMaterial ) {

			output.type = 'MeshDepthMaterial';
			addCommon();

		}

		return output;

	}

};
| {
"content_hash": "7231d52a8bf9f00de1613d8b1fb7b6e8",
"timestamp": "",
"source": "github",
"line_count": 73,
"max_line_length": 63,
"avg_line_length": 26.82191780821918,
"alnum_prop": 0.7114402451481103,
"repo_name": "1974kpkpkp/three.js",
"id": "ec89e3d33b8c19deb4be8872a1389b12f12e442c",
"size": "1958",
"binary": false,
"copies": "9",
"ref": "refs/heads/master",
"path": "examples/js/exporters/MaterialExporter.js",
"mode": "33188",
"license": "mit",
"language": [],
"symlink_target": ""
} |
import sys
import os.path
import unittest
sys.path.append(
os.path.abspath(os.path.join(os.path.dirname(__file__), "../../")))
import configgen.utils.videoMode as videoMode
class VideoModeUtilTest(unittest.TestCase):
    """Placeholder test case for configgen.utils.videoMode.

    NOTE(review): all the original assertions below are commented out; they
    reference videoMode.createVideoModeLine(), which may no longer exist in
    the module — confirm before re-enabling them.
    """

    pass

    #def test_createSimpleFillValues(self):
    #    self.assertEquals(videoMode.createVideoModeLine("10"), "tvservice -e 10 CEA HDMI")

    #def test_createAddHDMI(self):
    #    self.assertEquals(videoMode.createVideoModeLine("10 CEA"), "tvservice -e 10 CEA HDMI")

    #def test_createDontAddWhenLineCompelete(self):
    #    self.assertEquals(videoMode.createVideoModeLine("10 CEA HDMI"), "tvservice -e 10 CEA HDMI")

if __name__ == '__main__':
    unittest.main()
| {
"content_hash": "ed37058ccfc23a8bb1223e687919ce35",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 100,
"avg_line_length": 33.38095238095238,
"alnum_prop": 0.7061340941512125,
"repo_name": "digitalLumberjack/recalbox-configgen",
"id": "779b36513025df2566bebe4b6f3c13977678ce19",
"size": "723",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "configgen/tests/utils/videoModeUtil_test.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "131403"
}
],
"symlink_target": ""
} |
"""
Representation Tutorial for RLPy
================================
Assumes you have created the IncrTabularTut.py agent according to the tutorial and
placed it in the Representations/ directory.
Tests the Representation on the GridWorld domain usin SARSA
"""
__author__ = "Robert H. Klein"
from rlpy.Domains import GridWorld
from rlpy.Agents import SARSA
from rlpy.Representations import IncrTabularTut
from rlpy.Policies import eGreedy
from rlpy.Experiments import Experiment
import os
def make_experiment(exp_id=1, path="./Results/Tutorial/gridworld-IncrTabularTut"):
    """
    Each file specifying an experimental setup should contain a
    make_experiment function which returns an instance of the Experiment
    class with everything set up.

    @param exp_id: number used to seed the random number generators
    @param path: output directory where logs and results are stored
    """
    opt = {}
    opt["exp_id"] = exp_id
    opt["path"] = path

    ## Domain: a noisy 4x5 GridWorld (30% chance an action is randomized)
    maze = os.path.join(GridWorld.default_map_dir, '4x5.txt')
    domain = GridWorld(maze, noise=0.3)
    opt["domain"] = domain

    ## Representation
    # discretization only needed for continuous state spaces, discarded otherwise
    representation = IncrTabularTut(domain)

    ## Policy: epsilon-greedy exploration over the representation's values
    policy = eGreedy(representation, epsilon=0.2)

    ## Agent
    opt["agent"] = SARSA(representation=representation, policy=policy,
                         discount_factor=domain.discount_factor,
                         learn_rate=0.1)
    opt["checks_per_policy"] = 100
    opt["max_steps"] = 2000
    opt["num_policy_checks"] = 10
    experiment = Experiment(**opt)
    return experiment
if __name__ == '__main__':
    # Run a single experiment (seed 1) with learning visualization enabled.
    experiment = make_experiment(1)
    experiment.run(visualize_steps=False,  # should each learning step be shown?
                   visualize_learning=True,  # show policy / value function?
                   visualize_performance=1)  # show performance runs?
    experiment.plot()
    experiment.save()
| {
"content_hash": "d59b27f5cf6809d0eb7f0897737f3e76",
"timestamp": "",
"source": "github",
"line_count": 59,
"max_line_length": 82,
"avg_line_length": 33.74576271186441,
"alnum_prop": 0.6790557508789553,
"repo_name": "imanolarrieta/RL",
"id": "8ecaca2f7278ccbe40fad3e89ea4dcf9ee6f9de9",
"size": "2013",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "examples/tutorial/IncrTabularTut_example.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "117712"
},
{
"name": "C++",
"bytes": "1575"
},
{
"name": "Python",
"bytes": "1350176"
}
],
"symlink_target": ""
} |
package com.alibaba.simpleimage.analyze.testbed;
import java.io.IOException;
import com.alibaba.simpleimage.analyze.ModifiableConst;
/**
 * TestSurf: threaded test bed for SURF feature matching. (The original
 * Chinese description was a placeholder: "implementation description TODO".)
 *
 * @author axman 2013-5-23 11:37:12 AM
 */
public class TestSurf extends Thread {

    // Matcher tuning constants must be in place before the analyze classes
    // read them, hence the static initializer.
    static {
        System.setProperty(ModifiableConst._TOWPNTSCALAMINUS, "8.0");
        System.setProperty(ModifiableConst._SLOPEARCSTEP, "2");
        System.setProperty(ModifiableConst._TOWPNTORIENTATIONMINUS, "0.05");
    }

    // Command-line arguments forwarded to testSurf().
    private String[] args;
    // Worker index identifying this thread's slice of the work.
    private int      idx;

    public TestSurf(String[] args, int idx){
        this.args = args;
        this.idx = idx;
    }

    public void run() {
        try {
            testSurf(args, idx);
        } catch (IOException e) {
            // Best-effort test harness: just report and let the thread die.
            e.printStackTrace();
        }
    }

    public static void main(String[] args) {
        if (args.length < 3) {
            System.out.println("argrements must be more than 3.");
            return;
        }
        System.out.println("model path:" + args[0]);
        System.out.println("logo path:" + args[1]);
        System.out.println("diff file path:" + args[2]);
        // NOTE(review): main() spawns MakeSurfPoint workers rather than
        // TestSurf ones, leaving run()/testSurf() unused here — confirm this
        // is intentional.
        for (int i = 0; i < 10; i++)
            new MakeSurfPoint(args, i).start();
    }

    // Body intentionally empty in this revision.
    public static void testSurf(String[] args, int offset) throws IOException {

    }
}
| {
"content_hash": "8a29a1e1fa6abacdbc941cea42802f6d",
"timestamp": "",
"source": "github",
"line_count": 53,
"max_line_length": 79,
"avg_line_length": 24.92452830188679,
"alnum_prop": 0.5927327781983346,
"repo_name": "alibaba/simpleimage",
"id": "077e9a59cb1f6f81008f638ada6fd469e7b9997f",
"size": "1688",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "simpleimage.analyze/src/test/java/com/alibaba/simpleimage/analyze/testbed/TestSurf.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "866943"
},
{
"name": "Shell",
"bytes": "4685"
}
],
"symlink_target": ""
} |
class GURL;
namespace base {
class DictionaryValue;
}
// Helper consts and methods for both cloud print and chrome browser.
namespace cloud_print {

// A map representing printer tags (tag name -> tag value).
typedef std::map<std::string, std::string> PrinterTags;

// Appends a relative path to the url making sure to append a '/' if the
// URL's path does not end with a slash. It is assumed that |path| does not
// begin with a '/'.
// NOTE: Since we ALWAYS want to append here, we simply append the path string
// instead of calling url_utils::ResolveRelative. The input |url| may or may not
// contain a '/' at the end.
std::string AppendPathToUrl(const GURL& url, const std::string& path);

// URL builders for the cloud print service endpoints. Each returns a
// fully-formed GURL rooted at |cloud_print_server_url|.
GURL GetUrlForSearch(const GURL& cloud_print_server_url);
GURL GetUrlForSubmit(const GURL& cloud_print_server_url);
GURL GetUrlForPrinterList(const GURL& cloud_print_server_url,
                          const std::string& proxy_id);
GURL GetUrlForPrinterRegistration(const GURL& cloud_print_server_url);
GURL GetUrlForPrinterUpdate(const GURL& cloud_print_server_url,
                            const std::string& printer_id);
GURL GetUrlForPrinterDelete(const GURL& cloud_print_server_url,
                            const std::string& printer_id,
                            const std::string& reason);
GURL GetUrlForJobFetch(const GURL& cloud_print_server_url,
                       const std::string& printer_id,
                       const std::string& reason);
GURL GetUrlForJobDelete(const GURL& cloud_print_server_url,
                        const std::string& job_id);
GURL GetUrlForJobStatusUpdate(const GURL& cloud_print_server_url,
                              const std::string& job_id,
                              const std::string& status_string);
GURL GetUrlForUserMessage(const GURL& cloud_print_server_url,
                          const std::string& message_id);
GURL GetUrlForGetAuthCode(const GURL& cloud_print_server_url,
                          const std::string& oauth_client_id,
                          const std::string& proxy_id);

// Parses the response data for any cloud print server request. The method
// returns null if there was an error in parsing the JSON. The succeeded
// value returns the value of the "success" value in the response JSON.
// Returns the response as a dictionary value.
scoped_ptr<base::DictionaryValue> ParseResponseJSON(
    const std::string& response_data,
    bool* succeeded);

// Prepares one value as part of a multi-part upload request.
void AddMultipartValueForUpload(const std::string& value_name,
                                const std::string& value,
                                const std::string& mime_boundary,
                                const std::string& content_type,
                                std::string* post_data);

// Returns the MIME type of multipart with |mime_boundary|.
std::string GetMultipartMimeType(const std::string& mime_boundary);

// Create a MIME boundary marker (27 '-' characters followed by 16 hex digits).
void CreateMimeBoundaryForUpload(std::string *out);

// Returns an MD5 hash for |printer_tags| and the default required tags.
std::string GetHashOfPrinterTags(const PrinterTags& printer_tags);

// Returns the post data for |printer_tags| and the default required tags.
std::string GetPostDataForPrinterTags(
    const PrinterTags& printer_tags,
    const std::string& mime_boundary,
    const std::string& proxy_tag_prefix,
    const std::string& tags_hash_tag_name);

// Get the cloud print auth header from |auth_token|.
std::string GetCloudPrintAuthHeader(const std::string& auth_token);

}  // namespace cloud_print
#endif // CHROME_COMMON_CLOUD_PRINT_CLOUD_PRINT_HELPERS_H_
| {
"content_hash": "076735c4cbe00a6af096551fbf33724c",
"timestamp": "",
"source": "github",
"line_count": 81,
"max_line_length": 80,
"avg_line_length": 45.5679012345679,
"alnum_prop": 0.6710918450284475,
"repo_name": "codenote/chromium-test",
"id": "71bd9b6446ecccdcb4a5b55909299c571f68a5f7",
"size": "4065",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "chrome/common/cloud_print/cloud_print_helpers.h",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
std::size_t Service::determine_min_fetch_size_for_new_stream() {
        // TODO:
        // We should probably figure out an initial min_fetch_size based on how many streams
        // are currently active, with a global budget shared by all of them. For now, every
        // new stream starts with the same relatively low constant fetch size.
        static constexpr std::size_t initial_fetch_size = 4 * 1024 * 1024; // 4 MiB

        return initial_fetch_size;
}
// generate and schedule a new CONSUME request to peer `node`, for replication of content
// for all of `partitions` (the peer is expected to be the leader of every one of them).
// A single connection per peer is used; if it is busy or still connecting, we simply
// return and rely on the response/connect handlers to retry.
void Service::replicate_from(cluster_node *const node, topic_partition *const *partitions, std::size_t partitions_cnt) {
        enum {
                trace = false,
        };
        TANK_EXPECT(node);
        TANK_EXPECT(partitions);
        TANK_EXPECT(partitions_cnt);

        // in case we have scheduled a retry
        abort_retry_consume_from(node);

        // Invariant:
        // for all [partitions, partitions + cnt)
        // the leader is `node`
        // so we really only need one connection
        auto c = node->consume_conn.ch.get();

        if (trace) {
                SLog(ansifmt::bold, ansifmt::color_brown, "Will attempt to replicate ", partitions_cnt, " from ", node->id, "@", node->ep, ansifmt::reset, "\n");
                SLog("To replicate:", values_repr_with_lambda(partitions, partitions_cnt, [](const auto it) noexcept {
                        return Buffer{}.append(it->owner->name(), '/', it->idx); }), "\n");
        }

        if (c) {
                if (const auto state = c->as.consumer.state; state == connection::As::Consumer::State::Connecting or state == connection::As::Consumer::State::Busy) {
                        // we are waiting for a response from an outstanding/active consume req
                        // or we are still trying to establish a connection
                        if (trace) {
                                SLog("Peer CONSUME connection is busy\n");
                        }
                        return;
                } else if (state == connection::As::Consumer::State::ScheduledShutdown) {
                        // reclaim the connection before its idle-shutdown timer fires
                        if (trace) {
                                SLog("Peer connection was scheduled for shutdown\n");
                        }
                        cancel_timer(&c->as.consumer.attached_timer.node);
                } else if (trace) {
                        SLog("Connection ", ptr_repr(c), " already available ", unsigned(c->as.consumer.state), "\n");
                }
        } else {
                if (trace) {
                        SLog("Requiring NEW connection\n");
                }
                c = new_peer_connection(node);
                if (not c) {
                        IMPLEMENT_ME();
                }
                // once we connect, consider_connected_consumer() will retry
                TANK_EXPECT(c->as.consumer.state == connection::As::Consumer::State::Connecting);
                return;
        }

        if (trace) {
                SLog(ansifmt::color_green, "Will generate a CONSUME request for node ", node->id, "@", node->ep, ansifmt::reset, " (", partitions_cnt, " partitions)\n");
        }

        // Build the ConsumePeer request payload.
        // Wire layout (as packed below): msg type u8, request length u32, node id u16,
        // max-wait u64, min-bytes u32, topics count u16, then per topic: name,
        // partitions count u16, and per partition: idx, seq u64, fetch size u32.
        // topics are expected to be ordered
        // see cluster_node::cluster::leadership::ordered_list()
        auto oq = c->outQ ?: (c->outQ = get_outgoing_queue());
        auto dvp = get_data_vector_payload();
        auto b = get_buf();

        b->pack(static_cast<uint8_t>(TankAPIMsgType::ConsumePeer));
        const auto req_size_offset = b->size();
        b->pack(static_cast<uint32_t>(0)); // request length (patched once the full payload is known)
        b->pack(static_cast<uint16_t>(cluster_state.local_node.id));
        b->pack(static_cast<uint64_t>(4 * 1000)); // TODO: max-wait
        b->pack(static_cast<uint32_t>(0)); // TODO: min.bytes
        const auto topics_cnt_offset = b->size();
        uint16_t topics_cnt{0};

        // u16 for total topics/u8 for Consume reqs
        b->pack(static_cast<uint16_t>(0));
        dvp->buf = b;

        // `partitions` are grouped by topic; the inner do/while consumes one topic's run
        for (size_t i{0}; i < partitions_cnt;) {
                auto topic = partitions[i]->owner;
                const auto base = i;

                b->pack(topic->name());
                const auto total_partitions_offset = b->size();

                // u16 for total partitions/u8 for Consume reqs
                b->pack(static_cast<uint16_t>(0));
                do {
                        auto p = partitions[i];
                        auto stream = p->cluster.rs;
                        uint64_t next;

                        // A pending explicit consume-from LSN takes precedence (and is
                        // consumed here); otherwise continue from our local log tail.
                        if (const auto v = p->cluster.consume_next_lsn; v != std::numeric_limits<uint64_t>::max()) {
                                next = v;
                                p->cluster.consume_next_lsn = std::numeric_limits<uint64_t>::max();
                        } else {
                                next = partition_log(p)->lastAssignedSeqNum + 1;
                        }

                        if (not stream) {
                                // no replication stream for that partition
                                // we need to track all replication streams so that we can know
                                // (peer providing us content for that partition, which is usually the leader but when
                                // the partition leader changes, we need to know that we are not replicating from that node anymore)
                                stream = p->cluster.rs = get_repl_stream();
                                stream->partition = p;
                                stream->min_fetch_size = determine_min_fetch_size_for_new_stream();
                                cluster_state.replication_streams.push_back(&stream->repl_streams_ll);
                        }

                        stream->ch.set(c);
                        stream->src = node;

                        if (trace) {
                                SLog("Will request topic ", p->owner->name(), "/", p->idx, " from seq ", next, " (local last assigned:", partition_log(p)->lastAssignedSeqNum, "), min_fetch_size = ", stream->min_fetch_size, "\n");
                        }

                        b->pack(p->idx); // partition
                        b->pack(static_cast<uint64_t>(next)); // absolute sequence number to consume from
                        b->pack(static_cast<uint32_t>(stream->min_fetch_size)); // fetch size
                } while (++i < partitions_cnt and partitions[i]->owner == topic);

                const uint16_t total_partitions = i - base;

                // back-patch this topic's partitions count
                *reinterpret_cast<uint16_t *>(b->data() + total_partitions_offset) = total_partitions;
                ++topics_cnt;
        }

        // back-patch topics count and the total request length
        *reinterpret_cast<uint16_t *>(b->data() + topics_cnt_offset) = topics_cnt;
        *reinterpret_cast<uint32_t *>(b->data() + req_size_offset) = b->size() - req_size_offset - sizeof(uint32_t);

        dvp->append(b->as_s32());
        oq->push_back(dvp);

        // mark the connection busy until the response arrives, then flush
        c->as.consumer.state = connection::As::Consumer::State::Busy;
        try_tx(c);
}
// Attempt to replicate content from a peer,
// for all partitions this node is a replica of and that peer is their leader.
// If nothing needs to be replicated, an existing consumer connection to the peer is
// scheduled for (deferred) shutdown instead of being torn down immediately.
void Service::try_replicate_from(cluster_node *const peer) {
        enum {
                trace = false,
        };
        TANK_EXPECT(peer);

        if (not peer->available()) {
                if (trace) {
                        SLog("Will NOT replicate from peer ", peer->id, "@", peer->ep, " because it is not available\n");
                }
                return;
        }

        auto partitions = partitions_to_replicate_from(peer);

        if (trace) {
                SLog("Total partitions to replicate from peer ", peer->id, '@', peer->ep, ' ', partitions ? partitions->size() : 0, "\n");
        }

        if (partitions and not partitions->empty()) {
                replicate_from(peer, partitions->data(), partitions->size());
        } else if (auto c = peer->consume_conn.ch.get()) {
                if (trace) {
                        SLog("Connection to peer is no longer required, no partitions to replicate\n");
                }

                // leaf_p is presumably null iff the timer is not armed — TODO confirm
                if (not c->as.consumer.attached_timer.node.node.leaf_p) {
                        // We no longer need this connection, but we 'll keep it around in case we need it later
                        // We 'll ready a timer so that if we don't need this within some time, we 'll shut it down
                        // TODO: verify again
                        if (trace) {
                                SLog("Will schedule shutdown of consumer connection\n");
                        }

                        c->as.consumer.state = connection::As::Consumer::State::ScheduledShutdown;
                        c->as.consumer.attached_timer.node.key = now_ms + 4 * 1000;
                        c->as.consumer.attached_timer.type = timer_node::ContainerType::ShutdownConsumerConn;
                        register_timer(&c->as.consumer.attached_timer.node);
                }
        }
}
// For each peer in `peers`, initiate a new CONSUME request for all partitions
// that peer is a leader of, unless the connection to that peer is already busy.
//
// TODO: maybe just iterate cluster_state.local_node.replication_streams
// and collect all partitions where src is in peers
void Service::try_replicate_from(const std::unordered_set<cluster_node *> &peers) {
        enum {
                trace = false,
        };

        if (trace) {
                SLog("Will attempt to replicate from ", peers.size(), " cluster peers\n");
        }

        // delegate the per-peer work to the single-peer overload
        for (cluster_node *const peer : peers) {
                try_replicate_from(peer);
        }
}
// `start`: the (partition, leader) pairs to begin replicating from
// `stop`:  the (partition, leader) pairs to stop replicating from
// Stops are processed before starts; the distinct set of source peers from
// `start` is then handed to try_replicate_from() in one batch.
void Service::replicate_partitions(std::vector<std::pair<topic_partition *, cluster_node *>> *start,
                                   std::vector<std::pair<topic_partition *, cluster_node *>> *stop) {
        enum {
                trace = false,
        };
        auto self = cluster_state.local_node.ref;
        auto & peers_set = reusable.peers_set;

        if (trace) {
                SLog("REPLICATE PARTITIONS start = ", start ? start->size() : 0, ", stop = ", stop ? stop->size() : 0, "\n");
        }

        peers_set.clear();

#ifdef TANK_RUNTIME_CHECKS
        // sanity check: we must be a replica of everything we are asked to start replicating
        if (start) {
                for (auto &it : *start) {
                        TANK_EXPECT(self->is_replica_for(it.first));
                }
        }
#endif

        if (stop) {
                for (auto [p, src] : *stop) {
                        if (trace) {
                                SLog("STOP:", p->owner->name(), "/", p->idx, " from ", src->id, "@", src->ep, "\n");
                        }

                        try_abort_replication(p, src, __LINE__);
                }
        }

        if (start) {
                for (auto [p, src] : *start) {
                        if (trace) {
                                SLog("START: ", p->owner->name(), "/", p->idx, " from ", src->id, "@", src->ep, "\n");
                        }

                        // make sure we are not trying to start replication from us
                        TANK_EXPECT(src != cluster_state.local_node.ref);
                        peers_set.insert(src);
                }
        }

        if (not peers_set.empty()) {
                try_replicate_from(peers_set);
        }
}
// `gen` is the ModifyIndex of the key in configs/
// we use conf-updates/ for updates and we compare against the ModifyIndex of the key in conf-updates
// we still have access to ModifyIndex of the configs/
void Service::process_cluster_config(const str_view32 conf, const uint64_t gen) {
        // TODO(review): cluster-wide configuration updates are currently only
        // logged here, not applied anywhere.
        SLog(ansifmt::bold, ansifmt::inverse, ansifmt::color_red, "CLUSTER: cluster config updates", ansifmt::reset, " ", gen, " [", conf, "]\n");
}
// Applies a per-topic JSON configuration update received from consul.
//
// `topic_name`: topic whose configuration changed
// `conf`      : raw JSON configuration document (an object)
// `gen`       : generation of the update (ModifyIndex of the key)
//
// Only the "rf" (replication factor) key is honored for now. Updates for
// unknown topics, unexpected keys, or out-of-range values are ignored, and
// parse failures are swallowed (best-effort) — matching prior behavior.
void Service::process_topic_config(const str_view8 topic_name, const str_view32 conf, const uint64_t gen) {
        enum {
                trace = false,
        };
        using json = nlohmann::json;

        if (trace) {
                SLog(ansifmt::bold, ansifmt::inverse, ansifmt::color_red, "CLUSTER: configuration of topic [", topic_name, "] updated", ansifmt::reset, " ", gen, "\n");
        }

        try {
                if (const auto doc = json::parse(conf); doc.is_object()) {
                        for (auto it = doc.begin(); it != doc.end(); ++it) {
                                const auto &key = it.key();
                                const auto &value = it.value();

                                if (key == "rf") {
                                        const auto rf = value.get<int64_t>();

                                        // See topic.cluster.rf_ comments for why rf_ cannot be lower than 0
                                        // (`or` used here for consistency with the alternative tokens used throughout this file)
                                        if (rf < 1 or rf > 64) {
                                                if (trace) {
                                                        SLog("Ignoring: bogus RF ", rf, "\n");
                                                }
                                        } else {
                                                if (trace) {
                                                        SLog("RF to ", rf, " for '", topic_name, "'\n");
                                                }

                                                if (auto t = topic_by_name(topic_name)) {
                                                        // stage the RF change and schedule the batched cluster-updates apply
                                                        auto _t = cluster_state.updates.get_topic(t);

                                                        _t->set_rf(rf);
                                                        schedule_cluster_updates_apply(__FUNCTION__);
                                                } else if (trace) {
                                                        SLog("Topic [", topic_name, "] is not defined\n");
                                                }
                                        }
                                } else if (trace) {
                                        SLog("Unexpected key '", key, "'\n");
                                }
                        }
                } else if (trace) {
                        SLog("Unexpected response\n");
                }
        } catch (const std::exception &e) {
                if (trace) {
                        SLog("Failed:", e.what(), "\n");
                }
        }
}
// Attempts to acquire cluster leadership unless an attempt is already in flight.
// `ref` is a caller-supplied marker (line number) used only for tracing.
void Service::try_become_cluster_leader(const uint32_t ref) {
        static constexpr bool trace{false};
        const auto flag = unsigned(ConsulState::Flags::AttemptBecomeClusterLeader);

        if (consul_state.flags & flag) {
                // an attempt is already pending; don't issue another request
                if (trace) {
                        SLog("Cannot TryBecomeClusterLeader ref = ", ref, "\n");
                }
                return;
        }

        if (trace) {
                SLog("Yes, will try, ref = ", ref, "\n");
        }

        cancel_timer(&try_become_cluster_leader_timer.node); // just in case
        consul_state.flags |= flag;
        schedule_consul_req(consul_state.get_req(consul_request::Type::TryBecomeClusterLeader), true);
}
// Invoked whenever we get a response to AcquireNodeID.
// The first acquisition reserves the id; the second (after bootstrap state
// updates have been processed) also publishes our endpoint, after which we
// may contend for cluster leadership.
void Service::cluster_node_id_acquired() {
        static constexpr bool trace{false};
        auto n = cluster_state.local_node.ref;

        TANK_EXPECT(n);

        if (trace) {
                SLog(ansifmt::bold, ansifmt::inverse, ansifmt::color_red, "CLUSTER: node ID has been acquired", ansifmt::reset, "\n");
        }

        if (consul_state.flags & unsigned(ConsulState::Flags::BootstrapStateUpdatesProcessed)) {
                // This is the second AcquireNodeID request, which we issued after we processed the bootstrap state updates,
                // and it was used to assign our endpoint to the reserved nodes/id
                //
                // Only now can we try to acquire cluster leadership
                // @see conclude_bootstrap_updates() for the rationale and why
                // we shouldn't try to become a leader prior to acquiring the node ID the second time by setting our endpoint here
                if (trace) {
                        SLog("Node ID AND endpoint acquired/set\n");
                }

                TANK_EXPECT(consul_state.bootstrap_state_updates_processed());

                if (!cluster_state.leader_id) {
                        if (trace) {
                                SLog("Will NOW try to become cluster leader because no cluster leader\n");
                        }

                        try_become_cluster_leader(__LINE__);
                }
                return;
        }

        // explicitly
        n->available_ = true;

        // We need to immediately begin accepting connections
        // because other nodes(e.g cluster leader) may promote us to leaders for 1+ partitions
        // so we need to be able to accept client requests now.
        //
        // XXX: Turns out this is a *bad* idea. We only need to begin accepting connections as soon as we have
        // processed the states update collected during bootstrap. This is because otherwise
        // this node may respond with e.g No Leader to requests from other leaders or from clients
        //
        // It is OK if we are deferring accepting connections until then because even if
        // other nodes rush to try to replicate from here and fail, they will retry soon thereafter
        //
        // We also now do NOT set local endpoint when we acquire the node id, so that nodes
        // will not try to replicate from us until we have processed the bootstrap updates
        // and once we do, then we acquire the ID again this time with the endpoint set
        // so that other nodes will commence replication.
        //
        // WAS: enable_listener();
        //
        // We also can't try_become_cluster_leader() here; we can only do so if we have
        // acquired the node AND assigned the endpoint to it (see ^^)
        if (const auto bm = unsigned(ConsulState::Flags::RegistrationComplete); 0 == (consul_state.flags & bm)) {
                // this usually takes around 0.2s
                static constexpr bool trace{false};

                consul_state.flags |= bm;

                if (trace) {
                        SLog(ansifmt::bold, ansifmt::color_red, ansifmt::bgcolor_green, "CLUSTER: reg is complete now", ansifmt::reset,
                             " took ", duration_repr(Timings::Milliseconds::ToMicros(now_ms - consul_state.reg_init_ts)), "\n");
                }

                force_cluster_updates_apply();
        }
}
// Invoked once all cluster configurations have been fetched from consul.
// After processing them, we make sure a consul session exists: renew the
// current one if we have it, otherwise create a fresh one (fast and cheap).
// The session is required before we can register our ID in the nodes namespace.
void Service::process_fetched_cluster_configurations(consul_request *const req, const str_view32 content) {
        static constexpr bool trace{false};

        if (trace) {
                SLog(ansifmt::bold, ansifmt::inverse, ansifmt::color_red, "CLUSTER: all configurations retrieved", ansifmt::reset, "\n");
        }

        process_consul_cluster_configurations(req, content);

        // renew an existing session (a new one is created if renewal fails),
        // otherwise create one from scratch
        const auto next_req_type = consul_state.session_id()
                                       ? consul_request::Type::RenewSession
                                       : consul_request::Type::CreateSession;

        if (trace) {
                if (next_req_type == consul_request::Type::RenewSession) {
                        SLog("Will renew session\n");
                } else {
                        SLog("Will create a new session\n");
                }
        }

        schedule_consul_req(consul_state.get_req(next_req_type), true);
}
| {
"content_hash": "4f99dc014e86b8d818a0648a575721c2",
"timestamp": "",
"source": "github",
"line_count": 449,
"max_line_length": 229,
"avg_line_length": 43.40311804008909,
"alnum_prop": 0.5083641215106732,
"repo_name": "phaistos-networks/TANK",
"id": "9cee3abbf6c75e18802a21687ad89d920673999f",
"size": "22275",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "service_cluster.cpp",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "910180"
},
{
"name": "C++",
"bytes": "6560753"
},
{
"name": "CMake",
"bytes": "54273"
},
{
"name": "CSS",
"bytes": "896"
},
{
"name": "Dockerfile",
"bytes": "823"
},
{
"name": "Makefile",
"bytes": "157483"
},
{
"name": "Meson",
"bytes": "300"
},
{
"name": "Python",
"bytes": "3952"
},
{
"name": "Roff",
"bytes": "339"
},
{
"name": "Shell",
"bytes": "4955"
}
],
"symlink_target": ""
} |
module AmazonFlexPay::API
class Error < StandardError
attr_accessor :request, :request_id, :code, :message
def initialize(code, message, request_id, request)
@request_id, @request, @code, @message = request_id, request, code, message
end
def to_s
message
end
end
# generated from http://docs.amazonwebservices.com/AmazonFPS/latest/FPSAPIReference/APIErrorCodesTable.html
# as of: 2012-06-15
%w(
AccessFailure
AccountClosed
AccountLimitsExceeded
AmountOutOfRange
AuthFailure
ConcurrentModification
DuplicateRequest
InactiveInstrument
IncompatibleTokens
InstrumentAccessDenied
InstrumentExpired
InsufficientBalance
InternalError
InvalidAccountState
InvalidAccountState_Caller
InvalidAccountState_Recipient
InvalidAccountState_Sender
InvalidCallerReference
InvalidClientTokenId
InvalidDateRange
InvalidParams
InvalidPaymentInstrument
InvalidPaymentMethod
InvalidRecipientForCCTransaction
InvalidSenderRoleForAccountType
InvalidTokenId
InvalidTokenId_Recipient
InvalidTokenId_Sender
InvalidTokenType
InvalidTransactionId
InvalidTransactionState
NotMarketplaceApp
OriginalTransactionFailed
OriginalTransactionIncomplete
PaymentInstrumentNotCC
PaymentMethodNotDefined
PrepaidFundingLimitExceeded
RefundAmountExceeded
SameSenderAndRecipient
SameTokenIdUsedMultipleTimes
SenderNotOriginalRecipient
SettleAmountGreaterThanDebt
SettleAmountGreaterThanReserveAmount
SignatureDoesNotMatch
TokenAccessDenied
TokenNotActive
TokenNotActive_Recipient
TokenNotActive_Sender
TokenUsageError
TransactionDenied
TransactionFullyRefundedAlready
TransactionTypeNotRefundable
UnverifiedAccount_Recipient
UnverifiedAccount_Sender
UnverifiedBankAccount
UnverifiedEmailAddress_Caller
UnverifiedEmailAddress_Recipient
UnverifiedEmailAddress_Sender
).each do |name|
const_set(name, Class.new(Error))
end
# undocumented errors
%w(
InvalidSignature
).each do |name|
const_set(name, Class.new(Error))
end
end | {
"content_hash": "1ec8a629c983d7f15dcc775cab3ed1a3",
"timestamp": "",
"source": "github",
"line_count": 85,
"max_line_length": 109,
"avg_line_length": 25.623529411764707,
"alnum_prop": 0.7690541781450873,
"repo_name": "selfstar/amazon_flex_pay",
"id": "ec306a81138e7207382e1187251fcc8c29c328cf",
"size": "2178",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "lib/amazon_flex_pay/api/errors.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Ruby",
"bytes": "83161"
}
],
"symlink_target": ""
} |
<?php
class attempt extends master {
public $attempt_id;
public $user_id;
public $score;
public $timestamp;
public $status = 0;
public $outOf = 0;
function __constuct() {
parent::__construct();
}
public function getFromDB($attempt_id) {
$attempt_id = $this -> mysqli -> escape_string($attempt_id);
$query = "SELECT * FROM attempt WHERE attempt_id = ?";
if ($stmt = $this -> mysqli -> prepare($query) or die($this -> mysqli -> error)) {
$stmt -> bind_param('i', $attempt_id);
$stmt -> execute() or die($stmt -> error);
$result = $stmt -> get_result();
while ($obj = $result -> fetch_object()) {
$this -> attempt_id = $obj -> attempt_id;
$this -> score = $obj -> score;
$this -> timestamp = $obj -> timestamp;
$this -> user_id = $obj -> user_id;
$this -> status = $obj -> status;
$this -> outOf = $obj -> out_of;
}
}
}
public function get_attempt_id() {
return $this -> attempt_id;
}
public function set_attempt_id($id) {
$id = $this -> mysqli -> escape_string($id);
$this -> attempt_id = $id;
return TRUE;
}
public function get_user_id() {
return $this -> user_id;
}
public function set_user_id($id) {
$id = $this -> mysqli -> escape_string($id);
$this -> user_id = $id;
return TRUE;
}
public function get_score() {
return $this -> score;
}
public function set_score($score) {
$score = $this -> mysqli -> escape_string($score);
$this -> score = $score;
return TRUE;
}
public function get_timestamp() {
return $this -> timestamp;
}
public function set_timestamp($timestamp) {
$timestamp = $this -> mysqli -> escape_string($timestamp);
$this -> timestamp = $timestamp;
return TRUE;
}
public function enable_attempt() {
$this -> status = 1;
return TRUE;
}
public function disable_attempt() {
$this -> status = 0;
return TRUE;
}
public function get_outOf() {
return $this -> outOf;
}
public function set_outOf($outOf) {
$outOf = $this -> mysqli -> escape_string($outOf);
$this -> outOf = $outOf;
return TRUE;
}
public function saveToDB() {
if (isset($this -> attempt_id)) {
die("attempt_id set; object already exists in DB");
} else {
$query = "INSERT INTO attempt VALUES (NULL, ?, ?, NOW(), ?, ?)";
if ($stmt = $this -> mysqli -> prepare($query) or die($this -> mysqli -> error)) {
$stmt -> bind_param('iiii', $this -> user_id, $this -> score, $this -> status, $this -> outOf);
$stmt -> execute() or die($stmt -> error);
return TRUE;
}
}
}
public function deleteFromDB() {
if (!isset($this -> attempt_id)) {
die("attempt_id not set; no object referenced in DB");
} else {
$query = "DELETE FROM attempt WHERE attemp_id = ?";
if ($stmt = $this -> mysqli -> prepare($query) or die($this -> mysqli -> error)) {
$stmt -> bind_param('i', $this -> attempt_id);
$stmt -> execute() or die($stmt -> error);
return TRUE;
} else {
return FALSE;
}
}
}
public function updateInDB() {
if (!isset($this -> attempt_id)) {
die("attempt_id not set; no object referenced in DB");
} else {
$query = "UPDATE attempt SET user_id = ?, score = ?, timestamp = '$this->timestamp', status = ?, out_of = ? WHERE attempt_id = ?";
if ($stmt = $this -> mysqli -> prepare($query) or die($this -> mysqli -> error)) {
$stmt -> bind_param('iiiii', $this -> user_id, $this -> score, $this -> status, $this -> outOf, $this -> attempt_id);
$stmt -> execute() or die($stmt -> error);
return TRUE;
} else {
return FALSE;
}
}
}
public function get_latest_attempt_from_user_id($user_id) {
$user_id = $this -> mysqli -> escape_string($user_id);
$query = " SELECT
*
FROM
attempt
WHERE
? = user_id
ORDER BY
attempt_id DESC
LIMIT 1";
if ($stmt = $this -> mysqli -> prepare($query) or die($this -> mysqli -> error)) {
$stmt -> bind_param('i', $user_id);
$stmt -> execute();
$result = $stmt -> get_result();
$obj = $result -> fetch_object();
return $obj;
}
}
public function getScore($attempt_id) {
$attempt_id = $this -> mysqli -> escape_string($attempt_id);
$query = " SELECT
a.attempt_id
FROM
attempt AS a,
attempt_sqa_map as asm,
section_question_answer_map as sqam
WHERE
? = a.attempt_id AND
a.attempt_id = asm.attempt_id AND
asm.sqam_id = sqam.sqam_id AND
asm.answer_id = sqam.answer_id";
if ($stmt = $this -> mysqli -> prepare($query) or die($this -> mysqli -> error)) {
$stmt -> bind_param('i', $attempt_id);
$stmt -> execute() or die($stmt -> error);
$result = $stmt -> get_result();
$rows = $result -> num_rows;
return $rows;
}
}
public function get_OutOfScore() {
$query = "SELECT * FROM attempt AS a, attempt_sqa_map AS asm, section_question_answer_map AS sqam WHERE ? = a.attempt_id AND a.attempt_id = asm.attempt_id AND asm.sqam_id = sqam.sqam_id";
if ($stmt = $this->mysqli->prepare($query) or die($this->mysqli->error)) {
$stmt -> bind_param('i', $this->attempt_id);
if ($stmt -> execute() or die ($stmt -> error)) {
$result = $stmt -> get_result();
return $result->num_rows;
}
}
}
public function deleteAttempt() {
$q = "DELETE FROM attempt_sqa_map WHERE attempt_id = ?";
if ($s = $this -> mysqli -> prepare($q) or die($this -> mysqli -> error)) {
$s -> bind_param('i', $this -> attempt_id);
$s -> execute() or die($s -> error);
}
$q = "DELETE FROM attempt_exam_map WHERE attempt_id = ?";
if ($s = $this -> mysqli -> prepare($q) or die($this -> mysqli -> error)) {
$s -> bind_param('i', $this -> attempt_id);
$s -> execute() or die($s -> error);
}
$q = "DELETE FROM attempt WHERE attempt_id = ?";
if ($s = $this -> mysqli -> prepare($q) or die($this -> mysqli -> error)) {
$s -> bind_param('i', $this -> attempt_id);
$s -> execute() or die($s -> error);
}
}
}
?> | {
"content_hash": "29d156f38f751d64209e95e9bb50a739",
"timestamp": "",
"source": "github",
"line_count": 212,
"max_line_length": 189,
"avg_line_length": 27.87735849056604,
"alnum_prop": 0.5774957698815567,
"repo_name": "evanlouie/exam",
"id": "eb098550f21324999c30e4ad6bc70f98fb7e1de4",
"size": "5910",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "_classes/attempt.php",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "JavaScript",
"bytes": "23082"
},
{
"name": "PHP",
"bytes": "138397"
}
],
"symlink_target": ""
} |
package sq.squ1rr.mcc4.layout;
import android.content.Context;
import android.view.View;
/**
* Minecraft Select Button.
* Acts like a list in a button, selects next element when the button is
* clicked.
* @author Aleksandr Belkin
*/
public class McSelector extends McButton {
/** labels to show */
private String[] labels = null;
/** IDs assigned to labels */
private int[] ids = null;
/** selected element */
private int selected = 0;
/** click listener */
private OnClickListener clickListener = null;
/**
* Create Button
* @param context
*/
public McSelector(Context context) {
super(context);
super.setOnClickListener(changeLabel);
}
/**
* Returns index of the selected label
* @return
*/
public int getSelectedIndex() {
return selected;
}
/**
* Returns selected ID
* @return
*/
public int getSelectedId() {
return ids[selected];
}
/**
* Sets new labels
* @param _labels
*/
public void setLabels(String[] _labels) {
labels = _labels;
invalidate();
}
/**
* Sets IDs associated with labels
* @param _ids
*/
public void setIds(int[] _ids) {
ids = _ids;
}
/**
* Sets new labels from string resources and assigns IDs
*/
public void setLabels(int[] stringIds) {
String[] labels = new String[stringIds.length];
for(int i = 0; i < stringIds.length; ++i) {
labels[i] = getContext().getString(stringIds[i]);
}
ids = stringIds;
setLabels(labels);
}
/**
* Selects the label by index
* @param index
*/
public void select(int index) {
selected = index;
invalidate();
}
/**
* Selects the label based on ID
* @param id
*/
public void selectId(int id) {
selected = 0;
for(int i : ids) {
if(i == id) break;
selected++;
}
invalidate();
}
/*
* (non-Javadoc)
* @see android.view.View#invalidate()
*/
@Override
public void invalidate() {
if(labels != null && getText() != labels[selected]) {
setText(labels[selected]);
}
super.invalidate();
}
/*
* (non-Javadoc)
* @see android.view.View#setOnClickListener(android.view.View.OnClickListener)
*/
@Override
public void setOnClickListener(OnClickListener _clickListener) {
clickListener = _clickListener;
}
/**
* Increments selection index, calls user listener
*/
private OnClickListener changeLabel = new OnClickListener() {
@Override
public void onClick(View view) {
selected = (selected + 1) % labels.length;
if(clickListener != null) clickListener.onClick(view);
invalidate();
}
};
}
| {
"content_hash": "e0ac210a507b98c2564cc6c888743b59",
"timestamp": "",
"source": "github",
"line_count": 134,
"max_line_length": 83,
"avg_line_length": 22.365671641791046,
"alnum_prop": 0.5455455455455456,
"repo_name": "sQu1rr/minecraft-connect-4",
"id": "5b14032e4b0b21ef17553279af64eb764f5e32c0",
"size": "3060",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/sq/squ1rr/mcc4/layout/McSelector.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Java",
"bytes": "144796"
}
],
"symlink_target": ""
} |
<?php
namespace Flowcode\FinancialBundle\DependencyInjection;
use Symfony\Component\DependencyInjection\ContainerBuilder;
use Symfony\Component\Config\FileLocator;
use Symfony\Component\HttpKernel\DependencyInjection\Extension;
use Symfony\Component\DependencyInjection\Loader;
/**
* This is the class that loads and manages your bundle configuration.
*
* @link http://symfony.com/doc/current/cookbook/bundles/extension.html
*/
class FlowcodeFinancialExtension extends Extension
{
/**
* {@inheritdoc}
*/
public function load(array $configs, ContainerBuilder $container)
{
$configuration = new Configuration();
$config = $this->processConfiguration($configuration, $configs);
$loader = new Loader\YamlFileLoader($container, new FileLocator(__DIR__.'/../Resources/config'));
$loader->load('services.yml');
}
}
| {
"content_hash": "feb0bf0908a4e1fac375726464ab3772",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 105,
"avg_line_length": 31.214285714285715,
"alnum_prop": 0.7311212814645309,
"repo_name": "flowcode/financial-bundle",
"id": "893cf855ec34a346bde854c89113544123dd5855",
"size": "874",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/Flowcode/FinancialBundle/DependencyInjection/FlowcodeFinancialExtension.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "13"
},
{
"name": "PHP",
"bytes": "124799"
}
],
"symlink_target": ""
} |
package com.beecavegames.bjc.handlers;
import lombok.NoArgsConstructor;
import java.util.HashMap;
import java.util.Map;
import com.beecavegames.Game;
import com.beecavegames.GameResponse;
import com.beecavegames.GameSession;
import com.beecavegames.entities.PlayerGrant.GrantType;
import com.beecavegames.util.MoneyAmount;
@NoArgsConstructor
public class FundResponse<G extends Game> extends GameResponse<G> {
public Map<GrantType, MoneyAmount> currencies=new HashMap<>();
public FundResponse(GameSession session, GameResponse<G> response, Map<GrantType, MoneyAmount> balances) {
super(session,response != null?response.game:null);
currencies = balances;
status = response != null?response.status:Status.OK;
}
public FundResponse(String string) {
super(string);
}
public FundResponse(Exception ie) {
super(ie);
}
}
| {
"content_hash": "543f2dbd56933d73c17379f5bb932aa3",
"timestamp": "",
"source": "github",
"line_count": 33,
"max_line_length": 107,
"avg_line_length": 25.515151515151516,
"alnum_prop": 0.7826603325415677,
"repo_name": "sgmiller/galeforce",
"id": "e1226c7b05f68b923949212be00c590afaf967cf",
"size": "842",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "gameelements/src/main/java/bjc/handlers/FundResponse.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "347155"
}
],
"symlink_target": ""
} |
using ClosedXML.Excel;
using DocumentFormat.OpenXml;
using NUnit.Framework;
using System;
namespace ClosedXML.Tests.Excel
{
[TestFixture]
public class ExtensionsTests
{
[Test]
public void FixNewLines()
{
Assert.AreEqual("\n".FixNewLines(), Environment.NewLine);
Assert.AreEqual("\r\n".FixNewLines(), Environment.NewLine);
Assert.AreEqual("\rS\n".FixNewLines(), "\rS" + Environment.NewLine);
Assert.AreEqual("\r\n\n".FixNewLines(), Environment.NewLine + Environment.NewLine);
}
[Test]
public void DoubleSaveRound()
{
Double value = 1234.1234567;
Assert.AreEqual(value.SaveRound(), Math.Round(value, 6));
}
[Test]
public void DoubleValueSaveRound()
{
Double value = 1234.1234567;
Assert.AreEqual(new DoubleValue(value).SaveRound().Value, Math.Round(value, 6));
}
[TestCase("NoEscaping", ExpectedResult = "NoEscaping")]
[TestCase("1", ExpectedResult = "'1'")]
[TestCase("AB-CD", ExpectedResult = "'AB-CD'")]
[TestCase(" AB", ExpectedResult = "' AB'")]
[TestCase("Test sheet", ExpectedResult = "'Test sheet'")]
[TestCase("O'Kelly", ExpectedResult = "'O''Kelly'")]
[TestCase("A2+3", ExpectedResult = "'A2+3'")]
[TestCase("A\"B", ExpectedResult = "'A\"B'")]
[TestCase("A!B", ExpectedResult = "'A!B'")]
[TestCase("A~B", ExpectedResult = "'A~B'")]
[TestCase("A^B", ExpectedResult = "'A^B'")]
[TestCase("A&B", ExpectedResult = "'A&B'")]
[TestCase("A>B", ExpectedResult = "'A>B'")]
[TestCase("A<B", ExpectedResult = "'A<B'")]
[TestCase("A.B", ExpectedResult = "A.B")]
[TestCase(".", ExpectedResult = "'.'")]
[TestCase("A_B", ExpectedResult = "A_B")]
[TestCase("_", ExpectedResult = "_")]
[TestCase("=", ExpectedResult = "'='")]
[TestCase("A,B", ExpectedResult = "'A,B'")]
[TestCase("A@B", ExpectedResult = "'A@B'")]
[TestCase("(Test)", ExpectedResult = "'(Test)'")]
[TestCase("A#", ExpectedResult = "'A#'")]
[TestCase("A$", ExpectedResult = "'A$'")]
[TestCase("A%", ExpectedResult = "'A%'")]
[TestCase("ABC1", ExpectedResult = "'ABC1'")]
[TestCase("ABCD1", ExpectedResult = "ABCD1")]
[TestCase("R1C1", ExpectedResult = "'R1C1'")]
[TestCase("A{", ExpectedResult = "'A{'")]
[TestCase("A}", ExpectedResult = "'A}'")]
[TestCase("A`", ExpectedResult = "'A`'")]
[TestCase("Русский", ExpectedResult = "Русский")]
[TestCase("日本語", ExpectedResult = "日本語")]
[TestCase("한국어", ExpectedResult = "한국어")]
[TestCase("Slovenščina", ExpectedResult = "Slovenščina")]
[TestCase("", ExpectedResult = "")]
[TestCase(null, ExpectedResult = null)]
public string CanEscapeSheetName(string sheetName)
{
return StringExtensions.EscapeSheetName(sheetName);
}
[TestCase("TestSheet", ExpectedResult = "TestSheet")]
[TestCase("'Test sheet'", ExpectedResult = "Test sheet")]
[TestCase("'O''Kelly'", ExpectedResult = "O'Kelly")]
public string CanUnescapeSheetName(string sheetName)
{
return StringExtensions.UnescapeSheetName(sheetName);
}
}
}
| {
"content_hash": "0ba05efee80768707875981da3d4c792",
"timestamp": "",
"source": "github",
"line_count": 84,
"max_line_length": 95,
"avg_line_length": 40.55952380952381,
"alnum_prop": 0.5600234810683886,
"repo_name": "ClosedXML/ClosedXML",
"id": "56df75c47f83c9fd7e80d635ccfd973744cd8765",
"size": "3451",
"binary": false,
"copies": "2",
"ref": "refs/heads/develop",
"path": "ClosedXML.Tests/Excel/Misc/ExtensionsTests.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C#",
"bytes": "3965501"
}
],
"symlink_target": ""
} |
import { onRenderBody } from "../gatsby-ssr"
describe(`gatsby-plugin-fullstory`, () => {
describe(`onRenderBody`, () => {
describe(`in development mode`, () => {
it(`does not set any head components`, () => {
const setHeadComponents = jest.fn()
onRenderBody({ setHeadComponents }, {})
expect(setHeadComponents).not.toHaveBeenCalled()
})
})
describe(`in production mode`, () => {
let env
beforeAll(() => {
env = process.env.NODE_ENV
process.env.NODE_ENV = `production`
})
afterAll(() => {
process.env.NODE_ENV = env
})
it(`sets the correct head components`, () => {
const setHeadComponents = jest.fn()
const pluginOptions = { fs_org: `test-org` }
onRenderBody({ setHeadComponents }, pluginOptions)
expect(setHeadComponents.mock.calls).toMatchSnapshot()
})
})
})
})
| {
"content_hash": "e97c89118acb8094912dbc417f876ea5",
"timestamp": "",
"source": "github",
"line_count": 37,
"max_line_length": 62,
"avg_line_length": 25.054054054054053,
"alnum_prop": 0.5652642934196332,
"repo_name": "gatsbyjs/gatsby",
"id": "2e76d3374c06bc86653a82545a1ecb4856132ee5",
"size": "927",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "packages/gatsby-plugin-fullstory/src/__tests__/gatsby-ssr.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "93774"
},
{
"name": "Dockerfile",
"bytes": "2751"
},
{
"name": "EJS",
"bytes": "461"
},
{
"name": "HTML",
"bytes": "62227"
},
{
"name": "JavaScript",
"bytes": "5243904"
},
{
"name": "Less",
"bytes": "218"
},
{
"name": "PHP",
"bytes": "2010"
},
{
"name": "Python",
"bytes": "281"
},
{
"name": "SCSS",
"bytes": "218"
},
{
"name": "Shell",
"bytes": "10621"
},
{
"name": "Stylus",
"bytes": "206"
},
{
"name": "TypeScript",
"bytes": "3099577"
}
],
"symlink_target": ""
} |
ACCEPTED
#### According to
Euro+Med Plantbase
#### Published in
null
#### Original name
Hieracium incisum subsp. ciliatifolium Zahn
### Remarks
null | {
"content_hash": "bd6ab6381ee5799c86993ac9b66ab367",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 43,
"avg_line_length": 11.692307692307692,
"alnum_prop": 0.7302631578947368,
"repo_name": "mdoering/backbone",
"id": "4afa5b0cbb3c7adca1cbd8d221bd9c74f28c765c",
"size": "237",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "life/Plantae/Magnoliophyta/Magnoliopsida/Asterales/Asteraceae/Hieracium/Hieracium pallescens/Hieracium pallescens ciliatifolium/README.md",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
layout: organization
category: national
title: Iwate Yuicco
impact_area: Disaster Relief
keywords:
location_services:
location_offices:
website: http://eng.yuicco.com/
description:
mission: |
To support recovery and relief efforts in the wake of the March 2011 earthquake that took place in Japan.
cash_grants:
grants:
service_opp:
services:
learn:
cont_relationship:
salutation:
first_name:
last_name:
title_contact_person:
city: Tokyo
state: US
address: |
Japan
Tokyo US
lat: 39.390897
lng: -99.066067
phone:
ext:
fax:
email:
preferred_contact:
contact_person_intro:
---
| {
"content_hash": "ee6d8b50927394bcad37317d6289e69d",
"timestamp": "",
"source": "github",
"line_count": 41,
"max_line_length": 107,
"avg_line_length": 14.829268292682928,
"alnum_prop": 0.7351973684210527,
"repo_name": "flipside-org/penny-harvest",
"id": "103ea5012909ff550fd3763bd0a115165f3ab384",
"size": "612",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "_posts/organizations/2015-01-12-O639.md",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "392900"
},
{
"name": "JavaScript",
"bytes": "30565"
}
],
"symlink_target": ""
} |
use futures_sink::Sink;
use pin_project_lite::pin_project;
use std::io;
use std::pin::Pin;
use std::task::{Context, Poll};
use tokio::io::AsyncWrite;
pin_project! {
/// Convert a [`Sink`] of byte chunks into an [`AsyncWrite`].
///
/// Whenever you write to this [`SinkWriter`], the supplied bytes are
/// forwarded to the inner [`Sink`]. When `shutdown` is called on this
/// [`SinkWriter`], the inner sink is closed.
///
/// This adapter takes a `Sink<&[u8]>` and provides an [`AsyncWrite`] impl
/// for it. Because of the lifetime, this trait is relatively rarely
/// implemented. The main ways to get a `Sink<&[u8]>` that you can use with
/// this type are:
///
/// * With the codec module by implementing the [`Encoder`]`<&[u8]>` trait.
/// * By wrapping a `Sink<Bytes>` in a [`CopyToBytes`].
/// * Manually implementing `Sink<&[u8]>` directly.
///
/// The opposite conversion of implementing `Sink<_>` for an [`AsyncWrite`]
/// is done using the [`codec`] module.
///
/// # Example
///
/// ```
/// use bytes::Bytes;
/// use futures_util::SinkExt;
/// use std::io::{Error, ErrorKind};
/// use tokio::io::AsyncWriteExt;
/// use tokio_util::io::{SinkWriter, CopyToBytes};
/// use tokio_util::sync::PollSender;
///
/// # #[tokio::main(flavor = "current_thread")]
/// # async fn main() -> Result<(), Error> {
/// // We use an mpsc channel as an example of a `Sink<Bytes>`.
/// let (tx, mut rx) = tokio::sync::mpsc::channel::<Bytes>(1);
/// let sink = PollSender::new(tx).sink_map_err(|_| Error::from(ErrorKind::BrokenPipe));
///
/// // Wrap it in `CopyToBytes` to get a `Sink<&[u8]>`.
/// let mut writer = SinkWriter::new(CopyToBytes::new(sink));
///
/// // Write data to our interface...
/// let data: [u8; 4] = [1, 2, 3, 4];
/// let _ = writer.write(&data).await?;
///
/// // ... and receive it.
/// assert_eq!(data.as_slice(), &*rx.recv().await.unwrap());
/// # Ok(())
/// # }
/// ```
///
/// [`AsyncWrite`]: tokio::io::AsyncWrite
/// [`CopyToBytes`]: crate::io::CopyToBytes
/// [`Encoder`]: crate::codec::Encoder
/// [`Sink`]: futures_sink::Sink
/// [`codec`]: tokio_util::codec
#[derive(Debug)]
pub struct SinkWriter<S> {
#[pin]
inner: S,
}
}
impl<S> SinkWriter<S> {
/// Creates a new [`SinkWriter`].
pub fn new(sink: S) -> Self {
Self { inner: sink }
}
/// Gets a reference to the underlying sink.
pub fn get_ref(&self) -> &S {
&self.inner
}
/// Gets a mutable reference to the underlying sink.
pub fn get_mut(&mut self) -> &mut S {
&mut self.inner
}
/// Consumes this [`SinkWriter`], returning the underlying sink.
pub fn into_inner(self) -> S {
self.inner
}
}
impl<S, E> AsyncWrite for SinkWriter<S>
where
for<'a> S: Sink<&'a [u8], Error = E>,
E: Into<io::Error>,
{
fn poll_write(
self: Pin<&mut Self>,
cx: &mut Context<'_>,
buf: &[u8],
) -> Poll<Result<usize, io::Error>> {
let mut this = self.project();
match this.inner.as_mut().poll_ready(cx) {
Poll::Ready(Ok(())) => {
if let Err(e) = this.inner.as_mut().start_send(buf) {
Poll::Ready(Err(e.into()))
} else {
Poll::Ready(Ok(buf.len()))
}
}
Poll::Ready(Err(e)) => Poll::Ready(Err(e.into())),
Poll::Pending => {
cx.waker().wake_by_ref();
Poll::Pending
}
}
}
fn poll_flush(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Result<(), io::Error>> {
self.project().inner.poll_flush(cx).map_err(Into::into)
}
fn poll_shutdown(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Result<(), io::Error>> {
self.project().inner.poll_close(cx).map_err(Into::into)
}
}
| {
"content_hash": "9dab2801d1454cae513d360fd4b6a091",
"timestamp": "",
"source": "github",
"line_count": 124,
"max_line_length": 97,
"avg_line_length": 32.49193548387097,
"alnum_prop": 0.5266815586994291,
"repo_name": "tokio-rs/tokio",
"id": "5d1acc499cc7c6a3b2e92ebc0a94df668c8be222",
"size": "4029",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tokio-util/src/io/sink_writer.rs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Rust",
"bytes": "3760867"
},
{
"name": "Shell",
"bytes": "4631"
}
],
"symlink_target": ""
} |
ACCEPTED
#### According to
International Plant Names Index
#### Published in
null
#### Original name
null
### Remarks
null | {
"content_hash": "3cccdbaaecc9564a64814f567bdb8f7d",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 31,
"avg_line_length": 9.692307692307692,
"alnum_prop": 0.7063492063492064,
"repo_name": "mdoering/backbone",
"id": "31abbb1b35d143fe57a826cbc44a8fbe1a964933",
"size": "178",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "life/Plantae/Magnoliophyta/Magnoliopsida/Lamiales/Oleaceae/Chionanthus/Linociera rupicola/README.md",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
module Excon
class PrettyPrinter
def self.pp(io, datum, indent=0)
datum = datum.dup
# reduce duplication/noise of output
unless datum.is_a?(Excon::Headers)
datum.delete(:connection)
datum.delete(:stack)
if datum.has_key?(:headers) && datum[:headers].has_key?('Authorization')
datum[:headers] = datum[:headers].dup
datum[:headers]['Authorization'] = REDACTED
end
if datum.has_key?(:password)
datum[:password] = REDACTED
end
end
indent += 2
max_key_length = datum.keys.map {|key| key.inspect.length}.max
datum.keys.sort_by {|key| key.to_s}.each do |key|
value = datum[key]
io.write("#{' ' * indent}#{key.inspect.ljust(max_key_length)} => ")
case value
when Array
io.puts("[")
value.each do |v|
io.puts("#{' ' * indent} #{v.inspect}")
end
io.write("#{' ' * indent}]")
when Hash
io.puts("{")
self.pp(io, value, indent)
io.write("#{' ' * indent}}")
else
io.write("#{value.inspect}")
end
io.puts
end
indent -= 2
end
end
end
| {
"content_hash": "5a0a7dfd6c2bd96d2e1af6481694d27d",
"timestamp": "",
"source": "github",
"line_count": 45,
"max_line_length": 80,
"avg_line_length": 27.177777777777777,
"alnum_prop": 0.5134914145543745,
"repo_name": "kamillamagna/NMF_Tool",
"id": "0ba2e2b1ec1f1c06179da28aa81f1cd3ac7af0e7",
"size": "1253",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "vendor/bundle/gems/excon-0.55.0/lib/excon/pretty_printer.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "103"
},
{
"name": "CSS",
"bytes": "289648"
},
{
"name": "HTML",
"bytes": "1819532"
},
{
"name": "JavaScript",
"bytes": "3425661"
},
{
"name": "Makefile",
"bytes": "2846"
},
{
"name": "Ruby",
"bytes": "273715"
},
{
"name": "Shell",
"bytes": "2015"
}
],
"symlink_target": ""
} |
package main
import (
"encoding/json"
"errors"
"flag"
"fmt"
"os/exec"
"strings"
"time"
"github.com/golang/glog"
"github.com/kubernetes-incubator/external-storage/lib/controller"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/types"
"k8s.io/apimachinery/pkg/util/uuid"
"k8s.io/apimachinery/pkg/util/wait"
"k8s.io/client-go/kubernetes"
"k8s.io/client-go/pkg/api/v1"
"k8s.io/client-go/rest"
"k8s.io/client-go/tools/clientcmd"
)
const (
resyncPeriod = 15 * time.Second
provisionerName = "ceph/cephfs"
exponentialBackOffOnError = false
failedRetryThreshold = 5
provisionCmd = "/usr/local/bin/cephfs_provisioner"
provisionerIDAnn = "cephFSProvisionerIdentity"
cephShareAnn = "cephShare"
)
type provisionOutput struct {
Path string `json:"path"`
User string `json:"user"`
Secret string `json:"auth"`
}
type cephFSProvisioner struct {
// Kubernetes Client. Use to retrieve Ceph admin secret
client kubernetes.Interface
// Identity of this cephFSProvisioner, generated. Used to identify "this"
// provisioner's PVs.
identity types.UID
}
func newCephFSProvisioner(client kubernetes.Interface) controller.Provisioner {
return &cephFSProvisioner{
client: client,
identity: uuid.NewUUID(),
}
}
var _ controller.Provisioner = &cephFSProvisioner{}
// Provision creates a storage asset and returns a PV object representing it.
func (p *cephFSProvisioner) Provision(options controller.VolumeOptions) (*v1.PersistentVolume, error) {
if options.PVC.Spec.Selector != nil {
return nil, fmt.Errorf("claim Selector is not supported")
}
cluster, adminID, adminSecret, mon, err := p.parseParameters(options.Parameters)
if err != nil {
return nil, err
}
// create random share name
share := fmt.Sprintf("kubernetes-dynamic-pvc-%s", uuid.NewUUID())
// create random user id
user := fmt.Sprintf("kubernetes-dynamic-user-%s", uuid.NewUUID())
// provision share
// create cmd
cmd := exec.Command(provisionCmd, "-n", share, "-u", user)
// set env
cmd.Env = []string{
"CEPH_CLUSTER_NAME=" + cluster,
"CEPH_MON=" + strings.Join(mon[:], ","),
"CEPH_AUTH_ID=" + adminID,
"CEPH_AUTH_KEY=" + adminSecret}
output, cmdErr := cmd.CombinedOutput()
if cmdErr != nil {
glog.Errorf("failed to provision share %q for %q, err: %v, output: %v", share, user, cmdErr, string(output))
return nil, cmdErr
}
// validate output
res := &provisionOutput{}
json.Unmarshal([]byte(output), &res)
if res.User == "" || res.Secret == "" || res.Path == "" {
return nil, fmt.Errorf("invalid provisioner output")
}
// create secret in PVC's namespace
nameSpace := options.PVC.Namespace
secretName := "ceph-" + user + "-secret"
secret := &v1.Secret{
ObjectMeta: metav1.ObjectMeta{
Namespace: nameSpace,
Name: secretName,
},
Data: map[string][]byte{
"key": []byte(res.Secret),
},
Type: "Opaque",
}
_, err = p.client.Core().Secrets(nameSpace).Create(secret)
if err != nil {
return nil, fmt.Errorf("failed to create secret")
}
if err != nil {
glog.Errorf("Cephfs Provisioner: create volume failed, err: %v", err)
return nil, err
}
pv := &v1.PersistentVolume{
ObjectMeta: metav1.ObjectMeta{
Name: options.PVName,
Annotations: map[string]string{
provisionerIDAnn: string(p.identity),
cephShareAnn: share,
},
},
Spec: v1.PersistentVolumeSpec{
PersistentVolumeReclaimPolicy: options.PersistentVolumeReclaimPolicy,
AccessModes: []v1.PersistentVolumeAccessMode{
v1.ReadWriteOnce,
v1.ReadOnlyMany,
v1.ReadWriteMany,
},
Capacity: v1.ResourceList{ //FIXME: kernel cephfs doesn't enforce quota, capacity is not meaningless here.
v1.ResourceName(v1.ResourceStorage): options.PVC.Spec.Resources.Requests[v1.ResourceName(v1.ResourceStorage)],
},
PersistentVolumeSource: v1.PersistentVolumeSource{
CephFS: &v1.CephFSVolumeSource{
Monitors: mon,
Path: res.Path[strings.Index(res.Path, "/"):],
SecretRef: &v1.LocalObjectReference{
Name: secretName,
},
User: user,
},
},
},
}
glog.Infof("successfully created CephFS share %+v", pv.Spec.PersistentVolumeSource.CephFS)
return pv, nil
}
// Delete removes the storage asset that was created by Provision represented
// by the given PV.
func (p *cephFSProvisioner) Delete(volume *v1.PersistentVolume) error {
	// Only act on PVs this provisioner instance created.
	identity, found := volume.Annotations[provisionerIDAnn]
	if !found {
		return errors.New("identity annotation not found on PV")
	}
	if identity != string(p.identity) {
		return &controller.IgnoredError{"identity annotation on PV does not match ours"}
	}
	shareName, found := volume.Annotations[cephShareAnn]
	if !found {
		return errors.New("ceph share annotation not found on PV")
	}
	// Recover Ceph connection parameters from the PV's storage class.
	storageClass, err := p.client.Storage().StorageClasses().Get(v1.GetPersistentVolumeClass(volume), metav1.GetOptions{})
	if err != nil {
		return err
	}
	cluster, adminID, adminSecret, mon, err := p.parseParameters(storageClass.Parameters)
	if err != nil {
		return err
	}
	cephUser := volume.Spec.PersistentVolumeSource.CephFS.User
	// Run the external provisioner binary in remove (-r) mode; Ceph
	// credentials are handed over through the environment.
	cmd := exec.Command(provisionCmd, "-r", "-n", shareName, "-u", cephUser)
	cmd.Env = []string{
		"CEPH_CLUSTER_NAME=" + cluster,
		"CEPH_MON=" + strings.Join(mon, ","),
		"CEPH_AUTH_ID=" + adminID,
		"CEPH_AUTH_KEY=" + adminSecret}
	if output, cmdErr := cmd.CombinedOutput(); cmdErr != nil {
		glog.Errorf("failed to delete share %q for %q, err: %v, output: %v", shareName, cephUser, cmdErr, string(output))
		return cmdErr
	}
	return nil
}
// parseParameters extracts Ceph connection settings from a storage-class
// parameter map. It returns (cluster, adminID, adminSecret, monitors);
// unknown keys are rejected, and the admin secret is resolved from the
// Kubernetes secret named by adminsecretname/adminsecretnamespace.
func (p *cephFSProvisioner) parseParameters(parameters map[string]string) (string, string, string, []string, error) {
	// Defaults used when the storage class omits the optional keys.
	cluster := "ceph"
	adminID := "admin"
	adminSecretNamespace := "default"
	adminSecretName := ""
	var mon []string
	for k, v := range parameters {
		switch strings.ToLower(k) {
		case "cluster":
			cluster = v
		case "monitors":
			mon = append(mon, strings.Split(v, ",")...)
		case "adminid":
			adminID = v
		case "adminsecretname":
			adminSecretName = v
		case "adminsecretnamespace":
			adminSecretNamespace = v
		default:
			return "", "", "", nil, fmt.Errorf("invalid option %q", k)
		}
	}
	// sanity check
	if adminSecretName == "" {
		return "", "", "", nil, fmt.Errorf("missing Ceph admin secret name")
	}
	adminSecret, err := p.parsePVSecret(adminSecretNamespace, adminSecretName)
	if err != nil {
		return "", "", "", nil, fmt.Errorf("failed to get admin secret from [%q/%q]: %v", adminSecretNamespace, adminSecretName, err)
	}
	if len(mon) < 1 {
		return "", "", "", nil, fmt.Errorf("missing Ceph monitors")
	}
	return cluster, adminID, adminSecret, mon, nil
}
// parsePVSecret fetches the named Kubernetes secret in the given namespace
// and returns the value of one of its data entries as a string.
//
// NOTE(review): secrets.Data is a map, so the range below yields an
// *arbitrary* entry when the secret holds more than one key — Go map
// iteration order is unspecified, not "the last secret" as the trailing
// comment suggests. Presumably the admin secret is expected to contain
// exactly one key; confirm against how the secret is created.
func (p *cephFSProvisioner) parsePVSecret(namespace, secretName string) (string, error) {
	if p.client == nil {
		return "", fmt.Errorf("Cannot get kube client")
	}
	secrets, err := p.client.Core().Secrets(namespace).Get(secretName, metav1.GetOptions{})
	if err != nil {
		return "", err
	}
	// Return the first entry the iteration yields (see NOTE above).
	for _, data := range secrets.Data {
		return string(data), nil
	}
	// Reached only when the secret exists but carries no data entries.
	return "", fmt.Errorf("no secret found")
}
var (
	// master is the Kubernetes API server URL; when both master and
	// kubeconfig are empty, main falls back to in-cluster configuration.
	master = flag.String("master", "", "Master URL")
	// kubeconfig points at a kubeconfig file for out-of-cluster runs.
	kubeconfig = flag.String("kubeconfig", "", "Absolute path to the kubeconfig")
)
// main builds a Kubernetes client (from -master/-kubeconfig when given,
// otherwise in-cluster config) and runs the CephFS provision controller
// forever.
func main() {
	flag.Parse()
	flag.Set("logtostderr", "true")

	var (
		cfg *rest.Config
		err error
	)
	// Explicit flags win; otherwise assume we are running inside a pod.
	if *master != "" || *kubeconfig != "" {
		cfg, err = clientcmd.BuildConfigFromFlags(*master, *kubeconfig)
	} else {
		cfg, err = rest.InClusterConfig()
	}
	if err != nil {
		glog.Fatalf("Failed to create config: %v", err)
	}

	kubeClient, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		glog.Fatalf("Failed to create client: %v", err)
	}

	// The controller needs to know what the server version is because out-of-tree
	// provisioners aren't officially supported until 1.5
	version, err := kubeClient.Discovery().ServerVersion()
	if err != nil {
		glog.Fatalf("Error getting server version: %v", err)
	}

	// The provisioner implements the Provisioner interface expected by the
	// controller, which dynamically provisions CephFS-backed PVs.
	provisioner := newCephFSProvisioner(kubeClient)
	pc := controller.NewProvisionController(kubeClient, resyncPeriod, provisionerName, provisioner, version.GitVersion, exponentialBackOffOnError, failedRetryThreshold, 2*resyncPeriod, resyncPeriod, resyncPeriod/2, 2*resyncPeriod)
	pc.Run(wait.NeverStop)
}
| {
"content_hash": "55f10d6a7847aa333be2cd2b57f469c9",
"timestamp": "",
"source": "github",
"line_count": 295,
"max_line_length": 238,
"avg_line_length": 29.63050847457627,
"alnum_prop": 0.6796705182473401,
"repo_name": "clarkhale/external-storage",
"id": "8f02a4d9a1f341381c5f78f91e8803fe81e4cba3",
"size": "9310",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "iscsi/targetd/vendor/github.com/kubernetes-incubator/external-storage/ceph/cephfs/cephfs-provisioner.go",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Go",
"bytes": "251087"
},
{
"name": "Makefile",
"bytes": "7191"
},
{
"name": "Python",
"bytes": "25945"
},
{
"name": "Shell",
"bytes": "6851"
}
],
"symlink_target": ""
} |
// Application delegate: receives UIApplication lifecycle callbacks and
// owns the application's main window.
@interface AppDelegate : UIResponder <UIApplicationDelegate>
// The app's main window, retained for the lifetime of the delegate.
@property (strong, nonatomic) UIWindow *window;
@end
| {
"content_hash": "22742ca724ada7df1d301f0b5091922e",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 60,
"avg_line_length": 16.857142857142858,
"alnum_prop": 0.7796610169491526,
"repo_name": "twototwoto/WW_Normal",
"id": "a33bff8a9e3fe8d02bce565a5ba83a89e9b9b271",
"size": "293",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "函数式编程/函数式编程/AppDelegate.h",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Objective-C",
"bytes": "124762"
},
{
"name": "Ruby",
"bytes": "206"
},
{
"name": "Shell",
"bytes": "8511"
}
],
"symlink_target": ""
} |
package groovy.lang;
import org.codehaus.groovy.GroovyBugError;
import org.codehaus.groovy.ast.ClassNode;
import org.codehaus.groovy.classgen.asm.BytecodeHelper;
import org.codehaus.groovy.control.CompilationUnit;
import org.codehaus.groovy.control.Phases;
import org.codehaus.groovy.reflection.CachedClass;
import org.codehaus.groovy.reflection.CachedConstructor;
import org.codehaus.groovy.reflection.CachedField;
import org.codehaus.groovy.reflection.CachedMethod;
import org.codehaus.groovy.reflection.ClassInfo;
import org.codehaus.groovy.reflection.GeneratedMetaMethod;
import org.codehaus.groovy.reflection.ParameterTypes;
import org.codehaus.groovy.reflection.ReflectionCache;
import org.codehaus.groovy.runtime.ConvertedClosure;
import org.codehaus.groovy.runtime.CurriedClosure;
import org.codehaus.groovy.runtime.DefaultGroovyMethods;
import org.codehaus.groovy.runtime.ExceptionUtils;
import org.codehaus.groovy.runtime.GeneratedClosure;
import org.codehaus.groovy.runtime.GroovyCategorySupport;
import org.codehaus.groovy.runtime.InvokerHelper;
import org.codehaus.groovy.runtime.InvokerInvocationException;
import org.codehaus.groovy.runtime.MetaClassHelper;
import org.codehaus.groovy.runtime.MethodClosure;
import org.codehaus.groovy.runtime.callsite.AbstractCallSite;
import org.codehaus.groovy.runtime.callsite.CallSite;
import org.codehaus.groovy.runtime.callsite.ConstructorSite;
import org.codehaus.groovy.runtime.callsite.MetaClassConstructorSite;
import org.codehaus.groovy.runtime.callsite.PogoMetaClassSite;
import org.codehaus.groovy.runtime.callsite.PogoMetaMethodSite;
import org.codehaus.groovy.runtime.callsite.PojoMetaClassSite;
import org.codehaus.groovy.runtime.callsite.PojoMetaMethodSite;
import org.codehaus.groovy.runtime.callsite.StaticMetaClassSite;
import org.codehaus.groovy.runtime.callsite.StaticMetaMethodSite;
import org.codehaus.groovy.runtime.metaclass.ClosureMetaMethod;
import org.codehaus.groovy.runtime.metaclass.MethodMetaProperty.GetBeanMethodMetaProperty;
import org.codehaus.groovy.runtime.metaclass.MethodMetaProperty.GetMethodMetaProperty;
import org.codehaus.groovy.runtime.metaclass.MetaClassRegistryImpl;
import org.codehaus.groovy.runtime.metaclass.MetaMethodIndex;
import org.codehaus.groovy.runtime.metaclass.MethodSelectionException;
import org.codehaus.groovy.runtime.metaclass.MissingMethodExceptionNoStack;
import org.codehaus.groovy.runtime.metaclass.MissingMethodExecutionFailed;
import org.codehaus.groovy.runtime.metaclass.MissingPropertyExceptionNoStack;
import org.codehaus.groovy.runtime.metaclass.MixinInstanceMetaMethod;
import org.codehaus.groovy.runtime.metaclass.MultipleSetterProperty;
import org.codehaus.groovy.runtime.metaclass.NewInstanceMetaMethod;
import org.codehaus.groovy.runtime.metaclass.NewMetaMethod;
import org.codehaus.groovy.runtime.metaclass.NewStaticMetaMethod;
import org.codehaus.groovy.runtime.metaclass.TransformMetaMethod;
import org.codehaus.groovy.runtime.typehandling.DefaultTypeTransformation;
import org.codehaus.groovy.runtime.typehandling.NumberMathModificationInfo;
import org.codehaus.groovy.runtime.wrappers.Wrapper;
import org.codehaus.groovy.util.ComplexKeyHashMap;
import org.codehaus.groovy.util.FastArray;
import org.codehaus.groovy.util.SingleKeyHashMap;
import org.codehaus.groovy.reflection.android.AndroidSupport;
import org.objectweb.asm.ClassVisitor;
import java.beans.BeanInfo;
import java.beans.EventSetDescriptor;
import java.beans.IntrospectionException;
import java.beans.Introspector;
import java.beans.PropertyDescriptor;
import java.lang.reflect.Constructor;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.lang.reflect.Proxy;
import java.net.URL;
import java.security.AccessController;
import java.security.PrivilegedActionException;
import java.security.PrivilegedExceptionAction;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
/**
* Allows methods to be dynamically added to existing classes at runtime
*
* @author <a href="mailto:[email protected]">James Strachan</a>
* @author Guillaume Laforge
* @author <a href="mailto:[email protected]">Jochen "blackdrag" Theodorou</a>
* @author Graeme Rocher
* @author Alex Tkachman
* @author Roshan Dawrani
* @see groovy.lang.MetaClass
*/
public class MetaClassImpl implements MetaClass, MutableMetaClass {
public static final Object[] EMPTY_ARGUMENTS = {};
// Names of the hook methods looked up on the class for missing
// method/property handling and MOP interception.
protected static final String STATIC_METHOD_MISSING = "$static_methodMissing";
protected static final String STATIC_PROPERTY_MISSING = "$static_propertyMissing";
protected static final String METHOD_MISSING = "methodMissing";
protected static final String PROPERTY_MISSING = "propertyMissing";
protected static final String INVOKE_METHOD_METHOD = "invokeMethod";
private static final String CLOSURE_CALL_METHOD = "call";
private static final String CLOSURE_DO_CALL_METHOD = "doCall";
private static final String GET_PROPERTY_METHOD = "getProperty";
private static final String SET_PROPERTY_METHOD = "setProperty";
// Expected parameter signatures of the missing-method/property hooks.
private static final Class[] METHOD_MISSING_ARGS = new Class[]{String.class, Object.class};
private static final Class[] GETTER_MISSING_ARGS = new Class[]{String.class};
private static final Class[] SETTER_MISSING_ARGS = METHOD_MISSING_ARGS;
// Orders cached classes alphabetically by name.
private static final Comparator<CachedClass> CACHED_CLASS_NAME_COMPARATOR = new Comparator<CachedClass>() {
    public int compare(final CachedClass o1, final CachedClass o2) {
        return o1.getName().compareTo(o2.getName());
    }
};
private static final MetaMethod[] EMPTY = new MetaMethod[0];
// Sentinel used when a listener method lookup is ambiguous.
private static final MetaMethod AMBIGUOUS_LISTENER_METHOD = new DummyMetaMethod();
// The class this metaclass describes, plus cached reflection data for it.
protected final Class theClass;
protected final CachedClass theCachedClass;
protected final boolean isGroovyObject;
protected final boolean isMap;
// Index of meta methods keyed by declaring class and method name.
protected final MetaMethodIndex metaMethodIndex;
// Property indexes: per-class instance properties, static properties, and
// the view used for calls through "super".
private final Index classPropertyIndex = new MethodIndex();
private final SingleKeyHashMap staticPropertyIndex = new SingleKeyHashMap();
private final Map<String, MetaMethod> listeners = new HashMap<String, MetaMethod>();
private final List<MetaMethod> allMethods = new ArrayList<MetaMethod>();
// we only need one of these that can be reused over and over.
private final MetaProperty arrayLengthProperty = new MetaArrayLengthProperty();
private final Index classPropertyIndexForSuper = new MethodIndex();
// Tracks "new" (DGM-style) meta methods already registered, to avoid
// indexing the same method twice.
private final Set<MetaMethod> newGroovyMethodsSet = new HashSet<MetaMethod>();
// New meta methods for this class: the cached class's own plus any passed
// to the constructor (see MetaClassImpl(Class, MetaMethod[])).
private final MetaMethod [] myNewMetaMethods;
private final MetaMethod [] additionalMetaMethods;
// Mutable state; assignment of most of these happens outside this excerpt
// (mainClassMethodHeader is set in fillMethodIndex, constructors in the
// registry constructor, registry in every constructor).
protected MetaMethod getPropertyMethod;
protected MetaMethod invokeMethodMethod;
protected MetaMethod setPropertyMethod;
protected MetaClassRegistry registry;
private ClassNode classNode;
private FastArray constructors;
private boolean initialized;
private MetaMethod genericGetMethod;
private MetaMethod genericSetMethod;
private MetaMethod propertyMissingGet;
private MetaMethod propertyMissingSet;
private MetaMethod methodMissing;
private MetaMethodIndex.Header mainClassMethodHeader;
/**
 * Constructs a metaclass for the given class, optionally augmented with
 * extra meta methods.
 *
 * @param theClass the class this is the metaclass for
 * @param add      additional meta methods for this class (may be null or empty)
 */
public MetaClassImpl(final Class theClass, MetaMethod [] add) {
    this.theClass = theClass;
    theCachedClass = ReflectionCache.getCachedClass(theClass);
    this.isGroovyObject = GroovyObject.class.isAssignableFrom(theClass);
    this.isMap = Map.class.isAssignableFrom(theClass);
    this.registry = GroovySystem.getMetaClassRegistry();
    metaMethodIndex = new MetaMethodIndex(theCachedClass);
    final MetaMethod[] metaMethods = theCachedClass.getNewMetaMethods();
    if (add == null || add.length == 0) {
        myNewMetaMethods = metaMethods;
        additionalMetaMethods = EMPTY;
    } else {
        // Merge the class's own new meta methods with the supplied extras.
        final List<MetaMethod> merged = new ArrayList<MetaMethod>(metaMethods.length + add.length);
        Collections.addAll(merged, metaMethods);
        Collections.addAll(merged, add);
        myNewMetaMethods = merged.toArray(new MetaMethod[merged.size()]);
        additionalMetaMethods = metaMethods;
    }
}
/**
 * Constructs a metaclass for the given class with no additional meta
 * methods; equivalent to {@code MetaClassImpl(theClass, null)}.
 *
 * @param theClass the class this is the metaclass for
 */
public MetaClassImpl(final Class theClass) {
    this(theClass, null);
}
/**
 * Constructs a metaclass bound to the given registry; also caches the
 * class's constructors for later invocation.
 *
 * @param registry the metaclass registry for this MetaClass
 * @param theClass the class
 * @param add      additional meta methods (may be null)
 */
public MetaClassImpl(MetaClassRegistry registry, final Class theClass, MetaMethod add []) {
    this(theClass, add);
    this.registry = registry;
    this.constructors = new FastArray(theCachedClass.getConstructors());
}
/**
 * Constructs a metaclass bound to the given registry with no additional
 * meta methods.
 *
 * @param registry the metaclass registry for this MetaClass
 * @param theClass the class
 */
public MetaClassImpl(MetaClassRegistry registry, final Class theClass) {
    this(registry, theClass, null);
}
/**
 * Returns the cached reflection data for the class this metaclass describes.
 *
 * @return the cached class
 */
public final CachedClass getTheCachedClass() {
    return theCachedClass;
}
/**
 * Returns the registry this metaclass belongs to.
 *
 * @return the registry
 */
public MetaClassRegistry getRegistry() {
    return registry;
}
/**
 * @see MetaObjectProtocol#respondsTo(Object, String, Object[])
 */
public List respondsTo(Object obj, String name, Object[] argTypes) {
    final Class[] classes = MetaClassHelper.castArgumentsToClassArray(argTypes);
    final MetaMethod match = getMetaMethod(name, classes);
    if (match == null) {
        return Collections.emptyList();
    }
    return Collections.singletonList(match);
}
/**
 * @see MetaObjectProtocol#respondsTo(Object, String)
 */
public List respondsTo(final Object obj, final String name) {
    // getMethods yields either a FastArray of methods or a single method.
    final Object found = getMethods(getTheClass(), name, false);
    if (found instanceof FastArray) {
        return ((FastArray) found).toList();
    }
    return Collections.singletonList(found);
}
/**
 * @see MetaObjectProtocol#hasProperty(Object, String)
 */
public MetaProperty hasProperty(Object obj, String name) {
    // The receiver is ignored; property lookup is purely name-based.
    return getMetaProperty(name);
}
/**
 * @see MetaObjectProtocol#getMetaProperty(String)
 */
public MetaProperty getMetaProperty(String name) {
    // 1) instance properties indexed for this class
    SingleKeyHashMap propertyMap = classPropertyIndex.getNotNull(theCachedClass);
    if (propertyMap.containsKey(name)) {
        return (MetaProperty) propertyMap.get(name);
    }
    // 2) static properties
    if (staticPropertyIndex.containsKey(name)) {
        return (MetaProperty) staticPropertyIndex.get(name);
    }
    // 3) properties visible through a "super" reference
    propertyMap = classPropertyIndexForSuper.getNotNull(theCachedClass);
    if (propertyMap.containsKey(name)) {
        return (MetaProperty) propertyMap.get(name);
    }
    // 4) walk up the class hierarchy (stopping at Object) looking for a
    //    matching bean property; report a hit so subclasses may cache it.
    for (CachedClass klass = theCachedClass;
            klass != null && klass != ReflectionCache.OBJECT_CLASS;
            klass = klass.getCachedSuperClass()) {
        final MetaBeanProperty property = findPropertyInClassHierarchy(name, klass);
        if (property != null) {
            onSuperPropertyFoundInHierarchy(property);
            return property;
        }
    }
    return null;
}
/**
 * @see MetaObjectProtocol#getStaticMetaMethod(String, Object[])
 */
public MetaMethod getStaticMetaMethod(String name, Object[] argTypes) {
    return pickStaticMethod(name, MetaClassHelper.castArgumentsToClassArray(argTypes));
}
/**
 * @see MetaObjectProtocol#getMetaMethod(String, Object[])
 */
public MetaMethod getMetaMethod(String name, Object[] argTypes) {
    return pickMethod(name, MetaClassHelper.castArgumentsToClassArray(argTypes));
}
/**
 * Returns the class this object is the metaclass of.
 *
 * @return the class contained by this metaclass
 */
public Class getTheClass() {
    return theClass;
}
/**
 * Returns whether the class represented by this metaclass instance is
 * assignable to {@link GroovyObject}.
 *
 * @return true if this is a Groovy class, false otherwise
 */
public boolean isGroovyObject() {
    return isGroovyObject;
}
/**
 * Populates the meta method index from the superclass chain and the
 * implemented interfaces. The steps below are order-dependent.
 */
private void fillMethodIndex() {
    mainClassMethodHeader = metaMethodIndex.getHeader(theClass);
    // Superclass chain as built by getSuperClasses() (Object first).
    LinkedList<CachedClass> superClasses = getSuperClasses();
    CachedClass firstGroovySuper = calcFirstGroovySuperClass(superClasses);
    Set<CachedClass> interfaces = theCachedClass.getInterfaces();
    addInterfaceMethods(interfaces);
    populateMethods(superClasses, firstGroovySuper);
    inheritInterfaceNewMetaMethods(interfaces);
    // Groovy objects additionally need the "super" view of the index and
    // the MOP (super$N$name) method wiring.
    if (isGroovyObject) {
        metaMethodIndex.copyMethodsToSuper();
        connectMultimethods(superClasses, firstGroovySuper);
        removeMultimethodsOverloadedWithPrivateMethods();
        replaceWithMOPCalls(theCachedClass.mopMethods);
    }
}
/**
 * Fills the per-class index headers from the superclass chain.
 *
 * Phase 1: every class up to and including the first Groovy superclass is
 * indexed into that superclass's single header (private methods only for
 * the Groovy superclass itself). Phase 2: the remaining classes each get
 * their own header, seeded with the non-private methods of the previous
 * level.
 */
private void populateMethods(LinkedList<CachedClass> superClasses, CachedClass firstGroovySuper) {
    MetaMethodIndex.Header header = metaMethodIndex.getHeader(firstGroovySuper.getTheClass());
    CachedClass c;
    Iterator<CachedClass> iter = superClasses.iterator();
    // Phase 1: up to the first Groovy superclass.
    for (; iter.hasNext();) {
        c = iter.next();
        CachedMethod[] cachedMethods = c.getMethods();
        for (CachedMethod metaMethod : cachedMethods) {
            addToAllMethodsIfPublic(metaMethod);
            // Private methods of classes above the first Groovy super are
            // not visible from here.
            if (!metaMethod.isPrivate() || c == firstGroovySuper)
                addMetaMethodToIndex(metaMethod, header);
        }
        MetaMethod[] cachedMethods1 = getNewMetaMethods(c);
        for (final MetaMethod method : cachedMethods1) {
            if (!newGroovyMethodsSet.contains(method)) {
                newGroovyMethodsSet.add(method);
                addMetaMethodToIndex(method, header);
            }
        }
        if (c == firstGroovySuper)
            break;
    }
    // Phase 2: the remaining (Groovy) classes, one header each.
    MetaMethodIndex.Header last = header;
    for (;iter.hasNext();) {
        c = iter.next();
        header = metaMethodIndex.getHeader(c.getTheClass());
        if (last != null) {
            metaMethodIndex.copyNonPrivateMethods(last, header);
        }
        last = header;
        for (CachedMethod metaMethod : c.getMethods()) {
            addToAllMethodsIfPublic(metaMethod);
            addMetaMethodToIndex(metaMethod, header);
        }
        for (final MetaMethod method : getNewMetaMethods(c)) {
            // Constructors declared by other classes must not leak in.
            if (method.getName().equals("<init>") && !method.getDeclaringClass().equals(theCachedClass)) continue;
            if (!newGroovyMethodsSet.contains(method)) {
                newGroovyMethodsSet.add(method);
                addMetaMethodToIndex(method, header);
            }
        }
    }
}
/**
 * Returns the "new" meta methods for the given cached class; for this
 * metaclass's own class that includes any methods supplied at
 * construction time.
 */
private MetaMethod[] getNewMetaMethods(CachedClass c) {
    return (c == theCachedClass) ? myNewMetaMethods : c.getNewMetaMethods();
}
/**
 * Indexes all methods declared by the given interfaces under this class's
 * main method header.
 */
private void addInterfaceMethods(Set<CachedClass> interfaces) {
    final MetaMethodIndex.Header header = metaMethodIndex.getHeader(theClass);
    for (CachedClass iface : interfaces) {
        for (CachedMethod method : iface.getMethods()) {
            addMetaMethodToIndex(method, header);
        }
    }
}
/**
 * Builds the superclass chain for this class, ordered from Object down to
 * the class itself.
 *
 * @return the chain as a linked list (Object first)
 */
protected LinkedList<CachedClass> getSuperClasses() {
    LinkedList<CachedClass> superClasses = new LinkedList<CachedClass>();
    if (theClass.isInterface()) {
        // Interfaces are treated as direct descendants of Object.
        superClasses.addFirst(ReflectionCache.OBJECT_CLASS);
        return superClasses;
    }
    for (CachedClass c = theCachedClass; c != null; c = c.getCachedSuperClass()) {
        superClasses.addFirst(c);
    }
    // Non-primitive object arrays additionally inherit from Object[].
    if (theCachedClass.isArray && theClass != Object[].class && !theClass.getComponentType().isPrimitive()) {
        superClasses.addFirst(ReflectionCache.OBJECT_ARRAY_CLASS);
    }
    return superClasses;
}
/**
 * For every method name that a superclass overloads with a private method,
 * discards the multimethods gathered for this class and restores the
 * superclass view instead — private methods must not take part in
 * multimethod dispatch from subclasses.
 */
private void removeMultimethodsOverloadedWithPrivateMethods() {
    MethodIndexAction mia = new MethodIndexAction() {
        public boolean skipClass(Class clazz) {
            // This class's own private methods are legitimately callable.
            return clazz == theClass;
        }
        public void methodNameAction(Class clazz, MetaMethodIndex.Entry e) {
            if (e.methods == null)
                return;
            // Detect whether any method in this entry is a private method
            // declared by the class currently being visited.
            boolean hasPrivate = false;
            if (e.methods instanceof FastArray) {
                FastArray methods = (FastArray) e.methods;
                final int len = methods.size();
                final Object[] data = methods.getArray();
                for (int i = 0; i != len; ++i) {
                    MetaMethod method = (MetaMethod) data[i];
                    if (method.isPrivate() && clazz == method.getDeclaringClass().getTheClass()) {
                        hasPrivate = true;
                        break;
                    }
                }
            }
            else {
                MetaMethod method = (MetaMethod) e.methods;
                if (method.isPrivate() && clazz == method.getDeclaringClass().getTheClass()) {
                    hasPrivate = true;
                }
            }
            if (!hasPrivate) return;
            // We have private methods for that name, so remove the
            // multimethods. That is the same as in our index for
            // super, so just copy the list from there. It is not
            // possible to use a pointer here, because the methods
            // in the index for super are replaced later by MOP
            // methods like super$5$foo
            final Object o = e.methodsForSuper;
            if (o instanceof FastArray)
                e.methods = ((FastArray) o).copy();
            else
                e.methods = o;
        }
    };
    mia.iterate();
}
/**
 * Rewrites the method index so that calls through "this" (private methods)
 * and calls through "super" resolve to the compiler-generated MOP methods
 * (e.g. {@code super$5$foo}, {@code this$3$bar}) found in
 * {@code mopMethods}, which is expected to be sorted by name (it is
 * searched with {@link Arrays#binarySearch}).
 */
private void replaceWithMOPCalls(final CachedMethod[] mopMethods) {
    // no MOP methods if not a child of GroovyObject
    if (!isGroovyObject) return;
    class MOPIter extends MethodIndexAction {
        // true: rewrite private methods for "this" calls;
        // false: rewrite entries used for "super" calls.
        boolean useThis;
        public boolean skipClass(CachedClass clazz) {
            return !useThis && clazz == theCachedClass;
        }
        public void methodNameAction(Class clazz, MetaMethodIndex.Entry e) {
            if (useThis) {
                if (e.methods == null)
                    return;
                if (e.methods instanceof FastArray) {
                    FastArray methods = (FastArray) e.methods;
                    processFastArray(methods);
                }
                else {
                    MetaMethod method = (MetaMethod) e.methods;
                    if (method instanceof NewMetaMethod)
                        return;
                    // useThis pass only rewrites private methods.
                    if (useThis ^ Modifier.isPrivate(method.getModifiers())) return;
                    String mopName = method.getMopName();
                    int index = Arrays.binarySearch(mopMethods, mopName, CachedClass.CachedMethodComparatorWithString.INSTANCE);
                    if (index >= 0) {
                        // Widen to the full run of equally-named MOP methods,
                        // then pick the one whose signature matches.
                        int from = index;
                        while (from > 0 && mopMethods[from-1].getName().equals(mopName))
                            from--;
                        int to = index;
                        while (to < mopMethods.length-1 && mopMethods[to+1].getName().equals(mopName))
                            to++;
                        int matchingMethod = findMatchingMethod(mopMethods, from, to, method);
                        if (matchingMethod != -1) {
                            e.methods = mopMethods[matchingMethod];
                        }
                    }
                }
            }
            else {
                if (e.methodsForSuper == null)
                    return;
                if (e.methodsForSuper instanceof FastArray) {
                    FastArray methods = (FastArray) e.methodsForSuper;
                    processFastArray(methods);
                }
                else {
                    MetaMethod method = (MetaMethod) e.methodsForSuper;
                    if (method instanceof NewMetaMethod)
                        return;
                    // super pass only rewrites non-private methods.
                    if (useThis ^ Modifier.isPrivate(method.getModifiers())) return;
                    String mopName = method.getMopName();
                    // GROOVY-4922: Due to a numbering scheme change, we must find the super$X$method which exists
                    // with the highest number. If we don't, no method may be found, leading to a stack overflow
                    String[] decomposedMopName = decomposeMopName(mopName);
                    int distance = Integer.parseInt(decomposedMopName[1]);
                    // Try decreasing distance numbers until a match is found;
                    // a hit sets distance to 0 and ends the loop.
                    while (distance>0) {
                        String fixedMopName = decomposedMopName[0] + distance + decomposedMopName[2];
                        int index = Arrays.binarySearch(mopMethods, fixedMopName, CachedClass.CachedMethodComparatorWithString.INSTANCE);
                        if (index >= 0) {
                            int from = index;
                            while (from > 0 && mopMethods[from-1].getName().equals(fixedMopName))
                                from--;
                            int to = index;
                            while (to < mopMethods.length-1 && mopMethods[to+1].getName().equals(fixedMopName))
                                to++;
                            int matchingMethod = findMatchingMethod(mopMethods, from, to, method);
                            if (matchingMethod != -1) {
                                e.methodsForSuper = mopMethods[matchingMethod];
                                distance = 0;
                            }
                        }
                        distance--;
                    }
                }
            }
        }
        // Splits "super$3$foo" into {"super$", "3", "$foo"}; returns
        // {"", "0", mopName} when the name does not follow that pattern.
        private String[] decomposeMopName(final String mopName) {
            int idx = mopName.indexOf("$");
            if (idx>0) {
                int eidx = mopName.indexOf("$", idx+1);
                if (eidx>0) {
                    return new String[] {
                        mopName.substring(0, idx+1),
                        mopName.substring(idx+1, eidx),
                        mopName.substring(eidx)
                    };
                }
            }
            return new String[]{"","0",mopName};
        }
        // Applies the same MOP replacement, element by element, to a
        // FastArray of candidate methods.
        private void processFastArray(FastArray methods) {
            final int len = methods.size();
            final Object[] data = methods.getArray();
            for (int i = 0; i != len; ++i) {
                MetaMethod method = (MetaMethod) data[i];
                if (method instanceof NewMetaMethod) continue;
                boolean isPrivate = Modifier.isPrivate(method.getModifiers());
                if (useThis ^ isPrivate) continue;
                String mopName = method.getMopName();
                int index = Arrays.binarySearch(mopMethods, mopName, CachedClass.CachedMethodComparatorWithString.INSTANCE);
                if (index >= 0) {
                    int from = index;
                    while (from > 0 && mopMethods[from-1].getName().equals(mopName))
                        from--;
                    int to = index;
                    while (to < mopMethods.length-1 && mopMethods[to+1].getName().equals(mopName))
                        to++;
                    int matchingMethod = findMatchingMethod(mopMethods, from, to, method);
                    if (matchingMethod != -1) {
                        methods.set(i, mopMethods[matchingMethod]);
                    }
                }
            }
        }
    }
    MOPIter iter = new MOPIter();
    // replace all calls for super with the correct MOP method
    iter.useThis = false;
    iter.iterate();
    // replace all calls for this with the correct MOP method
    iter.useThis = true;
    iter.iterate();
}
/**
 * Registers the "new" (DGM-style) meta methods contributed for the
 * implemented interfaces on the main class header.
 */
private void inheritInterfaceNewMetaMethods(Set<CachedClass> interfaces) {
    for (CachedClass iface : interfaces) {
        for (MetaMethod method : getNewMetaMethods(iface)) {
            // Track the method as newly seen when applicable, but index it
            // unconditionally (unlike populateMethods, which only indexes
            // first sightings).
            if (!newGroovyMethodsSet.contains(method)) {
                newGroovyMethodsSet.add(method);
            }
            addMetaMethodToIndex(method, mainClassMethodHeader);
        }
    }
}
/**
 * Propagates multimethods down the Groovy part of the hierarchy: walking
 * from this class up to (and including) the first Groovy class, each
 * level's header is seeded with the previous level's non-private,
 * non-new meta methods.
 */
private void connectMultimethods(List<CachedClass> superClasses, CachedClass firstGroovyClass) {
    // Reverse so iteration runs from this class towards the root.
    superClasses = DefaultGroovyMethods.reverse(superClasses);
    MetaMethodIndex.Header last = null;
    for (final CachedClass c : superClasses) {
        MetaMethodIndex.Header methodIndex = metaMethodIndex.getHeader(c.getTheClass());
        // We don't copy DGM methods to superclasses' indexes
        // The reason we can do that is particular set of DGM methods in use,
        // if at some point we will define DGM method for some Groovy class or
        // for a class derived from such, we will need to revise this condition.
        // It saves us a lot of space and some noticeable time
        if (last != null) metaMethodIndex.copyNonPrivateNonNewMetaMethods(last, methodIndex);
        last = methodIndex;
        if (c == firstGroovyClass)
            break;
    }
}
/**
 * Finds the boundary between the Java and Groovy parts of the superclass
 * chain: the first class (walking from Object downwards) that is a
 * GroovyObject. GroovyObjectSupport and Closure are skipped because they
 * are runtime plumbing rather than user Groovy classes.
 */
private CachedClass calcFirstGroovySuperClass(Collection superClasses) {
    if (theCachedClass.isInterface)
        return ReflectionCache.OBJECT_CLASS;
    CachedClass firstGroovy = null;
    Iterator iter = superClasses.iterator();
    for (; iter.hasNext();) {
        CachedClass c = (CachedClass) iter.next();
        if (GroovyObject.class.isAssignableFrom(c.getTheClass())) {
            firstGroovy = c;
            break;
        }
    }
    if (firstGroovy == null)
        firstGroovy = theCachedClass;
    else {
        // Skip over the runtime support classes to reach the first real
        // Groovy class in the chain.
        if (firstGroovy.getTheClass() == GroovyObjectSupport.class && iter.hasNext()) {
            firstGroovy = (CachedClass) iter.next();
            if (firstGroovy.getTheClass() == Closure.class && iter.hasNext()) {
                firstGroovy = (CachedClass) iter.next();
            }
        }
    }
    // When the boundary class itself is a GroovyObject, the "first Groovy
    // super" is its (Java) superclass.
    return GroovyObject.class.isAssignableFrom(firstGroovy.getTheClass()) ? firstGroovy.getCachedSuperClass() : firstGroovy;
}
/**
 * Gets all instance methods available on this class for the given name.
 *
 * @return either a single {@link MetaMethod} or a {@link FastArray} of
 *         methods (possibly empty) — callers must handle both shapes
 */
private Object getMethods(Class sender, String name, boolean isCallToSuper) {
    Object answer;
    final MetaMethodIndex.Entry entry = metaMethodIndex.getMethods(sender, name);
    if (entry == null)
        answer = FastArray.EMPTY_LIST;
    else
    if (isCallToSuper) {
        answer = entry.methodsForSuper;
    } else {
        answer = entry.methods;
    }
    if (answer == null) answer = FastArray.EMPTY_LIST;
    // Category methods (use(...) blocks) never apply to super calls; when
    // active they are merged into a private copy of the result so the
    // index itself is not mutated.
    if (!isCallToSuper) {
        List used = GroovyCategorySupport.getCategoryMethods(name);
        if (used != null) {
            FastArray arr;
            if (answer instanceof MetaMethod) {
                arr = new FastArray();
                arr.add(answer);
            }
            else
                arr = ((FastArray) answer).copy();
            for (Iterator iter = used.iterator(); iter.hasNext();) {
                MetaMethod element = (MetaMethod) iter.next();
                // Only categories declared for the sender (or a supertype)
                // participate.
                if (!element.getDeclaringClass().getTheClass().isAssignableFrom(sender))
                    continue;
                filterMatchingMethodForCategory(arr, element);
            }
            answer = arr;
        }
    }
    return answer;
}
/**
 * Returns all the normal static methods on this class for the given name.
 *
 * @return the static methods available for the given name, or an empty
 *         list when none exist
 */
private Object getStaticMethods(Class sender, String name) {
    final MetaMethodIndex.Entry entry = metaMethodIndex.getMethods(sender, name);
    if (entry == null || entry.staticMethods == null) {
        return FastArray.EMPTY_LIST;
    }
    return entry.staticMethods;
}
/**
 * Reports whether this metaclass has been modified. {@code MetaClassImpl}
 * is not designed for modification, so this always returns false.
 *
 * @return false
 */
public boolean isModified() {
    return false;
}
/**
 * Adds an instance method to this metaclass.
 *
 * @param method the method to be added
 */
public void addNewInstanceMethod(Method method) {
    final CachedMethod cached = CachedMethod.find(method);
    final NewInstanceMetaMethod wrapped = new NewInstanceMetaMethod(cached);
    final Class declaring = wrapped.getDeclaringClass().getTheClass();
    addNewInstanceMethodToIndex(wrapped, metaMethodIndex.getHeader(declaring));
}
/**
 * Indexes the given new instance method under the supplied header, but
 * only the first time the method is seen.
 */
private void addNewInstanceMethodToIndex(MetaMethod newMethod, MetaMethodIndex.Header header) {
    // Set.add returns true only on first insertion.
    if (newGroovyMethodsSet.add(newMethod)) {
        addMetaMethodToIndex(newMethod, header);
    }
}
/**
 * Adds a static method to this metaclass.
 *
 * @param method the method to be added
 */
public void addNewStaticMethod(Method method) {
    final CachedMethod cached = CachedMethod.find(method);
    final NewStaticMetaMethod wrapped = new NewStaticMetaMethod(cached);
    final Class declaring = wrapped.getDeclaringClass().getTheClass();
    addNewStaticMethodToIndex(wrapped, metaMethodIndex.getHeader(declaring));
}
/**
 * Indexes the given new static method under the supplied header, but only
 * the first time the method is seen.
 */
private void addNewStaticMethodToIndex(MetaMethod newMethod, MetaMethodIndex.Header header) {
    // Set.add returns true only on first insertion.
    if (newGroovyMethodsSet.add(newMethod)) {
        addMetaMethodToIndex(newMethod, header);
    }
}
/**
 * Invoke a method on the given object with the given arguments,
 * normalising the argument form first.
 *
 * @param object     the object the method should be invoked on
 * @param methodName the name of the method to invoke
 * @param arguments  the arguments as null, a Tuple, an array, or a single
 *                   argument of any type
 * @return the result of the method invocation
 */
public Object invokeMethod(Object object, String methodName, Object arguments) {
    if (arguments == null) {
        return invokeMethod(object, methodName, MetaClassHelper.EMPTY_ARRAY);
    }
    if (arguments instanceof Tuple) {
        return invokeMethod(object, methodName, ((Tuple) arguments).toArray());
    }
    if (arguments instanceof Object[]) {
        return invokeMethod(object, methodName, (Object[]) arguments);
    }
    // Single non-array argument: wrap it.
    return invokeMethod(object, methodName, new Object[]{arguments});
}
/**
 * Invoke a missing method on the given object with the given arguments.
 * Delegates to the five-argument overload with default extra arguments
 * ({@code null}, {@code false}).
 *
 * @param instance   the object the method should be invoked on
 * @param methodName the name of the method to invoke
 * @param arguments  the arguments to the invoked method
 * @return the result of the method invocation
 */
public Object invokeMissingMethod(Object instance, String methodName, Object[] arguments) {
    return invokeMissingMethod(instance, methodName, arguments, null, false);
}
/**
* Invoke a missing property on the given object with the given arguments.
*
* @param instance The object the method should be invoked on.
* @param propertyName The name of the property to invoke.
* @param optionalValue The (optional) new value for the property
* @param isGetter Wether the method is a getter
*
* @return The result of the method invocation.
*/
public Object invokeMissingProperty(Object instance, String propertyName, Object optionalValue, boolean isGetter) {
Class theClass = instance instanceof Class ? (Class)instance : instance.getClass();
CachedClass superClass = theCachedClass;
while(superClass != null && superClass != ReflectionCache.OBJECT_CLASS) {
final MetaBeanProperty property = findPropertyInClassHierarchy(propertyName, superClass);
if(property != null) {
onSuperPropertyFoundInHierarchy(property);
if(!isGetter) {
property.setProperty(instance, optionalValue);
return null;
}
else {
return property.getProperty(instance);
}
}
superClass = superClass.getCachedSuperClass();
}
// got here to property not found, look for getProperty or setProperty overrides
if(isGetter) {
final Class[] getPropertyArgs = {String.class};
final MetaMethod method = findMethodInClassHierarchy(instance.getClass(), GET_PROPERTY_METHOD, getPropertyArgs, this);
if(method != null && method instanceof ClosureMetaMethod) {
onGetPropertyFoundInHierarchy(method);
return method.invoke(instance,new Object[]{propertyName});
}
}
else {
final Class[] setPropertyArgs = {String.class, Object.class};
final MetaMethod method = findMethodInClassHierarchy(instance.getClass(), SET_PROPERTY_METHOD, setPropertyArgs, this);
if(method != null && method instanceof ClosureMetaMethod) {
onSetPropertyFoundInHierarchy(method);
return method.invoke(instance, new Object[]{propertyName, optionalValue});
}
}
try {
if (!(instance instanceof Class)) {
if (isGetter && propertyMissingGet != null) {
return propertyMissingGet.invoke(instance, new Object[]{propertyName});
} else {
if (propertyMissingSet != null)
return propertyMissingSet.invoke(instance, new Object[]{propertyName, optionalValue});
}
}
} catch (InvokerInvocationException iie) {
boolean shouldHandle = isGetter && propertyMissingGet != null;
if (!shouldHandle) shouldHandle = !isGetter && propertyMissingSet != null;
if (shouldHandle && iie.getCause() instanceof MissingPropertyException) {
throw (MissingPropertyException) iie.getCause();
}
throw iie;
}
if (instance instanceof Class && theClass != Class.class) {
final MetaProperty metaProperty = InvokerHelper.getMetaClass(Class.class).hasProperty(instance, propertyName);
if (metaProperty != null)
if (isGetter)
return metaProperty.getProperty(instance);
else {
metaProperty.setProperty(instance, optionalValue);
return null;
}
}
throw new MissingPropertyExceptionNoStack(propertyName, theClass);
}
private Object invokeMissingMethod(Object instance, String methodName, Object[] arguments, RuntimeException original, boolean isCallToSuper) {
if (!isCallToSuper) {
Class instanceKlazz = instance.getClass();
if (theClass != instanceKlazz && theClass.isAssignableFrom(instanceKlazz))
instanceKlazz = theClass;
Class[] argClasses = MetaClassHelper.castArgumentsToClassArray(arguments);
MetaMethod method = findMixinMethod(methodName, argClasses);
if(method != null) {
onMixinMethodFound(method);
return method.invoke(instance, arguments);
}
method = findMethodInClassHierarchy(instanceKlazz, methodName, argClasses, this);
if(method != null) {
onSuperMethodFoundInHierarchy(method);
return method.invoke(instance, arguments);
}
// still not method here, so see if there is an invokeMethod method up the hierarchy
final Class[] invokeMethodArgs = {String.class, Object[].class};
method = findMethodInClassHierarchy(instanceKlazz, INVOKE_METHOD_METHOD, invokeMethodArgs, this );
if(method != null && method instanceof ClosureMetaMethod) {
onInvokeMethodFoundInHierarchy(method);
return method.invoke(instance, invokeMethodArgs);
}
}
if (methodMissing != null) {
try {
return methodMissing.invoke(instance, new Object[]{methodName, arguments});
} catch (InvokerInvocationException iie) {
if (methodMissing instanceof ClosureMetaMethod && iie.getCause() instanceof MissingMethodException) {
MissingMethodException mme = (MissingMethodException) iie.getCause();
throw new MissingMethodExecutionFailed (mme.getMethod(), mme.getClass(),
mme.getArguments(),mme.isStatic(),mme);
}
throw iie;
} catch (MissingMethodException mme) {
if (methodMissing instanceof ClosureMetaMethod)
throw new MissingMethodExecutionFailed (mme.getMethod(), mme.getClass(),
mme.getArguments(),mme.isStatic(),mme);
else
throw mme;
}
} else if (original != null) throw original;
else throw new MissingMethodExceptionNoStack(methodName, theClass, arguments, false);
}
    /** Hook: called when a property was located in the super class hierarchy. Default is a no-op; subclasses may override. */
    protected void onSuperPropertyFoundInHierarchy(MetaBeanProperty property) {
    }

    /** Hook: called when a missing method was satisfied by a mixin method. Default is a no-op; subclasses may override. */
    protected void onMixinMethodFound(MetaMethod method) {
    }

    /** Hook: called when a method was located in the super class hierarchy. Default is a no-op; subclasses may override. */
    protected void onSuperMethodFoundInHierarchy(MetaMethod method) {
    }

    /** Hook: called when a closure-based invokeMethod override was located in the hierarchy. Default is a no-op; subclasses may override. */
    protected void onInvokeMethodFoundInHierarchy(MetaMethod method) {
    }

    /** Hook: called when a closure-based setProperty override was located in the hierarchy. Default is a no-op; subclasses may override. */
    protected void onSetPropertyFoundInHierarchy(MetaMethod method) {
    }

    /** Hook: called when a closure-based getProperty override was located in the hierarchy. Default is a no-op; subclasses may override. */
    protected void onGetPropertyFoundInHierarchy(MetaMethod method) {
    }
/**
* Hook to deal with the case of MissingProperty for static properties. The method will look attempt to look up
* "propertyMissing" handlers and invoke them otherwise thrown a MissingPropertyException
*
* @param instance The instance
* @param propertyName The name of the property
* @param optionalValue The value in the case of a setter
* @param isGetter True if its a getter
* @return The value in the case of a getter or a MissingPropertyException
*/
protected Object invokeStaticMissingProperty(Object instance, String propertyName, Object optionalValue, boolean isGetter) {
MetaClass mc = instance instanceof Class ? registry.getMetaClass((Class) instance) : this;
if (isGetter) {
MetaMethod propertyMissing = mc.getMetaMethod(STATIC_PROPERTY_MISSING, GETTER_MISSING_ARGS);
if (propertyMissing != null) {
return propertyMissing.invoke(instance, new Object[]{propertyName});
}
} else {
MetaMethod propertyMissing = mc.getMetaMethod(STATIC_PROPERTY_MISSING, SETTER_MISSING_ARGS);
if (propertyMissing != null) {
return propertyMissing.invoke(instance, new Object[]{propertyName, optionalValue});
}
}
if (instance instanceof Class) {
throw new MissingPropertyException(propertyName, (Class) instance);
}
throw new MissingPropertyException(propertyName, theClass);
}
    /**
     * Invokes a method on the given receiver for the specified arguments.
     * The MetaClass will attempt to establish the method to invoke based on the name and arguments provided.
     * <p>
     * Delegates to the six-argument overload with this meta class's own class
     * as the sender and with isCallToSuper and fromInsideClass both false.
     *
     * @param object The object which the method was invoked on
     * @param methodName The name of the method
     * @param originalArguments The arguments to the method
     *
     * @return The return value of the method
     *
     * @see MetaClass#invokeMethod(Class, Object, String, Object[], boolean, boolean)
     */
    public Object invokeMethod(Object object, String methodName, Object[] originalArguments) {
        return invokeMethod(theClass, object, methodName, originalArguments, false, false);
    }
    /**
     * <p>Invokes a method on the given receiver for the specified arguments. The sender is the class that invoked the method on the object.
     * The MetaClass will attempt to establish the method to invoke based on the name and arguments provided.
     *
     * <p>The isCallToSuper and fromInsideClass help the Groovy runtime perform optimisations on the call to go directly
     * to the super class if necessary
     *
     * <p>For Closure receivers the closure's resolve strategy (owner/delegate
     * precedence) governs where the method is looked up before falling back to
     * the missing-method protocol.
     *
     * @param sender The java.lang.Class instance that invoked the method
     * @param object The object which the method was invoked on
     * @param methodName The name of the method
     * @param originalArguments The arguments to the method
     * @param isCallToSuper Whether the method is a call to a super class method
     * @param fromInsideClass Whether the call was invoked from the inside or the outside of the class
     *
     * @return The return value of the method
     *
     * @see MetaClass#invokeMethod(Class, Object, String, Object[], boolean, boolean)
     */
    public Object invokeMethod(Class sender, Object object, String methodName, Object[] originalArguments, boolean isCallToSuper, boolean fromInsideClass) {
        checkInitalised();
        if (object == null) {
            throw new NullPointerException("Cannot invoke method: " + methodName + " on null object");
        }

        final Object[] arguments = originalArguments == null ? EMPTY_ARGUMENTS : originalArguments;

        MetaMethod method = null;
        // Generated closures route "call" to their generated "doCall" implementation.
        if (CLOSURE_CALL_METHOD.equals(methodName) && object instanceof GeneratedClosure) {
            method = getMethodWithCaching(sender, "doCall", arguments, isCallToSuper);
        }
        if (method==null) {
            method = getMethodWithCaching(sender, methodName, arguments, isCallToSuper);
        }
        MetaClassHelper.unwrap(arguments);

        // Fallback: a single List argument may be spread over the parameters.
        if (method == null)
            method = tryListParamMetaMethod(sender, methodName, isCallToSuper, arguments);

        final boolean isClosure = object instanceof Closure;
        if (isClosure) {
            final Closure closure = (Closure) object;
            final Object owner = closure.getOwner();

            if (CLOSURE_CALL_METHOD.equals(methodName) || CLOSURE_DO_CALL_METHOD.equals(methodName)) {
                final Class objectClass = object.getClass();
                if (objectClass == MethodClosure.class) {
                    // Method closures forward call/doCall to the referenced method on their owner.
                    final MethodClosure mc = (MethodClosure) object;
                    methodName = mc.getMethod();
                    final Class ownerClass = owner instanceof Class ? (Class) owner : owner.getClass();
                    final MetaClass ownerMetaClass = registry.getMetaClass(ownerClass);
                    return ownerMetaClass.invokeMethod(ownerClass, owner, methodName, arguments, false, false);
                } else if (objectClass == CurriedClosure.class) {
                    final CurriedClosure cc = (CurriedClosure) object;
                    // change the arguments for an uncurried call
                    final Object[] curriedArguments = cc.getUncurriedArguments(arguments);
                    final Class ownerClass = owner instanceof Class ? (Class) owner : owner.getClass();
                    final MetaClass ownerMetaClass = registry.getMetaClass(ownerClass);
                    return ownerMetaClass.invokeMethod(owner, methodName, curriedArguments);
                }
                if (method==null) invokeMissingMethod(object,methodName,arguments);
            }

            final Object delegate = closure.getDelegate();
            final boolean isClosureNotOwner = owner != closure;
            final int resolveStrategy = closure.getResolveStrategy();

            final Class[] argClasses = MetaClassHelper.convertToTypeArray(arguments);

            switch (resolveStrategy) {
                case Closure.TO_SELF:
                    // Resolve only against the closure object itself.
                    method = closure.getMetaClass().pickMethod(methodName, argClasses);
                    if (method != null) return method.invoke(closure, arguments);
                    break;
                case Closure.DELEGATE_ONLY:
                    if (method == null && delegate != closure && delegate != null) {
                        MetaClass delegateMetaClass = lookupObjectMetaClass(delegate);
                        method = delegateMetaClass.pickMethod(methodName, argClasses);
                        if (method != null)
                            return delegateMetaClass.invokeMethod(delegate, methodName, originalArguments);
                        else if (delegate != closure && (delegate instanceof GroovyObject)) {
                            return invokeMethodOnGroovyObject(methodName, originalArguments, delegate);
                        }
                    }
                    break;
                case Closure.OWNER_ONLY:
                    if (method == null && owner != closure) {
                        MetaClass ownerMetaClass = lookupObjectMetaClass(owner);
                        return ownerMetaClass.invokeMethod(owner, methodName, originalArguments);
                    }
                    break;
                case Closure.DELEGATE_FIRST:
                    // Delegate takes precedence over the owner.
                    if (method == null && delegate != closure && delegate != null) {
                        MetaClass delegateMetaClass = lookupObjectMetaClass(delegate);
                        method = delegateMetaClass.pickMethod(methodName, argClasses);
                        if (method != null)
                            return delegateMetaClass.invokeMethod(delegate, methodName, originalArguments);
                    }
                    if (method == null && owner != closure) {
                        MetaClass ownerMetaClass = lookupObjectMetaClass(owner);
                        method = ownerMetaClass.pickMethod(methodName, argClasses);
                        if (method != null) return ownerMetaClass.invokeMethod(owner, methodName, originalArguments);
                    }
                    if (method == null && resolveStrategy != Closure.TO_SELF) {
                        // still no methods found, test if delegate or owner are GroovyObjects
                        // and invoke the method on them if so.
                        MissingMethodException last = null;
                        if (delegate != closure && (delegate instanceof GroovyObject)) {
                            try {
                                return invokeMethodOnGroovyObject(methodName, originalArguments, delegate);
                            } catch (MissingMethodException mme) {
                                if (last == null) last = mme;
                            }
                        }
                        if (isClosureNotOwner && (owner instanceof GroovyObject)) {
                            try {
                                return invokeMethodOnGroovyObject(methodName, originalArguments, owner);
                            } catch (MissingMethodException mme) {
                                last = mme;
                            }
                        }
                        if (last != null) return invokeMissingMethod(object, methodName, originalArguments, last, isCallToSuper);
                    }
                    break;
                default:
                    // Any other strategy (notably OWNER_FIRST): owner before delegate.
                    if (method == null && owner != closure) {
                        MetaClass ownerMetaClass = lookupObjectMetaClass(owner);
                        method = ownerMetaClass.pickMethod(methodName, argClasses);
                        if (method != null) return ownerMetaClass.invokeMethod(owner, methodName, originalArguments);
                    }
                    if (method == null && delegate != closure && delegate != null) {
                        MetaClass delegateMetaClass = lookupObjectMetaClass(delegate);
                        method = delegateMetaClass.pickMethod(methodName, argClasses);
                        if (method != null)
                            return delegateMetaClass.invokeMethod(delegate, methodName, originalArguments);
                    }
                    if (method == null && resolveStrategy != Closure.TO_SELF) {
                        // still no methods found, test if delegate or owner are GroovyObjects
                        // and invoke the method on them if so.
                        MissingMethodException last = null;
                        if (isClosureNotOwner && (owner instanceof GroovyObject)) {
                            try {
                                return invokeMethodOnGroovyObject(methodName, originalArguments, owner);
                            } catch (MissingMethodException mme) {
                                // Only remember failures for the method actually being invoked;
                                // a different missing method is rethrown as-is.
                                if (methodName.equals(mme.getMethod())) {
                                    if (last == null) last = mme;
                                } else {
                                    throw mme;
                                }
                            }
                            catch (InvokerInvocationException iie) {
                                if (iie.getCause() instanceof MissingMethodException) {
                                    MissingMethodException mme = (MissingMethodException) iie.getCause();
                                    if (methodName.equals(mme.getMethod())) {
                                        if (last == null) last = mme;
                                    } else {
                                        throw iie;
                                    }
                                }
                                else
                                    throw iie;
                            }
                        }
                        if (delegate != closure && (delegate instanceof GroovyObject)) {
                            try {
                                return invokeMethodOnGroovyObject(methodName, originalArguments, delegate);
                            } catch (MissingMethodException mme) {
                                last = mme;
                            }
                            catch (InvokerInvocationException iie) {
                                if (iie.getCause() instanceof MissingMethodException) {
                                    last = (MissingMethodException) iie.getCause();
                                }
                                else
                                    throw iie;
                            }
                        }
                        if (last != null) return invokeMissingMethod(object, methodName, originalArguments, last, isCallToSuper);
                    }
            }
        }

        if (method != null) {
            // A concrete method was resolved; invoke it directly.
            return method.doMethodInvoke(object, arguments);
        } else {
            // No method found: try closure-valued properties, script bindings,
            // and finally the missing-method protocol.
            return invokePropertyOrMissing(object, methodName, originalArguments, fromInsideClass, isCallToSuper);
        }
    }
private MetaMethod tryListParamMetaMethod(Class sender, String methodName, boolean isCallToSuper, Object[] arguments) {
MetaMethod method = null;
if (arguments.length == 1 && arguments[0] instanceof List) {
Object[] newArguments = ((List) arguments[0]).toArray();
method = getMethodWithCaching(sender, methodName, newArguments, isCallToSuper);
if (method != null) {
method = new TransformMetaMethod(method) {
public Object invoke(Object object, Object[] arguments) {
Object firstArgument = arguments[0];
List list = (List) firstArgument;
arguments = list.toArray();
return super.invoke(object, arguments);
}
};
}
}
return method;
}
    /**
     * Called after method lookup failed: tries to find a Closure stored as a
     * property of the receiver (or as a Map entry / Script binding variable of
     * that name) and calls it; otherwise falls through to the missing-method
     * protocol.
     */
    private Object invokePropertyOrMissing(Object object, String methodName, Object[] originalArguments, boolean fromInsideClass, boolean isCallToSuper) {
        // if no method was found, try to find a closure defined as a field of the class and run it
        Object value = null;
        final MetaProperty metaProperty = this.getMetaProperty(methodName, false);
        if (metaProperty != null)
            value = metaProperty.getProperty(object);
        else {
            // Map receivers: an entry of that name may hold a callable value.
            if (object instanceof Map)
                value = ((Map)object).get(methodName);
        }

        if (value instanceof Closure) {  // This test ensures that value != this. If you ever change this, ensure that value != this.
            Closure closure = (Closure) value;
            MetaClass delegateMetaClass = closure.getMetaClass();
            // Invoke the closure through its own meta class via "doCall".
            return delegateMetaClass.invokeMethod(closure.getClass(), closure, CLOSURE_DO_CALL_METHOD, originalArguments, false, fromInsideClass);
        }

        if (object instanceof Script) {
            // Scripts: a binding variable of that name may hold a callable value.
            Object bindingVar = ((Script) object).getBinding().getVariables().get(methodName);
            if (bindingVar != null) {
                MetaClass bindingVarMC = ((MetaClassRegistryImpl) registry).getMetaClass(bindingVar);
                return bindingVarMC.invokeMethod(bindingVar, CLOSURE_CALL_METHOD, originalArguments);
            }
        }
        return invokeMissingMethod(object, methodName, originalArguments, null, isCallToSuper);
    }
private MetaClass lookupObjectMetaClass(Object object) {
if (object instanceof GroovyObject) {
GroovyObject go = (GroovyObject) object;
return go.getMetaClass();
}
Class ownerClass = object.getClass();
if (ownerClass == Class.class) ownerClass = (Class) object;
MetaClass metaClass = registry.getMetaClass(ownerClass);
return metaClass;
}
    /**
     * Routes a call through GroovyObject.invokeMethod on the given owner.
     *
     * @param methodName        the method to invoke
     * @param originalArguments the call arguments
     * @param owner             the receiver; must implement GroovyObject
     * @return the invocation result
     */
    private Object invokeMethodOnGroovyObject(String methodName, Object[] originalArguments, Object owner) {
        GroovyObject go = (GroovyObject) owner;
        return go.invokeMethod(methodName, originalArguments);
    }
public MetaMethod getMethodWithCaching(Class sender, String methodName, Object[] arguments, boolean isCallToSuper) {
// let's try use the cache to find the method
if (!isCallToSuper && GroovyCategorySupport.hasCategoryInCurrentThread()) {
return getMethodWithoutCaching(sender, methodName, MetaClassHelper.convertToTypeArray(arguments), isCallToSuper);
} else {
final MetaMethodIndex.Entry e = metaMethodIndex.getMethods(sender, methodName);
if (e == null)
return null;
return isCallToSuper ? getSuperMethodWithCaching(arguments, e) : getNormalMethodWithCaching(arguments, e);
}
}
private static boolean sameClasses(Class[] params, Class[] arguments) {
// we do here a null check because the params field might not have been set yet
if (params == null) return false;
if (params.length != arguments.length)
return false;
for (int i = params.length - 1; i >= 0; i--) {
Object arg = arguments[i];
if (arg != null) {
if (params[i] != arguments[i]) return false;
} else return false;
}
return true;
}
    /**
     * Cached method lookup keyed by a call site's name and parameter classes.
     * This method should be called by CallSite only.
     * <p>
     * The cache is bypassed while a category is active on the current thread.
     * NOTE(review): the cache read/update below is not synchronized —
     * presumably a benign-race single-entry cache; confirm against
     * MetaMethodIndex usage.
     *
     * @param sender the class the call originates from
     * @param site   the call site (supplies the method name)
     * @param params the parameter classes to match
     * @return the resolved meta method, or null when none matches
     */
    private MetaMethod getMethodWithCachingInternal (Class sender, CallSite site, Class [] params) {
        if (GroovyCategorySupport.hasCategoryInCurrentThread())
            return getMethodWithoutCaching(sender, site.getName (), params, false);

        final MetaMethodIndex.Entry e = metaMethodIndex.getMethods(sender, site.getName());
        if (e == null) {
            return null;
        }

        MetaMethodIndex.CacheEntry cacheEntry;
        final Object methods = e.methods;
        if (methods == null)
            return null;

        // Cache hit: parameter classes are identical to the previous lookup.
        cacheEntry = e.cachedMethod;
        if (cacheEntry != null && (sameClasses(cacheEntry.params, params))) {
            return cacheEntry.method;
        }

        cacheEntry = new MetaMethodIndex.CacheEntry (params, (MetaMethod) chooseMethod(e.name, methods, params));

        e.cachedMethod = cacheEntry;

        return cacheEntry.method;
    }
    /**
     * Resolves a method for a call to super using the per-entry cache.
     *
     * @param arguments the actual call arguments
     * @param e         the method index entry for the method name
     * @return the resolved meta method, or null when no super candidates exist
     */
    private MetaMethod getSuperMethodWithCaching(Object[] arguments, MetaMethodIndex.Entry e) {
        MetaMethodIndex.CacheEntry cacheEntry;
        if (e.methodsForSuper == null)
            return null;

        cacheEntry = e.cachedMethodForSuper;

        // Cache hit: parameter classes match the previous lookup.
        if (cacheEntry != null &&
            MetaClassHelper.sameClasses(cacheEntry.params, arguments, e.methodsForSuper instanceof MetaMethod))
        {
            MetaMethod method = cacheEntry.method;
            if (method!=null) return method;
        }

        final Class[] classes = MetaClassHelper.convertToTypeArray(arguments);
        MetaMethod method = (MetaMethod) chooseMethod(e.name, e.methodsForSuper, classes);
        // Abstract methods are stored as null in the cache entry (see the
        // isAbstract() check), so their resolution is not treated as a hit.
        cacheEntry = new MetaMethodIndex.CacheEntry (classes, method.isAbstract()?null:method);
        e.cachedMethodForSuper = cacheEntry;
        return cacheEntry.method;
    }
    /**
     * Resolves a normal (non-super) method using the per-entry cache.
     *
     * @param arguments the actual call arguments
     * @param e         the method index entry for the method name
     * @return the resolved meta method, or null when no candidates exist
     */
    private MetaMethod getNormalMethodWithCaching(Object[] arguments, MetaMethodIndex.Entry e) {
        MetaMethodIndex.CacheEntry cacheEntry;
        final Object methods = e.methods;
        if (methods == null)
            return null;

        cacheEntry = e.cachedMethod;

        // Cache hit: parameter classes match the previous lookup.
        if (cacheEntry != null &&
            MetaClassHelper.sameClasses(cacheEntry.params, arguments, methods instanceof MetaMethod))
        {
            MetaMethod method = cacheEntry.method;
            if (method!=null) return method;
        }

        final Class[] classes = MetaClassHelper.convertToTypeArray(arguments);
        cacheEntry = new MetaMethodIndex.CacheEntry (classes, (MetaMethod) chooseMethod(e.name, methods, classes));
        e.cachedMethod = cacheEntry;
        return cacheEntry.method;
    }
public Constructor retrieveConstructor(Class[] arguments) {
CachedConstructor constructor = (CachedConstructor) chooseMethod("<init>", constructors, arguments);
if (constructor != null) {
return constructor.cachedConstructor;
}
constructor = (CachedConstructor) chooseMethod("<init>", constructors, arguments);
if (constructor != null) {
return constructor.cachedConstructor;
}
return null;
}
    /**
     * Resolves a static method by name and call arguments, using the cached
     * static-method entry in the method index when available.
     *
     * @param methodName the static method name
     * @param arguments  the actual call arguments
     * @return the resolved static meta method, or null when none matches
     */
    public MetaMethod retrieveStaticMethod(String methodName, Object[] arguments) {
        final MetaMethodIndex.Entry e = metaMethodIndex.getMethods(theClass, methodName);
        MetaMethodIndex.CacheEntry cacheEntry;
        if (e != null) {
            cacheEntry = e.cachedStaticMethod;

            // Cache hit: parameter classes match the previous lookup.
            if (cacheEntry != null &&
                MetaClassHelper.sameClasses(cacheEntry.params, arguments, e.staticMethods instanceof MetaMethod))
            {
                 return cacheEntry.method;
            }

            final Class[] classes = MetaClassHelper.convertToTypeArray(arguments);
            cacheEntry = new MetaMethodIndex.CacheEntry (classes, pickStaticMethod(methodName, classes));

            e.cachedStaticMethod = cacheEntry;

            return cacheEntry.method;
        }
        else
            // No index entry at all: resolve without caching.
            return pickStaticMethod(methodName, MetaClassHelper.convertToTypeArray(arguments));
    }
public MetaMethod getMethodWithoutCaching(Class sender, String methodName, Class[] arguments, boolean isCallToSuper) {
MetaMethod method = null;
Object methods = getMethods(sender, methodName, isCallToSuper);
if (methods != null) {
method = (MetaMethod) chooseMethod(methodName, methods, arguments);
}
return method;
}
    /**
     * Invokes a static method of the represented class (or, when the receiver
     * represents a different class, delegates to that class's meta class).
     * Resolution order: cached static method, then a closure-valued static
     * property, then static methods/properties up the superclass chain, and
     * finally the static missing-method protocol.
     *
     * @param object     the receiver: a Class object, or an instance
     * @param methodName the static method name
     * @param arguments  the call arguments (may be null)
     * @return the invocation result
     */
    public Object invokeStaticMethod(Object object, String methodName, Object[] arguments) {
        checkInitalised();

        final Class sender = object instanceof Class ? (Class) object : object.getClass();
        if (sender != theClass) {
            // Different class than the one this meta class represents:
            // route through that class's own meta class.
            MetaClass mc = registry.getMetaClass(sender);
            return mc.invokeStaticMethod(sender, methodName, arguments);
        }
        if (sender == Class.class) {
            return invokeMethod(object, methodName, arguments);
        }

        if (arguments == null) arguments = EMPTY_ARGUMENTS;

        // let's try use the cache to find the method
        MetaMethod method = retrieveStaticMethod(methodName, arguments);

        if (method != null) {
            MetaClassHelper.unwrap(arguments);
            return method.doMethodInvoke(object, arguments);
        }
        Object prop = null;
        try {
            prop = getProperty(theClass, theClass, methodName, false, false);
        } catch (MissingPropertyException mpe) {
            // ignore: the absence of the property just means the search continues
        }

        if (prop instanceof Closure) {
            return invokeStaticClosureProperty(arguments, prop);
        }

        // Keep an unmodified copy: unwrap(arguments) mutates the array in place.
        Object[] originalArguments = (Object[]) arguments.clone();
        MetaClassHelper.unwrap(arguments);

        Class superClass = sender.getSuperclass();
        Class[] argClasses = MetaClassHelper.convertToTypeArray(arguments);
        // Walk the superclass chain for static methods or closure properties.
        while (superClass != Object.class && superClass != null) {
            MetaClass mc = registry.getMetaClass(superClass);
            method = mc.getStaticMetaMethod(methodName, argClasses);
            if (method != null) return method.doMethodInvoke(object, arguments);

            try {
                prop = mc.getProperty(superClass, superClass, methodName, false, false);
            } catch (MissingPropertyException mpe) {
                // ignore
            }

            if (prop instanceof Closure) {
                return invokeStaticClosureProperty(originalArguments, prop);
            }

            superClass = superClass.getSuperclass();
        }

        if (prop != null) {
            // A non-closure property value: invoke its "call" method.
            MetaClass propMC = registry.getMetaClass(prop.getClass());
            return propMC.invokeMethod(prop, CLOSURE_CALL_METHOD, arguments);
        }

        return invokeStaticMissingMethod(sender, methodName, arguments);
    }
    /**
     * Invokes a Closure found stored in a static property, routing through
     * the closure's own meta class via "doCall".
     *
     * @param originalArguments the call arguments
     * @param prop              the property value (must be a Closure)
     * @return the closure's result
     */
    private Object invokeStaticClosureProperty(Object[] originalArguments, Object prop) {
        Closure closure = (Closure) prop;
        MetaClass delegateMetaClass = closure.getMetaClass();
        return delegateMetaClass.invokeMethod(closure.getClass(), closure, CLOSURE_DO_CALL_METHOD, originalArguments, false, false);
    }
private Object invokeStaticMissingMethod(Class sender, String methodName, Object[] arguments) {
MetaMethod metaMethod = getStaticMetaMethod(STATIC_METHOD_MISSING, METHOD_MISSING_ARGS);
if (metaMethod != null) {
return metaMethod.invoke(sender, new Object[]{methodName, arguments});
}
throw new MissingMethodException(methodName, sender, arguments, true);
}
    /**
     * Chooses a static method of the represented class for the given
     * parameter classes, also considering methods of java.lang.Class itself.
     *
     * @param methodName the static method name
     * @param arguments  the parameter classes to match
     * @return the chosen meta method, or null when none matches
     */
    private MetaMethod pickStaticMethod(String methodName, Class[] arguments) {
        MetaMethod method = null;
        MethodSelectionException mse = null;
        Object methods = getStaticMethods(theClass, methodName);

        if (!(methods instanceof FastArray) || !((FastArray)methods).isEmpty()) {
            try {
                method = (MetaMethod) chooseMethod(methodName, methods, arguments);
            } catch(MethodSelectionException msex) {
                // Selection failure is remembered but not immediately fatal:
                // a method resolved below may still satisfy the call.
                mse = msex;
            }
        }
        if (method == null && theClass != Class.class) {
            // Also consider methods declared on java.lang.Class itself.
            MetaClass classMetaClass = registry.getMetaClass(Class.class);
            method = classMetaClass.pickMethod(methodName, arguments);
        }
        if (method == null) {
            // NOTE(review): 'arguments' is already a Class[]; convertToTypeArray
            // yields the runtime types of those Class objects. Presumably this
            // matches methods applicable when the arguments are themselves
            // Class instances — confirm intent.
            method = (MetaMethod) chooseMethod(methodName, methods, MetaClassHelper.convertToTypeArray(arguments));
        }

        if (method == null && mse != null) {
            throw mse;
        } else {
            return method;
        }
    }
    /**
     * Invokes a constructor of the class this meta class represents.
     *
     * @param arguments the constructor call arguments
     * @return the newly constructed instance
     */
    public Object invokeConstructor(Object[] arguments) {
        return invokeConstructor(theClass, arguments);
    }
public int selectConstructorAndTransformArguments(int numberOfConstructors, Object[] arguments) {
if (numberOfConstructors==-1) {
return selectConstructorAndTransformArguments1(arguments);
} else {
// falling back to pre 2.1.9 selection algorithm
// in practice this branch will only be reached if the class calling this code is a Groovy class
// compiled with an earlier version of the Groovy compiler
return selectConstructorAndTransformArguments0(numberOfConstructors, arguments);
}
}
    /**
     * Legacy (pre 2.1.9) constructor selection: returns the index of the
     * chosen constructor — within the constructor list sorted by bytecode
     * descriptor — shifted left by 8 bits.
     *
     * @param numberOfConstructors the constructor count recorded at compile time
     * @param arguments            the constructor call arguments (unwrapped in place)
     * @return the encoded selector: (sortedIndex << 8); the low bits are reserved for vargs
     * @throws IncompatibleClassChangeError when the runtime constructor count
     *         differs from the compile-time count
     */
    private int selectConstructorAndTransformArguments0(final int numberOfConstructors, Object[] arguments) {
        //TODO: that is just a quick prototype, not the real thing!
        if (numberOfConstructors != constructors.size()) {
            throw new IncompatibleClassChangeError("the number of constructors during runtime and compile time for " +
                    this.theClass.getName() + " do not match. Expected " + numberOfConstructors + " but got " + constructors.size());
        }
        if (arguments == null) arguments = EMPTY_ARGUMENTS;
        Class[] argClasses = MetaClassHelper.convertToTypeArray(arguments);
        MetaClassHelper.unwrap(arguments);
        CachedConstructor constructor = (CachedConstructor) chooseMethod("<init>", constructors, argClasses);
        if (constructor == null) {
            // NOTE(review): this retry repeats the identical call above and can
            // only return the same result — presumably a leftover; confirm.
            constructor = (CachedConstructor) chooseMethod("<init>", constructors, argClasses);
        }
        if (constructor == null) {
            throw new GroovyRuntimeException(
                    "Could not find matching constructor for: "
                            + theClass.getName()
                            + "(" + InvokerHelper.toTypeString(arguments) + ")");
        }
        List l = new ArrayList(constructors.toList());
        // Sort the constructors by bytecode descriptor so the reported index
        // is stable regardless of reflection order.
        Comparator comp = new Comparator() {
            public int compare(Object arg0, Object arg1) {
                CachedConstructor c0 = (CachedConstructor) arg0;
                CachedConstructor c1 = (CachedConstructor) arg1;
                String descriptor0 = BytecodeHelper.getMethodDescriptor(Void.TYPE, c0.getNativeParameterTypes());
                String descriptor1 = BytecodeHelper.getMethodDescriptor(Void.TYPE, c1.getNativeParameterTypes());
                return descriptor0.compareTo(descriptor1);
            }
        };
        Collections.sort(l, comp);
        int found = -1;
        for (int i = 0; i < l.size(); i++) {
            if (l.get(i) != constructor) continue;
            found = i;
            break;
        }
        // NOTE: must be changed to "1 |" if constructor was vargs
        return 0 | (found << 8);
    }
    /**
     * Constructor selection algorithm for Groovy 2.1.9+.
     * This selection algorithm was introduced as a workaround for GROOVY-6080. Instead of generating an index between
     * 0 and N where N is the number of super constructors at the time the class is compiled, this algorithm uses
     * a hash of the constructor descriptor instead.
     *
     * This has the advantage of letting the super class add new constructors while being binary compatible. But there
     * are still problems with this approach:
     * <ul>
     * <li>There's a risk of hash collision, even if it's very low (a collision would require two constructors of the same class to hash to the same descriptor value)</li>
     * <li>If the super class adds a new constructor which takes as an argument a superclass of an existing constructor parameter and
     * that this new constructor is selected at runtime, it would not find it.</li>
     * </ul>
     *
     * Hopefully in the last case, the error message is much nicer now since it explains that it's a binary incompatible change.
     *
     * @param arguments the actual constructor call arguments
     * @return a hash used to identify the constructor to be called
     * @since 2.1.9
     */
    private int selectConstructorAndTransformArguments1(Object[] arguments) {
        if (arguments == null) arguments = EMPTY_ARGUMENTS;
        Class[] argClasses = MetaClassHelper.convertToTypeArray(arguments);
        MetaClassHelper.unwrap(arguments);
        CachedConstructor constructor = (CachedConstructor) chooseMethod("<init>", constructors, argClasses);
        if (constructor == null) {
            // NOTE(review): this retry repeats the identical call above and can
            // only return the same result — presumably a leftover; confirm.
            constructor = (CachedConstructor) chooseMethod("<init>", constructors, argClasses);
        }
        if (constructor == null) {
            throw new GroovyRuntimeException(
                    "Could not find matching constructor for: "
                            + theClass.getName()
                            + "(" + InvokerHelper.toTypeString(arguments) + ")");
        }
        final String methodDescriptor = BytecodeHelper.getMethodDescriptor(Void.TYPE, constructor.getNativeParameterTypes());
        // keeping 3 bits for additional information such as vargs
        return BytecodeHelper.hashCode(methodDescriptor);
    }
/**
* checks if the initialisation of the class id complete.
* This method should be called as a form of assert, it is no
* way to test if there is still initialisation work to be done.
* Such logic must be implemented in a different way.
*
* @throws IllegalStateException if the initialisation is incomplete yet
*/
protected void checkInitalised() {
if (!isInitialized())
throw new IllegalStateException(
"initialize must be called for meta " +
"class of " + theClass +
"(" + this.getClass() + ") " +
"to complete initialisation process " +
"before any invocation or field/property " +
"access can be done");
}
    /**
     * This is a helper class introduced in Groovy 2.1.0, which is used only by
     * indy. This class is for internal use only.
     * @since Groovy 2.1.0
     */
    public static final class MetaConstructor extends MetaMethod {
        // The wrapped reflective constructor.
        private final CachedConstructor cc;
        // True when this represents the Map-based "bean" construction path
        // (see retrieveConstructor(Object[])).
        private final boolean beanConstructor;

        private MetaConstructor(CachedConstructor cc, boolean bean) {
            super(cc.getNativeParameterTypes());
            this.setParametersTypes(cc.getParameterTypes());
            this.cc = cc;
            this.beanConstructor = bean;
        }

        @Override
        public int getModifiers() { return cc.getModifiers(); }

        @Override
        public String getName() { return "<init>"; }

        @Override
        public Class getReturnType() { return cc.getCachedClass().getTheClass(); }

        @Override
        public CachedClass getDeclaringClass() { return cc.getCachedClass(); }

        @Override
        public Object invoke(Object object, Object[] arguments) {
            return cc.doConstructorInvoke(arguments);
        }

        // NOTE(review): the misspelled method name is part of the public API
        // and must not be corrected without a deprecation path.
        public CachedConstructor getCachedConstrcutor() { return cc; }

        public boolean isBeanConstructor() { return beanConstructor; }
    }
    /**
     * This is a helper method added in Groovy 2.1.0, which is used only by indy.
     * This method is for internal use only.
     * <p>
     * Falls back to Map-based "bean" construction when no constructor matches:
     * either a no-arg constructor followed by property setting (single Map
     * argument), or a single-arg constructor taking the enclosing class's
     * instance (inner class case with a trailing Map argument).
     * @since Groovy 2.1.0
     */
    public MetaMethod retrieveConstructor(Object[] arguments) {
        checkInitalised();
        if (arguments == null) arguments = EMPTY_ARGUMENTS;
        Class[] argClasses = MetaClassHelper.convertToTypeArray(arguments);
        MetaClassHelper.unwrap(arguments);
        Object res = chooseMethod("<init>", constructors, argClasses);
        if (res instanceof MetaMethod) return (MetaMethod) res;
        CachedConstructor constructor = (CachedConstructor) res;
        if (constructor != null) return new MetaConstructor(constructor, false);
        // No direct match: try the Map-based bean construction paths.
        if (arguments.length == 1 && arguments[0] instanceof Map) {
            res = chooseMethod("<init>", constructors, MetaClassHelper.EMPTY_TYPE_ARRAY);
        } else if (
                arguments.length == 2 && arguments[1] instanceof Map &&
                        theClass.getEnclosingClass()!=null &&
                        theClass.getEnclosingClass().isAssignableFrom(argClasses[0]))
        {
            res = chooseMethod("<init>", constructors, new Class[]{argClasses[0]});
        }
        if (res instanceof MetaMethod) return (MetaMethod) res;
        constructor = (CachedConstructor) res;
        // The 'true' flag marks the result as a bean (Map-driven) constructor.
        if (constructor != null) return new MetaConstructor(constructor, true);

        return null;
    }
    /**
     * Invokes the constructor matching the given arguments and returns the new
     * instance. If no direct match exists and the single argument is a Map, the
     * no-arg constructor is used and the Map entries are applied as bean
     * properties afterwards.
     *
     * @param at        unused here; kept for signature compatibility with callers
     * @param arguments the constructor arguments (may be null, treated as empty)
     * @throws GroovyRuntimeException if no matching constructor can be found
     */
    private Object invokeConstructor(Class at, Object[] arguments) {
        checkInitalised();
        if (arguments == null) arguments = EMPTY_ARGUMENTS;
        Class[] argClasses = MetaClassHelper.convertToTypeArray(arguments);
        // unwrap AFTER computing argClasses so wrapper types drive the selection
        MetaClassHelper.unwrap(arguments);
        CachedConstructor constructor = (CachedConstructor) chooseMethod("<init>", constructors, argClasses);
        if (constructor != null) {
            return constructor.doConstructorInvoke(arguments);
        }
        if (arguments.length == 1) {
            Object firstArgument = arguments[0];
            if (firstArgument instanceof Map) {
                // bean-style construction: no-arg constructor, then set properties from the Map
                constructor = (CachedConstructor) chooseMethod("<init>", constructors, MetaClassHelper.EMPTY_TYPE_ARRAY);
                if (constructor != null) {
                    Object bean = constructor.doConstructorInvoke(MetaClassHelper.EMPTY_ARRAY);
                    setProperties(bean, ((Map) firstArgument));
                    return bean;
                }
            }
        }
        throw new GroovyRuntimeException(
                "Could not find matching constructor for: "
                        + theClass.getName()
                        + "(" + InvokerHelper.toTypeString(arguments) + ")");
    }
/**
* Sets a number of bean properties from the given Map where the keys are
* the String names of properties and the values are the values of the
* properties to set
*/
public void setProperties(Object bean, Map map) {
checkInitalised();
for (Iterator iter = map.entrySet().iterator(); iter.hasNext();) {
Map.Entry entry = (Map.Entry) iter.next();
String key = entry.getKey().toString();
Object value = entry.getValue();
setProperty(bean, key, value);
}
}
    /**
     * Returns the given property's value on the object.
     *
     * <p>Resolution order: static delegation to another class's meta class,
     * Map self-access, bean getter (possibly overridden by a category getter),
     * direct field access, generic get method (category or class-provided),
     * special cases (Class properties, Collection/array element access,
     * listener names), and finally propertyMissing handling.
     *
     * @param sender          the class requesting the property
     * @param object          the receiver (a Class for static access)
     * @param name            the property name
     * @param useSuper        whether super-class resolution is requested
     * @param fromInsideClass whether the access originates inside the class
     */
    public Object getProperty(Class sender, Object object, String name, boolean useSuper, boolean fromInsideClass) {
        //----------------------------------------------------------------------
        // handling of static
        //----------------------------------------------------------------------
        boolean isStatic = theClass != Class.class && object instanceof Class;
        if (isStatic && object != theClass) {
            // static access on a different class: delegate to that class's meta class
            MetaClass mc = registry.getMetaClass((Class) object);
            return mc.getProperty(sender, object, name, useSuper, false);
        }
        checkInitalised();
        //----------------------------------------------------------------------
        // turn getProperty on a Map to get on the Map itself
        //----------------------------------------------------------------------
        if (!isStatic && this.isMap) {
            return ((Map) object).get(name);
        }
        MetaMethod method = null;
        Object[] arguments = EMPTY_ARGUMENTS;
        //----------------------------------------------------------------------
        // getter
        //----------------------------------------------------------------------
        MetaProperty mp = getMetaProperty(sender, name, useSuper, isStatic);
        if (mp != null) {
            if (mp instanceof MetaBeanProperty) {
                // split the bean property into its getter (preferred) and field (fallback)
                MetaBeanProperty mbp = (MetaBeanProperty) mp;
                method = mbp.getGetter();
                mp = mbp.getField();
            }
        }
        // check for a category method named like a getter
        if (!useSuper && !isStatic && GroovyCategorySupport.hasCategoryInCurrentThread()) {
            String getterName = GroovyCategorySupport.getPropertyCategoryGetterName(name);
            if (getterName != null) {
                MetaMethod categoryMethod = getCategoryMethodGetter(sender, getterName, false);
                // a category getter overrides the bean getter
                if (categoryMethod != null) method = categoryMethod;
            }
        }
        //----------------------------------------------------------------------
        // field
        //----------------------------------------------------------------------
        if (method == null && mp != null) {
            try {
                return mp.getProperty(object);
            } catch (IllegalArgumentException e) {
                // can't access the field directly but there may be a getter
                mp = null;
            }
        }
        //----------------------------------------------------------------------
        // generic get method
        //----------------------------------------------------------------------
        // check for a generic get method provided through a category
        if (method == null && !useSuper && !isStatic && GroovyCategorySupport.hasCategoryInCurrentThread()) {
            method = getCategoryMethodGetter(sender, "get", true);
            // the generic form takes the property name as its argument
            if (method != null) arguments = new Object[]{name};
        }
        // the generic method is valid, if available (!=null), if static or
        // if it is not static and we do no static access
        if (method == null && genericGetMethod != null && !(!genericGetMethod.isStatic() && isStatic)) {
            arguments = new Object[]{name};
            method = genericGetMethod;
        }
        //----------------------------------------------------------------------
        // special cases
        //----------------------------------------------------------------------
        if (method == null) {
            /** todo these special cases should be special MetaClasses maybe */
            if (theClass != Class.class && object instanceof Class) {
                // properties of Class itself (e.g. on Class references)
                MetaClass mc = registry.getMetaClass(Class.class);
                return mc.getProperty(Class.class, object, name, useSuper, false);
            } else if (object instanceof Collection) {
                // spread-style property access over a collection's elements
                return DefaultGroovyMethods.getAt((Collection) object, name);
            } else if (object instanceof Object[]) {
                return DefaultGroovyMethods.getAt(Arrays.asList((Object[]) object), name);
            } else {
                MetaMethod addListenerMethod = (MetaMethod) listeners.get(name);
                if (addListenerMethod != null) {
                    //TODO: one day we could try return the previously registered Closure listener for easy removal
                    return null;
                }
            }
        } else {
            //----------------------------------------------------------------------
            // executing the getter method
            //----------------------------------------------------------------------
            return method.doMethodInvoke(object, arguments);
        }
        //----------------------------------------------------------------------
        // error due to missing method/field
        //----------------------------------------------------------------------
        if (isStatic || object instanceof Class)
            return invokeStaticMissingProperty(object, name, null, true);
        else
            return invokeMissingProperty(object, name, null, true);
    }
    /**
     * Returns a MetaProperty that, when asked for its value, performs the same
     * resolution as {@link #getProperty(Class, Object, String, boolean, boolean)}.
     * This lets call sites cache the resolution. All returned adapters are
     * read-only: their setProperty throws UnsupportedOperationException.
     */
    public MetaProperty getEffectiveGetMetaProperty(final Class sender, final Object object, String name, final boolean useSuper) {
        //----------------------------------------------------------------------
        // handling of static
        //----------------------------------------------------------------------
        boolean isStatic = theClass != Class.class && object instanceof Class;
        if (isStatic && object != theClass) {
            // static access on a different class: adapter delegating to its meta class
            return new MetaProperty(name, Object.class) {
                final MetaClass mc = registry.getMetaClass((Class) object);

                public Object getProperty(Object object) {
                    return mc.getProperty(sender, object, name, useSuper,false);
                }

                public void setProperty(Object object, Object newValue) {
                    throw new UnsupportedOperationException();
                }
            };
        }
        checkInitalised();
        //----------------------------------------------------------------------
        // turn getProperty on a Map to get on the Map itself
        //----------------------------------------------------------------------
        if (!isStatic && this.isMap) {
            return new MetaProperty(name, Object.class) {
                public Object getProperty(Object object) {
                    return ((Map) object).get(name);
                }

                public void setProperty(Object object, Object newValue) {
                    throw new UnsupportedOperationException();
                }
            };
        }
        MetaMethod method = null;
        //----------------------------------------------------------------------
        // getter
        //----------------------------------------------------------------------
        MetaProperty mp = getMetaProperty(sender, name, useSuper, isStatic);
        if (mp != null) {
            if (mp instanceof MetaBeanProperty) {
                // split into getter (preferred) and field (fallback)
                MetaBeanProperty mbp = (MetaBeanProperty) mp;
                method = mbp.getGetter();
                mp = mbp.getField();
            }
        }
        // check for a category method named like a getter
        if (!useSuper && !isStatic && GroovyCategorySupport.hasCategoryInCurrentThread()) {
            String getterName = GroovyCategorySupport.getPropertyCategoryGetterName(name);
            if (getterName != null) {
                // NOTE(review): this passes theClass where getProperty() passes sender
                // for the same lookup - confirm whether the difference is intended
                MetaMethod categoryMethod = getCategoryMethodGetter(theClass, getterName, false);
                if (categoryMethod != null)
                    method = categoryMethod;
            }
        }
        //----------------------------------------------------------------------
        // field
        //----------------------------------------------------------------------
        if (method != null)
            return new GetBeanMethodMetaProperty(name, method);
        if (mp != null) {
            return mp;
//            try {
//                return mp.getProperty(object);
//            } catch (IllegalArgumentException e) {
//                // can't access the field directly but there may be a getter
//                mp = null;
//            }
        }
        //----------------------------------------------------------------------
        // generic get method
        //----------------------------------------------------------------------
        // check for a generic get method provided through a category
        if (!useSuper && !isStatic && GroovyCategorySupport.hasCategoryInCurrentThread()) {
            method = getCategoryMethodGetter(sender, "get", true);
            if (method != null)
                return new GetMethodMetaProperty(name, method);
        }
        // the generic method is valid, if available (!=null), if static or
        // if it is not static and we do no static access
        if (genericGetMethod != null && !(!genericGetMethod.isStatic() && isStatic)) {
            method = genericGetMethod;
            // the inner null check is redundant (genericGetMethod was just tested non-null)
            if (method != null)
                return new GetMethodMetaProperty(name, method);
        }
        //----------------------------------------------------------------------
        // special cases
        //----------------------------------------------------------------------
        /** todo these special cases should be special MetaClasses maybe */
        if (theClass != Class.class && object instanceof Class) {
            return new MetaProperty(name, Object.class) {
                public Object getProperty(Object object) {
                    MetaClass mc = registry.getMetaClass(Class.class);
                    return mc.getProperty(Class.class, object, name, useSuper, false);
                }

                public void setProperty(Object object, Object newValue) {
                    throw new UnsupportedOperationException();
                }
            };
        } else if (object instanceof Collection) {
            // spread-style property access over a collection's elements
            return new MetaProperty(name, Object.class) {
                public Object getProperty(Object object) {
                    return DefaultGroovyMethods.getAt((Collection) object, name);
                }

                public void setProperty(Object object, Object newValue) {
                    throw new UnsupportedOperationException();
                }
            };
        } else if (object instanceof Object[]) {
            return new MetaProperty(name, Object.class) {
                public Object getProperty(Object object) {
                    return DefaultGroovyMethods.getAt(Arrays.asList((Object[]) object), name);
                }

                public void setProperty(Object object, Object newValue) {
                    throw new UnsupportedOperationException();
                }
            };
        } else {
            MetaMethod addListenerMethod = (MetaMethod) listeners.get(name);
            if (addListenerMethod != null) {
                //TODO: one day we could try return the previously registered Closure listener for easy removal
                return new MetaProperty(name, Object.class) {
                    public Object getProperty(Object object) {
                        return null;
                    }

                    public void setProperty(Object object, Object newValue) {
                        throw new UnsupportedOperationException();
                    }
                };
            }
        }
        //----------------------------------------------------------------------
        // error due to missing method/field
        //----------------------------------------------------------------------
        if (isStatic || object instanceof Class)
            return new MetaProperty(name, Object.class) {
                public Object getProperty(Object object) {
                    return invokeStaticMissingProperty(object, name, null, true);
                }

                public void setProperty(Object object, Object newValue) {
                    throw new UnsupportedOperationException();
                }
            };
        else
            return new MetaProperty(name, Object.class) {
                public Object getProperty(Object object) {
                    return invokeMissingProperty(object, name, null, true);
                }

                public void setProperty(Object object, Object newValue) {
                    throw new UnsupportedOperationException();
                }
            };
    }
private MetaMethod getCategoryMethodGetter(Class sender, String name, boolean useLongVersion) {
List possibleGenericMethods = GroovyCategorySupport.getCategoryMethods(name);
if (possibleGenericMethods != null) {
for (Iterator iter = possibleGenericMethods.iterator(); iter.hasNext();) {
MetaMethod mmethod = (MetaMethod) iter.next();
if (!mmethod.getDeclaringClass().getTheClass().isAssignableFrom(sender))
continue;
CachedClass[] paramTypes = mmethod.getParameterTypes();
if (useLongVersion) {
if (paramTypes.length == 1 && paramTypes[0].getTheClass() == String.class) {
return mmethod;
}
} else {
if (paramTypes.length == 0) return mmethod;
}
}
}
return null;
}
private MetaMethod getCategoryMethodSetter(Class sender, String name, boolean useLongVersion) {
List possibleGenericMethods = GroovyCategorySupport.getCategoryMethods(name);
if (possibleGenericMethods != null) {
for (Iterator iter = possibleGenericMethods.iterator(); iter.hasNext();) {
MetaMethod mmethod = (MetaMethod) iter.next();
if (!mmethod.getDeclaringClass().getTheClass().isAssignableFrom(sender))
continue;
CachedClass[] paramTypes = mmethod.getParameterTypes();
if (useLongVersion) {
if (paramTypes.length == 2 && paramTypes[0].getTheClass() == String.class) {
return mmethod;
}
} else {
if (paramTypes.length == 1) return mmethod;
}
}
}
return null;
}
    /**
     * Get all the properties defined for this type
     *
     * <p>Plain fields (CachedField) and DGM-contributed bean properties whose
     * only accessors come from generated/category methods are filtered out.
     *
     * @return a list of MetaProperty objects
     */
    public List<MetaProperty> getProperties() {
        checkInitalised();
        SingleKeyHashMap propertyMap = classPropertyIndex.getNullable(theCachedClass);
        if (propertyMap==null) {
            // GROOVY-6903: May happen in some special environment, like under Android, due
            // to classloading issues
            propertyMap = new SingleKeyHashMap();
        }
        // simply return the values of the metaproperty map as a List
        List ret = new ArrayList(propertyMap.size());
        for (ComplexKeyHashMap.EntryIterator iter = propertyMap.getEntrySetIterator(); iter.hasNext();) {
            MetaProperty element = (MetaProperty) ((SingleKeyHashMap.Entry) iter.next()).value;
            if (element instanceof CachedField) continue;
            // filter out DGM beans
            if (element instanceof MetaBeanProperty) {
                MetaBeanProperty mp = (MetaBeanProperty) element;
                boolean setter = true;
                boolean getter = true;
                // an accessor "counts" only if it exists and is neither generated nor category-provided
                if (mp.getGetter() == null || mp.getGetter() instanceof GeneratedMetaMethod || mp.getGetter() instanceof NewInstanceMetaMethod) {
                    getter = false;
                }
                if (mp.getSetter() == null || mp.getSetter() instanceof GeneratedMetaMethod || mp.getSetter() instanceof NewInstanceMetaMethod) {
                    setter = false;
                }
                if (!setter && !getter) continue;
//  TODO: I (ait) don't know why these strange tricks needed and comment following as it effects some Grails tests
//                if (!setter && mp.getSetter() != null) {
//                    element = new MetaBeanProperty(mp.getName(), mp.getType(), mp.getGetter(), null);
//                }
//                if (!getter && mp.getGetter() != null) {
//                    element = new MetaBeanProperty(mp.getName(), mp.getType(), null, mp.getSetter());
//                }
            }
            ret.add(element);
        }
        return ret;
    }
    /**
     * Filters a method or FastArray of methods down to those usable as a property
     * accessor. For getters a zero-argument, non-void method is required (and,
     * for the single-method case, a boolean return when booleanGetter is set);
     * for setters a single-argument method is required.
     *
     * <p>return null if nothing valid has been found, a MetaMethod (for getter
     * always the case if not null) or a LinkedList&lt;MetaMethod&gt; if there are
     * multiple setter
     */
    private Object filterPropertyMethod(Object methodOrList, boolean isGetter, boolean booleanGetter) {
        // Method has been optimized to reach a target of 325 bytecode size, making it JIT'able
        Object ret = null;

        if (methodOrList instanceof MetaMethod) {
            MetaMethod element = (MetaMethod)methodOrList;
            int parameterCount = element.getParameterTypes().length;
            if (!isGetter &&
                    //(element.getReturnType() == Void.class || element.getReturnType() == Void.TYPE) &&
                    parameterCount == 1) {
                ret = element;
            }
            Class returnType = element.getReturnType();
            if (isGetter &&
                    !(returnType == Void.class || returnType == Void.TYPE) &&
                    (!booleanGetter || returnType == Boolean.class || returnType == Boolean.TYPE) &&
                    parameterCount == 0) {
                ret = element;
            }
        }
        if (methodOrList instanceof FastArray) {
            FastArray methods = (FastArray) methodOrList;
            final int len = methods.size();
            final Object[] data = methods.getArray();
            for (int i = 0; i != len; ++i) {
                MetaMethod element = (MetaMethod) data[i];
                int parameterCount = element.getParameterTypes().length;
                if (!isGetter &&
                        //(element.getReturnType() == Void.class || element.getReturnType() == Void.TYPE) &&
                        parameterCount == 1) {
                    ret = addElementToList(ret, element);
                }
                Class returnType = element.getReturnType();
                // NOTE(review): unlike the single-method branch above, this branch does
                // not apply the booleanGetter return-type restriction - confirm intended
                if (isGetter &&
                        !(returnType == Void.class || returnType == Void.TYPE) &&
                        parameterCount == 0) {
                    ret = addElementToList(ret, element);
                }
            }
        }
        if (ret == null
                || (ret instanceof MetaMethod)
                || !isGetter) {
            return ret;
        }
        // we found multiple matching methods
        // this is a problem, because we can use only one
        // if it is a getter, then use the most general return
        // type to decide which method to use. If it is a setter
        // we use the type of the first parameter
        MetaMethod method = null;
        int distance = -1;
        for (final Object o : ((List) ret)) {
            MetaMethod element = (MetaMethod) o;
            int localDistance = distanceToObject(element.getReturnType());
            //TODO: maybe implement the case localDistance==distance
            if (distance == -1 || distance > localDistance) {
                distance = localDistance;
                method = element;
            }
        }
        return method;
    }
private Object addElementToList(Object ret, MetaMethod element) {
if (ret == null)
ret = element;
else if (ret instanceof List)
((List) ret).add(element);
else {
List list = new LinkedList();
list.add(ret);
list.add(element);
ret = list;
}
return ret;
}
private static int distanceToObject(Class c) {
int count;
for (count = 0; c != null; count++) {
c = c.getSuperclass();
}
return count;
}
    /**
     * This will build up the property map (Map of MetaProperty objects, keyed on
     * property name).
     *
     * <p>Interfaces and classes take different paths: interfaces inherit fields
     * from Object and their (sorted) super-interfaces, while classes inherit
     * through the superclass chain plus static interface fields. Both paths end
     * by applying the bean PropertyDescriptors, stray getter/setter methods,
     * and building the static property index.
     *
     * @param propertyDescriptors bean property descriptors for this class
     */
    @SuppressWarnings("unchecked")
    private void setupProperties(PropertyDescriptor[] propertyDescriptors) {
        if (theCachedClass.isInterface) {
            LinkedList<CachedClass> superClasses = new LinkedList<CachedClass>();
            superClasses.add(ReflectionCache.OBJECT_CLASS);
            Set interfaces = theCachedClass.getInterfaces();

            LinkedList<CachedClass> superInterfaces = new LinkedList<CachedClass>(interfaces);
            // sort interfaces so that we may ensure a deterministic behaviour in case of
            // ambiguous fields (class implementing two interfaces using the same field)
            if (superInterfaces.size()>1) {
                Collections.sort(superInterfaces, CACHED_CLASS_NAME_COMPARATOR);
            }

            SingleKeyHashMap iPropertyIndex = classPropertyIndex.getNotNull(theCachedClass);
            for (CachedClass iclass : superInterfaces) {
                SingleKeyHashMap sPropertyIndex = classPropertyIndex.getNotNull(iclass);
                copyNonPrivateFields(sPropertyIndex, iPropertyIndex);
                addFields(iclass, iPropertyIndex);
            }
            addFields(theCachedClass, iPropertyIndex);

            applyPropertyDescriptors(propertyDescriptors);
            applyStrayPropertyMethods(superClasses, classPropertyIndex, true);

            makeStaticPropertyIndex();
        } else {
            LinkedList<CachedClass> superClasses = getSuperClasses();
            LinkedList<CachedClass> interfaces = new LinkedList<CachedClass>(theCachedClass.getInterfaces());
            // sort interfaces so that we may ensure a deterministic behaviour in case of
            // ambiguous fields (class implementing two interfaces using the same field)
            if (interfaces.size()>1) {
                Collections.sort(interfaces, CACHED_CLASS_NAME_COMPARATOR);
            }

            // if this an Array, then add the special read-only "length" property
            if (theCachedClass.isArray) {
                SingleKeyHashMap map = new SingleKeyHashMap();
                map.put("length", arrayLengthProperty);
                classPropertyIndex.put(theCachedClass, map);
            }

            inheritStaticInterfaceFields(superClasses, new LinkedHashSet(interfaces));
            inheritFields(superClasses);

            applyPropertyDescriptors(propertyDescriptors);

            applyStrayPropertyMethods(superClasses, classPropertyIndex, true);
            // super-view index is built separately so super.property resolves correctly
            applyStrayPropertyMethods(superClasses, classPropertyIndexForSuper, false);

            copyClassPropertyIndexForSuper(classPropertyIndexForSuper);
            makeStaticPropertyIndex();
        }
    }
    /**
     * Builds the static property index from this class's property map, keeping
     * only static fields, static variants of bean properties, and static
     * versions of multiple-setter properties.
     */
    private void makeStaticPropertyIndex() {
        SingleKeyHashMap propertyMap = classPropertyIndex.getNotNull(theCachedClass);
        for (ComplexKeyHashMap.EntryIterator iter = propertyMap.getEntrySetIterator(); iter.hasNext();) {
            SingleKeyHashMap.Entry entry = ((SingleKeyHashMap.Entry) iter.next());
            MetaProperty mp = (MetaProperty) entry.getValue();
            if (mp instanceof CachedField) {
                CachedField mfp = (CachedField) mp;
                // plain fields qualify only when static
                if (!mfp.isStatic()) continue;
            } else if (mp instanceof MetaBeanProperty) {
                // keep only the static portion (getter/setter/field) of the bean property
                MetaProperty result = establishStaticMetaProperty(mp);
                if (result == null) continue;
                else {
                    mp = result;
                }
            } else if (mp instanceof MultipleSetterProperty) {
                MultipleSetterProperty msp = (MultipleSetterProperty) mp;
                mp = msp.createStaticVersion();
            } else {
                continue; // ignore all other types
            }
            staticPropertyIndex.put(entry.getKey(), mp);
        }
    }
    /**
     * Derives the statically-accessible portion of a MetaBeanProperty.
     *
     * <p>The local booleans read as "usable from a static context": true when the
     * corresponding accessor/field is either absent or static. Returns null when
     * every part is a non-static member; otherwise returns a MetaProperty built
     * from only the static parts (possibly the original mp unchanged).
     *
     * @param mp must be a MetaBeanProperty (cast unconditionally)
     */
    private MetaProperty establishStaticMetaProperty(MetaProperty mp) {
        MetaBeanProperty mbp = (MetaBeanProperty) mp;
        MetaProperty result = null;
        final MetaMethod getterMethod = mbp.getGetter();
        final MetaMethod setterMethod = mbp.getSetter();
        final CachedField metaField = mbp.getField();

        boolean getter = getterMethod == null || getterMethod.isStatic();
        boolean setter = setterMethod == null || setterMethod.isStatic();
        boolean field = metaField == null || metaField.isStatic();

        if (!getter && !setter && !field) {
            return result;
        } else {
            final String propertyName = mbp.getName();
            final Class propertyType = mbp.getType();

            if (setter && getter) {
                if (field) {
                    result = mbp; // nothing to do
                } else {
                    // drop the non-static field, keep both accessors
                    result = new MetaBeanProperty(propertyName, propertyType, getterMethod, setterMethod);
                }
            } else if (getter && !setter) {
                if (getterMethod == null) {
                    // no getter at all: only the (static) field remains
                    result = metaField;
                } else {
                    MetaBeanProperty newmp = new MetaBeanProperty(propertyName, propertyType, getterMethod, null);
                    if (field) newmp.setField(metaField);
                    result = newmp;
                }
            } else if (setter && !getter) {
                if (setterMethod == null) {
                    result = metaField;
                } else {
                    MetaBeanProperty newmp = new MetaBeanProperty(propertyName, propertyType, null, setterMethod);
                    if (field) newmp.setField(metaField);
                    result = newmp;
                }
            } else
                result = metaField;
        }
        return result;
    }
private void copyClassPropertyIndexForSuper(Index dest) {
for (ComplexKeyHashMap.EntryIterator iter = classPropertyIndex.getEntrySetIterator(); iter.hasNext();) {
SingleKeyHashMap.Entry entry = (SingleKeyHashMap.Entry) iter.next();
SingleKeyHashMap newVal = new SingleKeyHashMap();
dest.put((CachedClass) entry.getKey(), newVal);
}
}
private void inheritStaticInterfaceFields(LinkedList superClasses, Set interfaces) {
for (Iterator interfaceIter = interfaces.iterator(); interfaceIter.hasNext();) {
CachedClass iclass = (CachedClass) interfaceIter.next();
SingleKeyHashMap iPropertyIndex = classPropertyIndex.getNotNull(iclass);
addFields(iclass, iPropertyIndex);
for (Iterator classIter = superClasses.iterator(); classIter.hasNext();) {
CachedClass sclass = (CachedClass) classIter.next();
if (!iclass.getTheClass().isAssignableFrom(sclass.getTheClass())) continue;
SingleKeyHashMap sPropertyIndex = classPropertyIndex.getNotNull(sclass);
copyNonPrivateFields(iPropertyIndex, sPropertyIndex);
}
}
}
private void inheritFields(LinkedList<CachedClass> superClasses) {
SingleKeyHashMap last = null;
for (CachedClass klass : superClasses) {
SingleKeyHashMap propertyIndex = classPropertyIndex.getNotNull(klass);
if (last != null) {
copyNonPrivateFields(last, propertyIndex);
}
last = propertyIndex;
addFields(klass, propertyIndex);
}
}
private void addFields(final CachedClass klass, SingleKeyHashMap propertyIndex) {
CachedField[] fields = klass.getFields();
for (CachedField field : fields) {
propertyIndex.put(field.getName(), field);
}
}
private void copyNonPrivateFields(SingleKeyHashMap from, SingleKeyHashMap to) {
for (ComplexKeyHashMap.EntryIterator iter = from.getEntrySetIterator(); iter.hasNext();) {
SingleKeyHashMap.Entry entry = (SingleKeyHashMap.Entry) iter.next();
CachedField mfp = (CachedField) entry.getValue();
if (!Modifier.isPublic(mfp.getModifiers()) && !Modifier.isProtected(mfp.getModifiers())) continue;
to.put(entry.getKey(), mfp);
}
}
    /**
     * Scans each class's method index for getter/setter/is* methods that are not
     * backed by a PropertyDescriptor and registers them as bean properties in
     * the given property index.
     *
     * @param superClasses       classes to scan, superclass-first
     * @param classPropertyIndex the index to populate (this-view or super-view)
     * @param isThis             true to use e.methods, false to use e.methodsForSuper
     */
    private void applyStrayPropertyMethods(LinkedList<CachedClass> superClasses, Index classPropertyIndex, boolean isThis) {
        // now look for any stray getters that may be used to define a property
        for (CachedClass klass : superClasses) {
            MetaMethodIndex.Header header = metaMethodIndex.getHeader(klass.getTheClass());
            SingleKeyHashMap propertyIndex = classPropertyIndex.getNotNull(klass);
            for (MetaMethodIndex.Entry e = header.head; e != null; e = e.nextClassEntry) {
                String methodName = e.name;
                // name too short?
                // "isX" needs >= 3 chars, "getX"/"setX" need >= 4
                if (methodName.length() < 3 ||
                        (!methodName.startsWith("is") && methodName.length() < 4)) continue;
                // possible getter/setter?
                boolean isGetter = methodName.startsWith("get") || methodName.startsWith("is");
                boolean isBooleanGetter = methodName.startsWith("is");
                boolean isSetter = methodName.startsWith("set");
                if (!isGetter && !isSetter) continue;
                Object propertyMethods = filterPropertyMethod(isThis ? e.methods : e.methodsForSuper, isGetter, isBooleanGetter);
                if (propertyMethods == null) continue;
                String propName = getPropName(methodName);
                if (propertyMethods instanceof MetaMethod) {
                    createMetaBeanProperty(propertyIndex, propName, isGetter, (MetaMethod) propertyMethods);
                } else {
                    // filterPropertyMethod returns a list only for multiple setters
                    LinkedList<MetaMethod> methods = (LinkedList<MetaMethod>) propertyMethods;
                    for (MetaMethod m: methods) {
                        createMetaBeanProperty(propertyIndex, propName, isGetter, m);
                    }
                }
            }
        }
    }
private static final ConcurrentMap<String, String> PROP_NAMES = new ConcurrentHashMap<String, String>(1024);
private String getPropName(String methodName) {
String name = PROP_NAMES.get(methodName);
if (name == null) {
// assume "is" or "[gs]et"
String stripped = methodName.startsWith("is") ? methodName.substring(2) : methodName.substring(3);
String propName = java.beans.Introspector.decapitalize(stripped);
PROP_NAMES.putIfAbsent(methodName, propName);
name = PROP_NAMES.get(methodName);
}
return name;
}
    /**
     * Merges a newly discovered getter/setter method into an existing property
     * entry (or creates a new one when mp is null).
     *
     * <p>A CachedField is wrapped into a MetaBeanProperty keeping the field; a
     * second, conflicting setter on a MetaBeanProperty promotes the entry to a
     * MultipleSetterProperty.
     *
     * @return the property to store; may be mp itself (mutated) or a replacement
     * @throws GroovyBugError for an unrecognised MetaProperty subtype
     */
    private MetaProperty makeReplacementMetaProperty(MetaProperty mp, String propName, boolean isGetter, MetaMethod propertyMethod) {
        if (mp == null) {
            if (isGetter) {
                return new MetaBeanProperty(propName,
                        propertyMethod.getReturnType(),
                        propertyMethod, null);
            } else {
                //isSetter
                return new MetaBeanProperty(propName,
                        propertyMethod.getParameterTypes()[0].getTheClass(),
                        null, propertyMethod);
            }
        }

        if (mp instanceof CachedField) {
            // wrap the plain field together with the new accessor
            CachedField mfp = (CachedField) mp;
            MetaBeanProperty mbp = new MetaBeanProperty(propName, mfp.getType(),
                    isGetter? propertyMethod: null,
                    isGetter? null: propertyMethod);
            mbp.setField(mfp);
            return mbp;
        } else if (mp instanceof MultipleSetterProperty) {
            MultipleSetterProperty msp = (MultipleSetterProperty) mp;
            if (isGetter) {
                msp.setGetter(propertyMethod);
            }
            return msp;
        } else if (mp instanceof MetaBeanProperty) {
            MetaBeanProperty mbp = (MetaBeanProperty) mp;
            if (isGetter) {
                mbp.setGetter(propertyMethod);
                return mbp;
            } else if (mbp.getSetter()==null || mbp.getSetter()==propertyMethod) {
                mbp.setSetter(propertyMethod);
                return mbp;
            } else {
                // a different setter already exists: keep both via MultipleSetterProperty
                MultipleSetterProperty msp = new MultipleSetterProperty(propName);
                msp.setField(mbp.getField());
                msp.setGetter(mbp.getGetter());
                return msp;
            }
        } else {
            throw new GroovyBugError("unknown MetaProperty class used. Class is " + mp.getClass());
        }
    }
private void createMetaBeanProperty(SingleKeyHashMap propertyIndex, String propName, boolean isGetter, MetaMethod propertyMethod) {
// is this property already accounted for?
MetaProperty mp = (MetaProperty) propertyIndex.get(propName);
MetaProperty newMp = makeReplacementMetaProperty(mp, propName, isGetter, propertyMethod);
if (newMp!=mp) {
propertyIndex.put(propName, newMp);
}
}
protected void applyPropertyDescriptors(PropertyDescriptor[] propertyDescriptors) {
// now iterate over the map of property descriptors and generate
// MetaBeanProperty objects
for (PropertyDescriptor pd : propertyDescriptors) {
// skip if the property type is unknown (this seems to be the case if the
// property descriptor is based on a setX() method that has two parameters,
// which is not a valid property)
if (pd.getPropertyType() == null)
continue;
// get the getter method
Method method = pd.getReadMethod();
MetaMethod getter;
if (method != null) {
CachedMethod cachedGetter = CachedMethod.find(method);
getter = cachedGetter == null ? null : findMethod(cachedGetter);
} else {
getter = null;
}
// get the setter method
MetaMethod setter;
method = pd.getWriteMethod();
if (method != null) {
CachedMethod cachedSetter = CachedMethod.find(method);
setter = cachedSetter == null ? null : findMethod(cachedSetter);
} else {
setter = null;
}
// now create the MetaProperty object
MetaBeanProperty mp = new MetaBeanProperty(pd.getName(), pd.getPropertyType(), getter, setter);
addMetaBeanProperty(mp);
}
}
    /**
     * Adds a new MetaBeanProperty to this MetaClass
     *
     * <p>If the property has a statically usable portion it goes into the static
     * property index; otherwise it is stored in the class property index,
     * preserving any field already known under the same name.
     *
     * @param mp The MetaBeanProperty
     */
    public void addMetaBeanProperty(MetaBeanProperty mp) {

        MetaProperty staticProperty = establishStaticMetaProperty(mp);
        if (staticProperty != null) {
            // NOTE(review): the computed staticProperty is discarded and the full mp
            // is stored instead - confirm this is intentional
            staticPropertyIndex.put(mp.getName(), mp);
        } else {
            SingleKeyHashMap propertyMap = classPropertyIndex.getNotNull(theCachedClass);
            //keep field
            CachedField field;
            MetaProperty old = (MetaProperty) propertyMap.get(mp.getName());
            if (old != null) {
                if (old instanceof MetaBeanProperty) {
                    field = ((MetaBeanProperty) old).getField();
                } else if (old instanceof MultipleSetterProperty) {
                    field = ((MultipleSetterProperty)old).getField();
                } else {
                    field = (CachedField) old;
                }
                mp.setField(field);
            }

            // put it in the list
            // this will overwrite a possible field property
            propertyMap.put(mp.getName(), mp);
        }
    }
    /**
     * <p>Sets a property on the given receiver to the specified value. The sender is the class that is requesting the property change on the object.
     * The MetaClass will attempt to establish the setter to invoke based on the name and value provided.
     *
     * <p>The useSuper and fromInsideClass help the Groovy runtime perform optimisations on the call to go directly
     * to the super class if necessary
     *
     * @param sender The java.lang.Class instance that is mutating the property
     * @param object The Object which the property is being set on
     * @param name The name of the property
     * @param newValue The new value of the property to set
     * @param useSuper Whether the call is to a super class property
     * @param fromInsideClass Whether the call was invoked from the inside or the outside of the class.
     */
public void setProperty(Class sender, Object object, String name, Object newValue, boolean useSuper, boolean fromInsideClass) {
checkInitalised();
//----------------------------------------------------------------------
// handling of static
//----------------------------------------------------------------------
boolean isStatic = theClass != Class.class && object instanceof Class;
if (isStatic && object != theClass) {
MetaClass mc = registry.getMetaClass((Class) object);
mc.getProperty(sender, object, name, useSuper, fromInsideClass);
return;
}
//----------------------------------------------------------------------
// Unwrap wrapped values fo now - the new MOP will handle them properly
//----------------------------------------------------------------------
if (newValue instanceof Wrapper) newValue = ((Wrapper) newValue).unwrap();
MetaMethod method = null;
Object[] arguments = null;
//----------------------------------------------------------------------
// setter
//----------------------------------------------------------------------
MetaProperty mp = getMetaProperty(sender, name, useSuper, isStatic);
MetaProperty field = null;
if (mp != null) {
if (mp instanceof MetaBeanProperty) {
MetaBeanProperty mbp = (MetaBeanProperty) mp;
method = mbp.getSetter();
MetaProperty f = mbp.getField();
if (method != null || (f != null && !Modifier.isFinal(f.getModifiers()))) {
arguments = new Object[]{newValue};
field = f;
}
} else {
field = mp;
}
}
// check for a category method named like a setter
if (!useSuper && !isStatic && GroovyCategorySupport.hasCategoryInCurrentThread()
&& name.length() > 0) {
String getterName = GroovyCategorySupport.getPropertyCategorySetterName(name);
if (getterName != null) {
MetaMethod categoryMethod = getCategoryMethodSetter(sender, getterName, false);
if (categoryMethod != null) {
method = categoryMethod;
arguments = new Object[]{newValue};
}
}
}
//----------------------------------------------------------------------
// listener method
//----------------------------------------------------------------------
boolean ambiguousListener = false;
if (method == null) {
method = (MetaMethod) listeners.get(name);
ambiguousListener = method == AMBIGUOUS_LISTENER_METHOD;
if (method != null &&
!ambiguousListener &&
newValue instanceof Closure) {
// let's create a dynamic proxy
Object proxy = Proxy.newProxyInstance(
theClass.getClassLoader(),
new Class[]{method.getParameterTypes()[0].getTheClass()},
new ConvertedClosure((Closure) newValue, name));
arguments = new Object[]{proxy};
newValue = proxy;
} else {
method = null;
}
}
//----------------------------------------------------------------------
// field
//----------------------------------------------------------------------
if (method == null && field != null) {
if (Modifier.isFinal(field.getModifiers())) {
throw new ReadOnlyPropertyException(name, theClass);
}
if(!(this.isMap && isPrivateOrPkgPrivate(field.getModifiers()))) {
field.setProperty(object, newValue);
return;
}
}
//----------------------------------------------------------------------
// generic set method
//----------------------------------------------------------------------
// check for a generic get method provided through a category
if (method == null && !useSuper && !isStatic && GroovyCategorySupport.hasCategoryInCurrentThread()) {
method = getCategoryMethodSetter(sender, "set", true);
if (method != null) arguments = new Object[]{name, newValue};
}
// the generic method is valid, if available (!=null), if static or
// if it is not static and we do no static access
if (method == null && genericSetMethod != null && !(!genericSetMethod.isStatic() && isStatic)) {
arguments = new Object[]{name, newValue};
method = genericSetMethod;
}
//----------------------------------------------------------------------
// executing the getter method
//----------------------------------------------------------------------
if (method != null) {
if (arguments.length == 1) {
newValue = DefaultTypeTransformation.castToType(
newValue,
method.getParameterTypes()[0].getTheClass());
arguments[0] = newValue;
} else {
newValue = DefaultTypeTransformation.castToType(
newValue,
method.getParameterTypes()[1].getTheClass());
arguments[1] = newValue;
}
method.doMethodInvoke(object, arguments);
return;
}
//----------------------------------------------------------------------
// turn setProperty on a Map to put on the Map itself
//----------------------------------------------------------------------
if (!isStatic && this.isMap) {
((Map) object).put(name, newValue);
return;
}
//----------------------------------------------------------------------
// error due to missing method/field
//----------------------------------------------------------------------
if (ambiguousListener) {
throw new GroovyRuntimeException("There are multiple listeners for the property " + name + ". Please do not use the bean short form to access this listener.");
}
if (mp != null) {
throw new ReadOnlyPropertyException(name, theClass);
}
invokeMissingProperty(object, name, newValue, false);
}
private boolean isPrivateOrPkgPrivate(int mod) {
return !Modifier.isProtected(mod) && !Modifier.isPublic(mod);
}
    /**
     * Looks up a MetaProperty by name on behalf of the given requesting class.
     *
     * @param _clazz    the class performing the lookup (may differ from theClass)
     * @param name      the property name
     * @param useSuper  whether this is a super-class property access
     * @param useStatic whether this is a static property access
     * @return the indexed MetaProperty, or null when none is found
     */
    private MetaProperty getMetaProperty(Class _clazz, String name, boolean useSuper, boolean useStatic) {
        if (_clazz == theClass)
            // fast path: same class, use the simpler two-argument overload
            return getMetaProperty(name, useStatic);
        CachedClass clazz = ReflectionCache.getCachedClass(_clazz);
        while (true) {
            // pick the index that matches the access kind
            SingleKeyHashMap propertyMap;
            if (useStatic) {
                propertyMap = staticPropertyIndex;
            } else if (useSuper) {
                propertyMap = classPropertyIndexForSuper.getNullable(clazz);
            } else {
                propertyMap = classPropertyIndex.getNullable(clazz);
            }
            if (propertyMap == null) {
                if (clazz != theCachedClass) {
                    // no index for the requested class: retry once using this
                    // meta class' own cached class before giving up
                    clazz = theCachedClass;
                    continue;
                } else {
                    return null;
                }
            }
            return (MetaProperty) propertyMap.get(name);
        }
    }
private MetaProperty getMetaProperty(String name, boolean useStatic) {
CachedClass clazz = theCachedClass;
SingleKeyHashMap propertyMap;
if (useStatic) {
propertyMap = staticPropertyIndex;
} else {
propertyMap = classPropertyIndex.getNullable(clazz);
}
if (propertyMap == null) {
return null;
}
return (MetaProperty) propertyMap.get(name);
}
    /**
     * Retrieves the value of an attribute (field). This method is to support the Groovy runtime and not for general client API usage.
     *
     * @param sender The class of the object that requested the attribute (not consulted by this overload)
     * @param receiver The instance
     * @param messageName The name of the attribute
     * @param useSuper Whether to look-up on the super class or not (not consulted by this overload)
     * @return The attribute value
     */
    public Object getAttribute(Class sender, Object receiver, String messageName, boolean useSuper) {
        // delegates to the two-argument form; sender and useSuper are ignored here
        return getAttribute(receiver, messageName);
    }
    /**
     * Retrieves the value of an attribute (field). This method is to support the Groovy runtime and not for general client API usage.
     *
     * @param sender The class of the object that requested the attribute
     * @param object The instance the attribute is to be retrieved from
     * @param attribute The name of the attribute
     * @param useSuper Whether to look-up on the super class or not
     * @param fromInsideClass Whether the call was invoked from the inside or the outside of the class.
     *
     * @return The attribute value
     * @throws MissingFieldException when no matching field can be found
     */
    public Object getAttribute(Class sender, Object object, String attribute, boolean useSuper, boolean fromInsideClass) {
        checkInitalised();
        // static access: receiver is a Class object other than theClass itself,
        // so delegate to that class' own meta class
        boolean isStatic = theClass != Class.class && object instanceof Class;
        if (isStatic && object != theClass) {
            MetaClass mc = registry.getMetaClass((Class) object);
            return mc.getAttribute(sender, object, attribute, useSuper);
        }
        MetaProperty mp = getMetaProperty(sender, attribute, useSuper, isStatic);
        if (mp != null) {
            if (mp instanceof MetaBeanProperty) {
                // attribute access bypasses getters/setters and targets the backing field
                MetaBeanProperty mbp = (MetaBeanProperty) mp;
                mp = mbp.getField();
            }
            try {
                // delegate the get operation to the metaproperty
                if (mp != null) return mp.getProperty(object);
            } catch (Exception e) {
                throw new GroovyRuntimeException("Cannot read field: " + attribute, e);
            }
        }
        throw new MissingFieldException(attribute, theClass);
    }
/**
* <p>Sets an attribute on the given receiver for the specified arguments. The sender is the class that is setting the attribute from the object.
* The MetaClass will attempt to establish the method to invoke based on the name and arguments provided.
*
* <p>The isCallToSuper and fromInsideClass help the Groovy runtime perform optimisations on the call to go directly
* to the super class if necessary
*
* @param sender The java.lang.Class instance that is mutating the property
* @param object The Object which the property is being set on
* @param attribute The name of the attribute,
* @param newValue The new value of the attribute to set
* @param useSuper Whether the call is to a super class property
* @param fromInsideClass Whether the call was invoked from the inside or the outside of the class
*/
public void setAttribute(Class sender, Object object, String attribute, Object newValue, boolean useSuper, boolean fromInsideClass) {
checkInitalised();
boolean isStatic = theClass != Class.class && object instanceof Class;
if (isStatic && object != theClass) {
MetaClass mc = registry.getMetaClass((Class) object);
mc.setAttribute(sender, object, attribute, newValue, useSuper, fromInsideClass);
return;
}
MetaProperty mp = getMetaProperty(sender, attribute, useSuper, isStatic);
if (mp != null) {
if (mp instanceof MetaBeanProperty) {
MetaBeanProperty mbp = (MetaBeanProperty) mp;
mp = mbp.getField();
}
if (mp != null) {
mp.setProperty(object, newValue);
return;
}
}
throw new MissingFieldException(attribute, theClass);
}
/**
* Obtains a reference to the original AST for the MetaClass if it is available at runtime
*
* @return The original AST or null if it cannot be returned
*/
public ClassNode getClassNode() {
if (classNode == null && GroovyObject.class.isAssignableFrom(theClass)) {
// let's try load it from the classpath
String groovyFile = theClass.getName();
int idx = groovyFile.indexOf('$');
if (idx > 0) {
groovyFile = groovyFile.substring(0, idx);
}
groovyFile = groovyFile.replace('.', '/') + ".groovy";
//System.out.println("Attempting to load: " + groovyFile);
URL url = theClass.getClassLoader().getResource(groovyFile);
if (url == null) {
url = Thread.currentThread().getContextClassLoader().getResource(groovyFile);
}
if (url != null) {
try {
/**
* todo there is no CompileUnit in scope so class name
* checking won't work but that mostly affects the bytecode
* generation rather than viewing the AST
*/
CompilationUnit.ClassgenCallback search = new CompilationUnit.ClassgenCallback() {
public void call(ClassVisitor writer, ClassNode node) {
if (node.getName().equals(theClass.getName())) {
MetaClassImpl.this.classNode = node;
}
}
};
final ClassLoader parent = theClass.getClassLoader();
CompilationUnit unit = new CompilationUnit();
unit.setClassgenCallback(search);
unit.addSource(url);
unit.compile(Phases.CLASS_GENERATION);
}
catch (Exception e) {
throw new GroovyRuntimeException("Exception thrown parsing: " + groovyFile + ". Reason: " + e, e);
}
}
}
return classNode;
}
/**
* Returns a string representation of this metaclass
*/
public String toString() {
return super.toString() + "[" + theClass + "]";
}
// Implementation methods
//-------------------------------------------------------------------------
/**
* adds a MetaMethod to this class. WARNING: this method will not
* do the neccessary steps for multimethod logic and using this
* method doesn't mean, that a method added here is replacing another
* method from a parent class completely. These steps are usually done
* by initialize, which means if you need these steps, you have to add
* the method before running initialize the first time.
*
* @param method the MetaMethod
* @see #initialize()
*/
public void addMetaMethod(MetaMethod method) {
if (isInitialized()) {
throw new RuntimeException("Already initialized, cannot add new method: " + method);
}
final CachedClass declaringClass = method.getDeclaringClass();
addMetaMethodToIndex(method, metaMethodIndex.getHeader(declaringClass.getTheClass()));
}
protected void addMetaMethodToIndex(MetaMethod method, MetaMethodIndex.Header header) {
checkIfStdMethod(method);
String name = method.getName();
MetaMethodIndex.Entry e = metaMethodIndex.getOrPutMethods(name, header);
if (method.isStatic()) {
e.staticMethods = metaMethodIndex.addMethodToList(e.staticMethods, method);
}
e.methods = metaMethodIndex.addMethodToList(e.methods, method);
}
/**
* Checks if the metaMethod is a method from the GroovyObject interface such as setProperty, getProperty and invokeMethod
*
* @param metaMethod The metaMethod instance
* @see GroovyObject
*/
protected final void checkIfGroovyObjectMethod(MetaMethod metaMethod) {
if (metaMethod instanceof ClosureMetaMethod || metaMethod instanceof MixinInstanceMetaMethod) {
if(isGetPropertyMethod(metaMethod)) {
getPropertyMethod = metaMethod;
}
else if(isInvokeMethod(metaMethod)) {
invokeMethodMethod = metaMethod;
}
else if(isSetPropertyMethod(metaMethod)) {
setPropertyMethod = metaMethod;
}
}
}
private boolean isSetPropertyMethod(MetaMethod metaMethod) {
return SET_PROPERTY_METHOD.equals(metaMethod.getName()) && metaMethod.getParameterTypes().length == 2;
}
    // True when the method is named like GroovyObject#getProperty (arity not checked).
    private boolean isGetPropertyMethod(MetaMethod metaMethod) {
        return GET_PROPERTY_METHOD.equals(metaMethod.getName());
    }
private boolean isInvokeMethod(MetaMethod metaMethod) {
return INVOKE_METHOD_METHOD.equals(metaMethod.getName()) && metaMethod.getParameterTypes().length == 2;
}
    /**
     * Inspects a method being added to the index and records it in the dedicated
     * slots for the "special" methods the runtime dispatches to: generic get/set,
     * propertyMissing (get/set arity), methodMissing and the GroovyObject hooks.
     */
    private void checkIfStdMethod(MetaMethod method) {
        checkIfGroovyObjectMethod(method);
        // first matching generic get/set wins (guarded by null checks)
        if (isGenericGetMethod(method) && genericGetMethod == null) {
            genericGetMethod = method;
        } else if (MetaClassHelper.isGenericSetMethod(method) && genericSetMethod == null) {
            genericSetMethod = method;
        }
        if (method.getName().equals(PROPERTY_MISSING)) {
            CachedClass[] parameterTypes = method.getParameterTypes();
            if (parameterTypes.length == 1) {
                // NOTE(review): unlike propertyMissingSet below, this slot has no null
                // guard, so the last matching method wins — confirm this is intended
                propertyMissingGet = method;
            }
        }
        if (propertyMissingSet == null && method.getName().equals(PROPERTY_MISSING)) {
            CachedClass[] parameterTypes = method.getParameterTypes();
            if (parameterTypes.length == 2) {
                propertyMissingSet = method;
            }
        }
        if (method.getName().equals(METHOD_MISSING)) {
            CachedClass[] parameterTypes = method.getParameterTypes();
            // methodMissing must be exactly (String, Object)
            if (parameterTypes.length == 2
                    && parameterTypes[0].getTheClass() == String.class
                    && parameterTypes[1].getTheClass() == Object.class) {
                methodMissing = method;
            }
        }
        if (theCachedClass.isNumber) {
            // let the number-math optimisation bookkeeping see this method too
            NumberMathModificationInfo.instance.checkIfStdMethod (method);
        }
    }
    /** Whether {@link #initialize()} has completed for this meta class. */
    protected boolean isInitialized() {
        return initialized;
    }
/**
* return false: add method
* null: ignore method
* true: replace
*/
private Boolean getMatchKindForCategory(MetaMethod aMethod, MetaMethod categoryMethod) {
CachedClass[] params1 = aMethod.getParameterTypes();
CachedClass[] params2 = categoryMethod.getParameterTypes();
if (params1.length != params2.length) return Boolean.FALSE;
for (int i = 0; i < params1.length; i++) {
if (params1[i] != params2[i]) return Boolean.FALSE;
}
Class aMethodClass = aMethod.getDeclaringClass().getTheClass();
Class categoryMethodClass = categoryMethod.getDeclaringClass().getTheClass();
if (aMethodClass==categoryMethodClass) return Boolean.TRUE;
boolean match = aMethodClass.isAssignableFrom(categoryMethodClass);
if (match) return Boolean.TRUE;
return null;
}
private void filterMatchingMethodForCategory(FastArray list, MetaMethod method) {
int len = list.size();
if (len==0) {
list.add(method);
return;
}
Object data[] = list.getArray();
for (int j = 0; j != len; ++j) {
MetaMethod aMethod = (MetaMethod) data[j];
Boolean match = getMatchKindForCategory(aMethod, method);
// true == replace
if (match==Boolean.TRUE) {
list.set(j, method);
return;
// null == ignore (we have a better method already)
} else if (match==null) {
return;
}
}
// the casese true and null for a match are through, the
// remaining case is false and that means adding the method
// to our list
list.add(method);
}
private int findMatchingMethod(CachedMethod[] data, int from, int to, MetaMethod method) {
for (int j = from; j <= to; ++j) {
CachedMethod aMethod = data[j];
CachedClass[] params1 = aMethod.getParameterTypes();
CachedClass[] params2 = method.getParameterTypes();
if (params1.length == params2.length) {
boolean matches = true;
for (int i = 0; i < params1.length; i++) {
if (params1[i] != params2[i]) {
matches = false;
break;
}
}
if (matches) {
return j;
}
}
}
return -1;
}
    /**
     * Finds the indexed MetaMethod equivalent to the given cached method,
     * falling back to the argument itself when no equivalent is indexed.
     *
     * @return the matching method which should be found
     */
    private MetaMethod findMethod(CachedMethod aMethod) {
        // all indexed methods with the same name
        Object methods = getMethods(theClass, aMethod.getName(), false);
        if (methods instanceof FastArray) {
            FastArray m = (FastArray) methods;
            final int len = m.size;
            final Object data[] = m.getArray();
            for (int i = 0; i != len; ++i) {
                MetaMethod method = (MetaMethod) data[i];
                if (method.isMethod(aMethod)) {
                    return method;
                }
            }
        }
        else {
            // single candidate: compare name, return type and parameter types
            // NOTE(review): assumes getMethods never returns null here — confirm
            MetaMethod method = (MetaMethod) methods;
            if (method.getName().equals(aMethod.getName())
                    // TODO: should be better check for case when only diff in modifiers can be SYNTHETIC flag
                    // && method.getModifiers() == aMethod.getModifiers()
                    && method.getReturnType().equals(aMethod.getReturnType())
                    && MetaMethod.equal(method.getParameterTypes(), aMethod.getParameterTypes())) {
                return method;
            }
        }
        // no indexed equivalent: hand back the method we were given
        return aMethod;
    }
/**
* Chooses the correct method to use from a list of methods which match by
* name.
*
* @param methodOrList the possible methods to choose from
* @param arguments
*/
protected Object chooseMethod(String methodName, Object methodOrList, Class[] arguments) {
Object method = chooseMethodInternal(methodName, methodOrList, arguments);
if (method instanceof GeneratedMetaMethod.Proxy)
return ((GeneratedMetaMethod.Proxy)method).proxy ();
return method;
}
    /**
     * Core method-selection logic: from a single MetaMethod or a FastArray of
     * candidates, picks the one whose parameters accept the given argument types.
     *
     * @return the chosen method, or null when nothing matches
     * @throws MethodSelectionException when the empty-parameter selection fails
     */
    private Object chooseMethodInternal(String methodName, Object methodOrList, Class[] arguments) {
        // single candidate: accept it iff its parameters are valid for the arguments
        if (methodOrList instanceof MetaMethod) {
            if (((ParameterTypes) methodOrList).isValidMethod(arguments)) {
                return methodOrList;
            }
            return null;
        }
        FastArray methods = (FastArray) methodOrList;
        if (methods==null) return null;
        int methodCount = methods.size();
        if (methodCount <= 0) {
            return null;
        } else if (methodCount == 1) {
            Object method = methods.get(0);
            if (((ParameterTypes) method).isValidMethod(arguments)) {
                return method;
            }
            return null;
        }
        Object answer;
        if (arguments == null || arguments.length == 0) {
            answer = MetaClassHelper.chooseEmptyMethodParams(methods);
        } else {
            // collect matches lazily: a bare method for one match,
            // promoted to an ArrayList only when a second match appears
            Object matchingMethods = null;
            final int len = methods.size;
            Object data[] = methods.getArray();
            for (int i = 0; i != len; ++i) {
                Object method = data[i];
                if (((ParameterTypes) method).isValidMethod(arguments)) {
                    if (matchingMethods == null)
                        matchingMethods = method;
                    else
                        if (matchingMethods instanceof ArrayList)
                            ((ArrayList)matchingMethods).add(method);
                        else {
                            List arr = new ArrayList(4);
                            arr.add(matchingMethods);
                            arr.add(method);
                            matchingMethods = arr;
                        }
                }
            }
            if (matchingMethods == null) {
                return null;
            } else if (!(matchingMethods instanceof ArrayList)) {
                // exactly one match
                return matchingMethods;
            }
            // several candidates: resolve by parameter distance
            return chooseMostSpecificParams(methodName, (List) matchingMethods, arguments);
        }
        if (answer != null) {
            return answer;
        }
        throw new MethodSelectionException(methodName, methods, arguments);
    }
    /**
     * Resolves overload ambiguity by computing a "parameter distance" for each
     * candidate and keeping the candidate(s) with the smallest distance.
     *
     * @return the unique closest method, or null when no candidate remains
     * @throws GroovyRuntimeException when two or more candidates tie at the
     *         minimum distance (ambiguous overloading)
     */
    private Object chooseMostSpecificParams(String name, List matchingMethods, Class[] arguments) {
        long matchesDistance = -1;
        LinkedList matches = new LinkedList();
        for (Object method : matchingMethods) {
            ParameterTypes paramTypes = (ParameterTypes) method;
            long dist = MetaClassHelper.calculateParameterDistance(arguments, paramTypes);
            // distance 0 is a perfect match and wins immediately
            if (dist == 0) return method;
            if (matches.isEmpty()) {
                matches.add(method);
                matchesDistance = dist;
            } else if (dist < matchesDistance) {
                // strictly better: restart the candidate set
                matchesDistance = dist;
                matches.clear();
                matches.add(method);
            } else if (dist == matchesDistance) {
                matches.add(method);
            }
        }
        if (matches.size() == 1) {
            return matches.getFirst();
        }
        if (matches.isEmpty()) {
            return null;
        }
        //more than one matching method found --> ambiguous!
        StringBuilder msg = new StringBuilder("Ambiguous method overloading for method ");
        msg.append(theClass.getName()).append("#").append(name)
                .append(".\nCannot resolve which method to invoke for ")
                .append(InvokerHelper.toString(arguments))
                .append(" due to overlapping prototypes between:");
        for (final Object matche : matches) {
            Class[] types = ((ParameterTypes) matche).getNativeParameterTypes();
            msg.append("\n\t").append(InvokerHelper.toString(types));
        }
        throw new GroovyRuntimeException(msg.toString());
    }
private boolean isGenericGetMethod(MetaMethod method) {
if (method.getName().equals("get")) {
CachedClass[] parameterTypes = method.getParameterTypes();
return parameterTypes.length == 1 && parameterTypes[0].getTheClass() == String.class;
}
return false;
}
    /**
     * Complete the initialisation process. After this method
     * is called no methods should be added to the meta class.
     * Invocation of methods or access to fields/properties is
     * forbidden unless this method is called. This method
     * should contain any initialisation code, taking a longer
     * time to complete. An example is the creation of the
     * Reflector. It is suggested to synchronize this
     * method.
     */
    public synchronized void initialize() {
        if (!isInitialized()) {
            fillMethodIndex();
            try {
                addProperties();
            } catch (Throwable e) {
                // on non-Android platforms introspection failures propagate;
                // on Android they are tolerated (introspection may be unavailable)
                if (!AndroidSupport.isRunningAndroid()) {
                    ExceptionUtils.sneakyThrow(e);
                }
                // Introspection failure...
                // May happen in Android
            }
            // mark initialized only after the indexes are fully built
            initialized = true;
        }
    }
private void addProperties() {
BeanInfo info;
final Class stopClass;
// introspect
try {
if (isBeanDerivative(theClass)) {
info = (BeanInfo) AccessController.doPrivileged(new PrivilegedExceptionAction() {
public Object run() throws IntrospectionException {
return Introspector.getBeanInfo(theClass, Introspector.IGNORE_ALL_BEANINFO);
}
});
} else {
info = (BeanInfo) AccessController.doPrivileged(new PrivilegedExceptionAction() {
public Object run() throws IntrospectionException {
return Introspector.getBeanInfo(theClass);
}
});
}
} catch (PrivilegedActionException pae) {
throw new GroovyRuntimeException("exception during bean introspection", pae.getException());
}
PropertyDescriptor[] descriptors = info.getPropertyDescriptors();
// build up the metaproperties based on the public fields, property descriptors,
// and the getters and setters
setupProperties(descriptors);
EventSetDescriptor[] eventDescriptors = info.getEventSetDescriptors();
for (EventSetDescriptor descriptor : eventDescriptors) {
Method[] listenerMethods = descriptor.getListenerMethods();
for (Method listenerMethod : listenerMethods) {
final MetaMethod metaMethod = CachedMethod.find(descriptor.getAddListenerMethod());
// GROOVY-5202
// there might be a non public listener of some kind
// we skip that here
if (metaMethod==null) continue;
addToAllMethodsIfPublic(metaMethod);
String name = listenerMethod.getName();
if (listeners.containsKey(name)) {
listeners.put(name, AMBIGUOUS_LISTENER_METHOD);
} else {
listeners.put(name, metaMethod);
}
}
}
}
private boolean isBeanDerivative(Class theClass) {
Class next = theClass;
while (next != null) {
if (Arrays.asList(next.getInterfaces()).contains(BeanInfo.class)) return true;
next = next.getSuperclass();
}
return false;
}
private void addToAllMethodsIfPublic(MetaMethod metaMethod) {
if (Modifier.isPublic(metaMethod.getModifiers()))
allMethods.add(metaMethod);
}
    /**
     * Retrieves the list of MetaMethods held by the class. This list does not include MetaMethods added by groovy.lang.ExpandoMetaClass.
     *
     * @return A list of MetaMethods (the live internal list, not a copy)
     */
    public List<MetaMethod> getMethods() {
        return allMethods;
    }
    /**
     * Retrieves the list of MetaMethods held by this class. This list includes MetaMethods added by groovy.lang.ExpandoMetaClass.
     *
     * @return A list of MetaMethods (a defensive copy of the internal set)
     */
    public List<MetaMethod> getMetaMethods() {
        return new ArrayList<MetaMethod>(newGroovyMethodsSet);
    }
    /** Clears the per-name method caches after a static method change. */
    protected void dropStaticMethodCache(String name) {
        metaMethodIndex.clearCaches(name);
    }
    /** Clears the per-name method caches after an instance method change. */
    protected void dropMethodCache(String name) {
        metaMethodIndex.clearCaches(name);
    }
/**
* Create a CallSite
*/
public CallSite createPojoCallSite(CallSite site, Object receiver, Object[] args) {
if (!(this instanceof AdaptingMetaClass)) {
Class [] params = MetaClassHelper.convertToTypeArray(args);
MetaMethod metaMethod = getMethodWithCachingInternal(getTheClass(), site, params);
if (metaMethod != null)
return PojoMetaMethodSite.createPojoMetaMethodSite(site, this, metaMethod, params, receiver, args);
}
return new PojoMetaClassSite(site, this);
}
/**
* Create a CallSite
*/
public CallSite createStaticSite(CallSite site, Object[] args) {
if (!(this instanceof AdaptingMetaClass)) {
Class [] params = MetaClassHelper.convertToTypeArray(args);
MetaMethod metaMethod = retrieveStaticMethod(site.getName(), args);
if (metaMethod != null)
return StaticMetaMethodSite.createStaticMetaMethodSite(site, this, metaMethod, params, args);
}
return new StaticMetaClassSite(site, this);
}
    /**
     * Create a CallSite for a POGO (GroovyObject) method invocation. Only attempts
     * a direct meta-method site when no category is active and this is not an
     * adapting meta class; otherwise a generic POGO site is returned.
     */
    public CallSite createPogoCallSite(CallSite site, Object[] args) {
        if (!GroovyCategorySupport.hasCategoryInCurrentThread() && !(this instanceof AdaptingMetaClass)) {
            Class [] params = MetaClassHelper.convertToTypeArray(args);
            CallSite tempSite = site;
            if (site.getName().equals("call") && GeneratedClosure.class.isAssignableFrom(theClass)) {
                // here, we want to point to a method named "doCall" instead of "call"
                // but we don't want to replace the original call site name, otherwise
                // we lose the fact that the original method name was "call" so instead
                // we will point to a metamethod called "doCall"
                // see GROOVY-5806 for details
                tempSite = new AbstractCallSite(site.getArray(),site.getIndex(),"doCall");
            }
            MetaMethod metaMethod = getMethodWithCachingInternal(theClass, tempSite, params);
            if (metaMethod != null)
                return PogoMetaMethodSite.createPogoMetaMethodSite(site, this, metaMethod, params, args);
        }
        return new PogoMetaClassSite(site, this);
    }
/**
* Create a CallSite
*/
public CallSite createPogoCallCurrentSite(CallSite site, Class sender, Object[] args) {
if (!GroovyCategorySupport.hasCategoryInCurrentThread() && !(this instanceof AdaptingMetaClass)) {
Class [] params = MetaClassHelper.convertToTypeArray(args);
MetaMethod metaMethod = getMethodWithCachingInternal(sender, site, params);
if (metaMethod != null)
return PogoMetaMethodSite.createPogoMetaMethodSite(site, this, metaMethod, params, args);
}
return new PogoMetaClassSite(site, this);
}
    /**
     * Create a CallSite for constructor invocation. Tries an exact constructor
     * match first, then the Map-constructor shorthand (no-arg constructor plus
     * property map), including the inner-class variant that takes the enclosing
     * instance; otherwise a generic constructor site is returned.
     */
    public CallSite createConstructorSite(CallSite site, Object[] args) {
        if (!(this instanceof AdaptingMetaClass)) {
            Class[] params = MetaClassHelper.convertToTypeArray(args);
            CachedConstructor constructor = (CachedConstructor) chooseMethod("<init>", constructors, params);
            if (constructor != null) {
                return ConstructorSite.createConstructorSite(site, this,constructor,params, args);
            }
            else {
                if (args.length == 1 && args[0] instanceof Map) {
                    // Groovy map-constructor shorthand: no-arg constructor + property map
                    constructor = (CachedConstructor) chooseMethod("<init>", constructors, MetaClassHelper.EMPTY_TYPE_ARRAY);
                    if (constructor != null) {
                        return new ConstructorSite.NoParamSite(site, this,constructor,params);
                    }
                } else if (args.length == 2 && theClass.getEnclosingClass() != null && args[1] instanceof Map) {
                    // inner-class variant: (enclosingInstance, propertyMap)
                    Class enclosingClass = theClass.getEnclosingClass();
                    String enclosingInstanceParamType = args[0] != null ? args[0].getClass().getName() : "";
                    if(enclosingClass.getName().equals(enclosingInstanceParamType)) {
                        constructor = (CachedConstructor) chooseMethod("<init>", constructors, new Class[]{enclosingClass});
                        if (constructor != null) {
                            return new ConstructorSite.NoParamSiteInnerClass(site, this,constructor,params);
                        }
                    }
                }
            }
        }
        return new MetaClassConstructorSite(site, this);
    }
    /**
     * Returns the ClassInfo for the contained Class.
     *
     * @return The ClassInfo for the contained class.
     */
    public ClassInfo getClassInfo() {
        return theCachedClass.classInfo;
    }
    /**
     * Returns version of the contained Class
     *
     * @return The version of the contained class.
     */
    public int getVersion() {
        return theCachedClass.classInfo.getVersion();
    }
    /**
     * Increments the version of the contained Class, invalidating dependent caches.
     */
    public void incVersion() {
        theCachedClass.classInfo.incVersion();
    }
    /**
     * Retrieves a list of additional MetaMethods held by this class
     *
     * @return A list of MetaMethods (the internal array, not a copy)
     */
    public MetaMethod[] getAdditionalMetaMethods() {
        return additionalMetaMethods;
    }
    /**
     * Walks up the class hierarchy looking for a MetaBeanProperty contributed by
     * a modified mutable meta class of a super class (e.g. an ExpandoMetaClass),
     * falling back to the implemented interfaces.
     *
     * @return the property, or null if none is found
     */
    protected MetaBeanProperty findPropertyInClassHierarchy(String propertyName, CachedClass theClass) {
        MetaBeanProperty property= null;
        if (theClass == null)
            return null;
        final CachedClass superClass = theClass.getCachedSuperClass();
        if (superClass == null)
            return null;
        MetaClass metaClass = this.registry.getMetaClass(superClass.getTheClass());
        // NOTE(review): the super-class and interface searches only run when the
        // super meta class is mutable — confirm non-mutable chains are intended to stop here
        if(metaClass instanceof MutableMetaClass) {
            property = getMetaPropertyFromMutableMetaClass(propertyName,metaClass);
            if(property == null) {
                if(superClass != ReflectionCache.OBJECT_CLASS) {
                    // recurse towards Object
                    property = findPropertyInClassHierarchy(propertyName, superClass);
                }
                if(property == null) {
                    // last resort: search the implemented interfaces
                    final Class[] interfaces = theClass.getTheClass().getInterfaces();
                    property = searchInterfacesForMetaProperty(propertyName, interfaces);
                }
            }
        }
        return property;
    }
private MetaBeanProperty searchInterfacesForMetaProperty(String propertyName, Class[] interfaces) {
MetaBeanProperty property = null;
for (Class anInterface : interfaces) {
MetaClass metaClass = registry.getMetaClass(anInterface);
if (metaClass instanceof MutableMetaClass) {
property = getMetaPropertyFromMutableMetaClass(propertyName, metaClass);
if (property != null) break;
}
Class[] superInterfaces = anInterface.getInterfaces();
if (superInterfaces.length > 0) {
property = searchInterfacesForMetaProperty(propertyName, superInterfaces);
if (property != null) break;
}
}
return property;
}
private MetaBeanProperty getMetaPropertyFromMutableMetaClass(String propertyName, MetaClass metaClass) {
final boolean isModified = ((MutableMetaClass) metaClass).isModified();
if (isModified) {
final MetaProperty metaProperty = metaClass.getMetaProperty(propertyName);
if(metaProperty instanceof MetaBeanProperty)
return (MetaBeanProperty)metaProperty;
}
return null;
}
    /** Hook for subclasses to resolve mixin methods; this base implementation finds none. */
    protected MetaMethod findMixinMethod(String methodName, Class[] arguments) {
        return null;
    }
    /**
     * Searches the whole class hierarchy (superclasses, then interfaces, then
     * the class itself) for the most specific meta method matching the name and
     * argument types. Short-circuits to null when no meta class in the hierarchy
     * has been modified, since only modified meta classes can contribute.
     */
    protected static MetaMethod findMethodInClassHierarchy(Class instanceKlazz, String methodName, Class[] arguments, MetaClass metaClass) {
        if (metaClass instanceof MetaClassImpl) {
            // cheap pre-check: bail out unless some meta class in the hierarchy was modified
            boolean check = false;
            for (ClassInfo ci : ((MetaClassImpl)metaClass).theCachedClass.getHierarchy ()) {
                final MetaClass aClass = ci.getStrongMetaClass();
                if (aClass instanceof MutableMetaClass && ((MutableMetaClass)aClass).isModified()) {
                    check = true;
                    break;
                }
            }
            if (!check)
                return null;
        }
        MetaMethod method = null;
        Class superClass;
        // object arrays (other than Object[]) are treated as subtypes of Object[]
        if (metaClass.getTheClass().isArray() && !metaClass.getTheClass().getComponentType().isPrimitive() && metaClass.getTheClass().getComponentType() != Object.class) {
            superClass = Object[].class;
        }
        else {
            superClass = metaClass.getTheClass().getSuperclass();
        }
        if (superClass != null) {
            // recurse up the superclass chain first
            MetaClass superMetaClass = GroovySystem.getMetaClassRegistry().getMetaClass(superClass);
            method = findMethodInClassHierarchy(instanceKlazz, methodName, arguments, superMetaClass);
        }
        else {
            // interfaces have no superclass: treat Object as their root
            if (metaClass.getTheClass().isInterface()) {
                MetaClass superMetaClass = GroovySystem.getMetaClassRegistry().getMetaClass(Object.class);
                method = findMethodInClassHierarchy(instanceKlazz, methodName, arguments, superMetaClass);
            }
        }
        method = findSubClassMethod(instanceKlazz, methodName, arguments, metaClass, method);
        MetaMethod infMethod = searchInterfacesForMetaMethod(instanceKlazz, methodName, arguments, metaClass);
        if (infMethod != null) {
            if (method == null)
                method = infMethod;
            else
                method = mostSpecific(method, infMethod, instanceKlazz);
        }
        method = findOwnMethod(instanceKlazz, methodName, arguments, metaClass, method);
        return method;
    }
private static MetaMethod findSubClassMethod(Class instanceKlazz, String methodName, Class[] arguments, MetaClass metaClass, MetaMethod method) {
if (metaClass instanceof MetaClassImpl) {
Object list = ((MetaClassImpl) metaClass).getSubclassMetaMethods(methodName);
if (list != null) {
if (list instanceof MetaMethod) {
MetaMethod m = (MetaMethod) list;
if (m.getDeclaringClass().getTheClass().isAssignableFrom(instanceKlazz)) {
if (m.isValidExactMethod(arguments)) {
if (method == null)
method = m;
else {
method = mostSpecific (method, m, instanceKlazz);
}
}
}
}
else {
FastArray arr = (FastArray) list;
for (int i = 0; i != arr.size(); ++i) {
MetaMethod m = (MetaMethod) arr.get(i);
if (m.getDeclaringClass().getTheClass().isAssignableFrom(instanceKlazz)) {
if (m.isValidExactMethod(arguments)) {
if (method == null)
method = m;
else {
method = mostSpecific (method, m, instanceKlazz);
}
}
}
}
}
}
}
return method;
}
private static MetaMethod mostSpecific(MetaMethod method, MetaMethod newMethod, Class instanceKlazz) {
Class newMethodC = newMethod.getDeclaringClass().getTheClass();
Class methodC = method.getDeclaringClass().getTheClass();
if (!newMethodC.isAssignableFrom(instanceKlazz))
return method;
if (newMethodC == methodC)
return newMethod;
if (newMethodC.isAssignableFrom(methodC)) {
return method;
}
if (methodC.isAssignableFrom(newMethodC)) {
return newMethod;
}
return newMethod;
}
    /**
     * Recursively searches the interfaces implemented by the meta class' class
     * for the most specific matching meta method, then folds in subclass methods
     * and the meta class' own method.
     */
    private static MetaMethod searchInterfacesForMetaMethod(Class instanceKlazz, String methodName, Class[] arguments, MetaClass metaClass) {
        Class[] interfaces = metaClass.getTheClass().getInterfaces();
        MetaMethod method = null;
        for (Class anInterface : interfaces) {
            // recurse into each interface's own meta class
            MetaClass infMetaClass = GroovySystem.getMetaClassRegistry().getMetaClass(anInterface);
            MetaMethod infMethod = searchInterfacesForMetaMethod(instanceKlazz, methodName, arguments, infMetaClass);
            if (infMethod != null) {
                if (method == null)
                    method = infMethod;
                else
                    method = mostSpecific(method, infMethod, instanceKlazz);
            }
        }
        method = findSubClassMethod(instanceKlazz, methodName, arguments, metaClass, method);
        method = findOwnMethod(instanceKlazz, methodName, arguments, metaClass, method);
        return method;
    }
protected static MetaMethod findOwnMethod(Class instanceKlazz, String methodName, Class[] arguments, MetaClass metaClass, MetaMethod method) {
// we trick ourselves here
if (instanceKlazz == metaClass.getTheClass())
return method;
MetaMethod ownMethod = metaClass.pickMethod(methodName, arguments);
if (ownMethod != null) {
if (method == null)
method = ownMethod;
else
method = mostSpecific(method, ownMethod, instanceKlazz);
}
return method;
}
    /** Hook for subclasses to expose subclass meta methods; this base implementation has none. */
    protected Object getSubclassMetaMethods(String methodName) {
        return null;
    }
    /**
     * Template for visiting every (class, method-name) entry of the meta method
     * index. Subclasses implement {@link #methodNameAction} and may override
     * {@link #skipClass} to filter classes.
     */
    private abstract class MethodIndexAction {
        /** Walks the whole index, invoking methodNameAction for each name entry. */
        public void iterate() {
            final ComplexKeyHashMap.Entry[] table = metaMethodIndex.methodHeaders.getTable();
            int len = table.length;
            for (int i = 0; i != len; ++i) {
                // each bucket is a linked chain of class entries
                for (SingleKeyHashMap.Entry classEntry = (SingleKeyHashMap.Entry) table[i];
                     classEntry != null;
                     classEntry = (SingleKeyHashMap.Entry) classEntry.next) {
                    Class clazz = (Class) classEntry.getKey();
                    if (skipClass(clazz)) continue;
                    MetaMethodIndex.Header header = (MetaMethodIndex.Header) classEntry.getValue();
                    // visit every method-name entry hanging off this class header
                    for (MetaMethodIndex.Entry nameEntry = header.head; nameEntry != null; nameEntry = nameEntry.nextClassEntry) {
                        methodNameAction(clazz, nameEntry);
                    }
                }
            }
        }
        /** Invoked once per (class, method-name) entry. */
        public abstract void methodNameAction(Class clazz, MetaMethodIndex.Entry methods);
        /** Override to exclude classes from iteration; default excludes none. */
        public boolean skipClass(Class clazz) {
            return false;
        }
    }
    /**
     * <p>Retrieves the named property from the given object.
     *
     * <p>Delegates to the internal getProperty overload with this meta class'
     * theClass as the first argument and both trailing flags set to false.
     * NOTE(review): the flag semantics are defined by the overload, which is
     * outside this view — confirm there.
     *
     * @param object The Object which the property is being retrieved from
     * @param property The name of the property
     *
     * @return The property's value
     */
    public Object getProperty(Object object, String property) {
        return getProperty(theClass, object, property, false, false);
    }
    /**
     * <p>Sets the named property on the given object.
     *
     * <p>Delegates to the internal setProperty overload with this meta class'
     * theClass as the first argument and both trailing flags set to false.
     * NOTE(review): the flag semantics are defined by the overload, which is
     * outside this view — confirm there.
     *
     * @param object The Object which the property is being set on
     * @param property The name of the property
     * @param newValue The new value
     */
    public void setProperty(Object object, String property, Object newValue) {
        setProperty(theClass, object, property, newValue, false, false);
    }
    /**
     * Retrieves the value of an attribute (field). This method is to support
     * the Groovy runtime and not for general client API usage.
     *
     * <p>Delegates to the internal getAttribute overload with both trailing
     * flags set to false.
     *
     * @param object The object to get the attribute from
     * @param attribute The name of the attribute
     * @return The attribute value
     */
    public Object getAttribute(Object object, String attribute) {
        return getAttribute(theClass, object, attribute, false, false);
    }
    /**
     * Sets the value of an attribute (field). This method is to support the
     * Groovy runtime and not for general client API usage.
     *
     * <p>Delegates to the internal setAttribute overload with both trailing
     * flags set to false.
     *
     * @param object The object to set the attribute on
     * @param attribute The name of the attribute
     * @param newValue The new value of the attribute
     */
    public void setAttribute(Object object, String attribute, Object newValue) {
        setAttribute(theClass, object, attribute, newValue, false, false);
    }
    /**
     * Selects a method by name and argument classes. This method does not
     * search for an exact match; it searches for a compatible method using the
     * method selection mechanism provided by the implementation of this
     * MetaClass. pickMethod may or may not be consulted during the method
     * selection process when invoking a method — no guarantee is made either
     * way.
     *
     * @param methodName the name of the method to pick
     * @param arguments the method argument classes
     * @return a matching MetaMethod or null
     * @throws GroovyRuntimeException if there is more than one matching method
     */
    public MetaMethod pickMethod(String methodName, Class[] arguments) {
        return getMethodWithoutCaching(theClass, methodName, arguments, false);
    }
    /**
     * Indicates whether meta class method invocation for non-static methods is
     * done through a custom invoker object (a non-null invokeMethodMethod).
     *
     * @return true if the method invocation is not done by the meta class itself
     */
    public boolean hasCustomInvokeMethod() {return invokeMethodMethod!=null; }
    /**
     * Indicates whether meta class method invocation for static methods is done
     * through a custom invoker object. This base implementation never uses one.
     *
     * @return true if the method invocation is not done by the meta class itself
     */
    public boolean hasCustomStaticInvokeMethod() {return false; }
    /**
     * remove all method call cache entries. This should be done if a
     * method is added during runtime, but not by using a category.
     */
    protected void clearInvocationCaches() {
        // Delegates wholesale to the index; per-entry invalidation is not supported.
        metaMethodIndex.clearCaches ();
    }
private static final SingleKeyHashMap.Copier NAME_INDEX_COPIER = new SingleKeyHashMap.Copier() {
public Object copy(Object value) {
if (value instanceof FastArray)
return ((FastArray) value).copy();
else
return value;
}
};
    // Copier for method-index values: clones the nested SingleKeyHashMap,
    // applying NAME_INDEX_COPIER to each of its values.
    private static final SingleKeyHashMap.Copier METHOD_INDEX_COPIER = new SingleKeyHashMap.Copier() {
        public Object copy(Object value) {
            return SingleKeyHashMap.copy(new SingleKeyHashMap(false), (SingleKeyHashMap) value, NAME_INDEX_COPIER);
        }
    };
    // Concrete index type used for method lookup; adds a deep copy() on top of Index.
    class MethodIndex extends Index {
        // NOTE(review): the boolean argument is ignored — super(false) is passed regardless.
        public MethodIndex(boolean b) {
            super(false);
        }
        // NOTE(review): size is effectively ignored too, since Index(int) drops it.
        public MethodIndex(int size) {
            super(size);
        }
        public MethodIndex() {
            super();
        }
        // Deep copy: nested per-class maps are cloned via METHOD_INDEX_COPIER.
        MethodIndex copy() {
            return (MethodIndex) SingleKeyHashMap.copy(new MethodIndex(false), this, METHOD_INDEX_COPIER);
        }
        protected Object clone() throws CloneNotSupportedException {
            return super.clone();
        }
    }
    /**
     * Two-level index keyed by CachedClass where each value is a nested
     * SingleKeyHashMap. NOTE(review): both the int and boolean constructor
     * parameters are ignored (the int never reaches the superclass; the
     * boolean is replaced by a hard-coded false).
     */
    public static class Index extends SingleKeyHashMap {
        public Index(int size) {
        }
        public Index() {
        }
        public Index(boolean size) {
            super(false);
        }
        // Returns the nested map for key, creating and storing an empty one on
        // first access.
        public SingleKeyHashMap getNotNull(CachedClass key) {
            Entry res = getOrPut(key);
            if (res.value == null) {
                res.value = new SingleKeyHashMap();
            }
            return (SingleKeyHashMap) res.value;
        }
        // Associates (or replaces) the nested map stored under key.
        public void put(CachedClass key, SingleKeyHashMap value) {
            ((Entry) getOrPut(key)).value = value;
        }
        // Returns the nested map for clazz, or null when absent.
        public SingleKeyHashMap getNullable(CachedClass clazz) {
            return (SingleKeyHashMap) get(clazz);
        }
        public boolean checkEquals(ComplexKeyHashMap.Entry e, Object key) {
            return ((Entry) e).key.equals(key);
        }
    }
    /**
     * Placeholder MetaMethod whose every accessor returns null/0 and whose
     * invoke is a no-op returning null. NOTE(review): appears to serve as a
     * sentinel/stand-in value — confirm usage sites elsewhere in the file.
     */
    private static class DummyMetaMethod extends MetaMethod {
        public int getModifiers() {
            return 0;
        }
        public String getName() {
            return null;
        }
        public Class getReturnType() {
            return null;
        }
        public CachedClass getDeclaringClass() {
            return null;
        }
        public ParameterTypes getParamTypes() {
            return null;
        }
        public Object invoke(Object object, Object[] arguments) {
            return null;
        }
    }
}
| {
"content_hash": "5cba00781777793e71335fc3972320c7",
"timestamp": "",
"source": "github",
"line_count": 3889,
"max_line_length": 171,
"avg_line_length": 42.65723836461815,
"alnum_prop": 0.578447683460523,
"repo_name": "paplorinc/incubator-groovy",
"id": "f0c8fe5a1d22d5eb0c0b38f24b9cb582ae7adb26",
"size": "166715",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "src/main/groovy/lang/MetaClassImpl.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "18768"
},
{
"name": "CSS",
"bytes": "132839"
},
{
"name": "GAP",
"bytes": "220640"
},
{
"name": "Groovy",
"bytes": "7527151"
},
{
"name": "HTML",
"bytes": "180331"
},
{
"name": "Java",
"bytes": "11128829"
},
{
"name": "JavaScript",
"bytes": "1191"
},
{
"name": "Shell",
"bytes": "22275"
},
{
"name": "Smarty",
"bytes": "7699"
}
],
"symlink_target": ""
} |
package org.apache.skywalking.e2e.base;
import java.lang.annotation.Documented;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import java.util.concurrent.TimeUnit;
import org.apache.skywalking.e2e.SkyWalkingExtension;
import org.junit.jupiter.api.TestInstance;
import org.junit.jupiter.api.Timeout;
import org.junit.jupiter.api.extension.ExtendWith;
/**
 * A composite annotation that combines {@link SkyWalkingExtension},
 * {@link Timeout} (10 minutes), and {@link TestInstance} (PER_CLASS
 * lifecycle) for SkyWalking end-to-end test classes, so individual tests
 * only need this single type-level annotation.
 */
@Documented
@Target(ElementType.TYPE)
@Retention(RetentionPolicy.RUNTIME)
@ExtendWith(SkyWalkingExtension.class)
@Timeout(value = 10, unit = TimeUnit.MINUTES)
@TestInstance(TestInstance.Lifecycle.PER_CLASS)
public @interface SkyWalkingE2E {
}
| {
"content_hash": "81fbdfde2ab50a02db9ea8f4be4a4795",
"timestamp": "",
"source": "github",
"line_count": 26,
"max_line_length": 111,
"avg_line_length": 32,
"alnum_prop": 0.8125,
"repo_name": "ascrutae/sky-walking",
"id": "cfedd20b893ef59d3a6f00806a2ef2a70026b946",
"size": "1636",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "test/e2e/e2e-test/src/test/java/org/apache/skywalking/e2e/base/SkyWalkingE2E.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ANTLR",
"bytes": "7666"
},
{
"name": "Batchfile",
"bytes": "6450"
},
{
"name": "Dockerfile",
"bytes": "2490"
},
{
"name": "FreeMarker",
"bytes": "13355"
},
{
"name": "Java",
"bytes": "8090931"
},
{
"name": "Kotlin",
"bytes": "7757"
},
{
"name": "Makefile",
"bytes": "3785"
},
{
"name": "Python",
"bytes": "2443"
},
{
"name": "Scala",
"bytes": "6067"
},
{
"name": "Shell",
"bytes": "137411"
},
{
"name": "Smarty",
"bytes": "5594"
},
{
"name": "TSQL",
"bytes": "101499"
},
{
"name": "Thrift",
"bytes": "2814"
}
],
"symlink_target": ""
} |
using System.Reflection;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
// General Information about an assembly is controlled through the following
// set of attributes. Change these attribute values to modify the information
// associated with an assembly.
[assembly: AssemblyTitle("2. Advertisement Message")]
[assembly: AssemblyDescription("")]
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyCompany("")]
[assembly: AssemblyProduct("2. Advertisement Message")]
[assembly: AssemblyCopyright("Copyright © 2017")]
[assembly: AssemblyTrademark("")]
[assembly: AssemblyCulture("")]
// Setting ComVisible to false makes the types in this assembly not visible
// to COM components. If you need to access a type in this assembly from
// COM, set the ComVisible attribute to true on that type.
[assembly: ComVisible(false)]
// The following GUID is for the ID of the typelib if this project is exposed to COM.
[assembly: Guid("4c96f743-0d0a-42b0-be59-9704d3de95a4")]
// Version information for an assembly consists of the following four values:
//
//      Major Version
//      Minor Version
//      Build Number
//      Revision
//
// You can specify all the values or you can default the Build and Revision Numbers
// by using the '*' as shown below:
// [assembly: AssemblyVersion("1.0.*")]
[assembly: AssemblyVersion("1.0.0.0")]
[assembly: AssemblyFileVersion("1.0.0.0")]
| {
"content_hash": "10fb2806cbc9bba72796768cd2c6a876",
"timestamp": "",
"source": "github",
"line_count": 36,
"max_line_length": 84,
"avg_line_length": 39.47222222222222,
"alnum_prop": 0.745249824067558,
"repo_name": "DGochew/ProgrammingFundamentals",
"id": "61a9d7a9c13c465170545aea594ac8bee327a559",
"size": "1424",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Objects and Classes/2. Advertisement Message/Properties/AssemblyInfo.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C#",
"bytes": "268637"
}
],
"symlink_target": ""
} |
<?php
namespace Swoopaholic\Bundle\FrameworkBundle\Navigation\Provider;
use Swoopaholic\Component\Navigation\NavigationInterface;
use Swoopaholic\Component\Navigation\ProviderInterface;
use Symfony\Component\DependencyInjection\ContainerInterface;
/**
 * Navigation element provider that resolves elements lazily through the
 * Symfony service container, using a name-to-service-id map.
 *
 * @package Swoopaholic\Bundle\FrameworkBundle\NavBar\Provider
 */
class ContainerAwareProvider implements ProviderInterface
{
    /**
     * @var \Symfony\Component\DependencyInjection\ContainerInterface
     */
    private $container;

    /**
     * Map of element name => container service id.
     *
     * @var array
     */
    private $elementIds;

    /**
     * @param ContainerInterface $container
     * @param array              $elementIds map of element name => service id
     */
    public function __construct(ContainerInterface $container, array $elementIds)
    {
        $this->container = $container;
        $this->elementIds = $elementIds;
    }

    /**
     * Fetches the navigation element registered under the given name from the
     * container.
     *
     * @param string $name
     * @param array  $options unused by this provider
     *
     * @throws \InvalidArgumentException when no element is registered under $name
     *
     * @return NavigationInterface
     */
    public function get($name, array $options = array())
    {
        if ($this->has($name)) {
            return $this->container->get($this->elementIds[$name]);
        }

        throw new \InvalidArgumentException(sprintf('The element "%s" is not defined.', $name));
    }

    /**
     * Tells whether an element is registered under the given name.
     * Uses isset(), so a name mapped to null counts as absent.
     *
     * @param string $name
     *
     * @return bool
     */
    public function has($name)
    {
        return isset($this->elementIds[$name]);
    }
}
| {
"content_hash": "275d48a19f6b2375b59a41a9a59e96aa",
"timestamp": "",
"source": "github",
"line_count": 60,
"max_line_length": 100,
"avg_line_length": 25.233333333333334,
"alnum_prop": 0.6505944517833554,
"repo_name": "swoopaholic/FrameworkBundle",
"id": "882368bab9e13f8579590858e7c45f659c5f1bbb",
"size": "1755",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Navigation/Provider/ContainerAwareProvider.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "PHP",
"bytes": "45486"
}
],
"symlink_target": ""
} |
All notable changes to this project will be documented in this file.
This project adheres to [Semantic Versioning](http://semver.org/).
## [3.1.1](https://github.com/sonata-project/SonataDoctrineMongoDBAdminBundle/compare/3.1.0...3.1.1) - 2018-02-08
### Changed
- All templates references are updated to twig namespaced syntax
### Fixed
- Compatibility with autocomplete form type
- FQCN for form types (Symfony 3 compatibility)
- Association field popup
## [3.1.0](https://github.com/sonata-project/SonataDoctrineMongoDBAdminBundle/compare/3.0.0...3.1.0) - 2018-01-08
### Added
- Added php 7.0 support
- Added twig dependency
### Changed
- Changed internal folder structure to `src`, `tests` and `docs`
- Add support for FQCNs form types
- Switched to templates from SonataAdminBundle
- Replace twig paths with new naming conventions
### Deprecated
- Association templates
### Fixed
- Call of the `render` function is now Symfony 3 compatible
- Fix `FormContractor::getDefaultOptions` not checking against form types FQCNs
- Throw an exception if property name is not found in field mappings
- A list field with `actions` type will get all the required field options just like the `_action` field.
- `_action` field will get a proper `actions` type.
- Patched collection form handling script to maintain File input state when new items are added to collections
- Typo in javascript in `edit_mongo_one_association_script.html.twig`
- Check for filter Value in StringFilter
- Missing explicit Symfony dependencies
### Removed
- internal test classes are now excluded from the autoloader
- Support for old versions of Symfony.
| {
"content_hash": "0ef3d2423570aacb5c6db4d1d3269eb5",
"timestamp": "",
"source": "github",
"line_count": 40,
"max_line_length": 113,
"avg_line_length": 40.425,
"alnum_prop": 0.7699443413729128,
"repo_name": "yedey/SonataDoctrineMongoDBAdminBundle",
"id": "663838f15bd8d90ab15b6267ba0c292b3e5bde1d",
"size": "1630",
"binary": false,
"copies": "1",
"ref": "refs/heads/3.x",
"path": "CHANGELOG.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "41910"
},
{
"name": "Makefile",
"bytes": "1382"
},
{
"name": "PHP",
"bytes": "140379"
},
{
"name": "Shell",
"bytes": "2593"
}
],
"symlink_target": ""
} |
import os, re
import pytest
from pytest import raises
from jenkinsflow.flow import parallel, FailedChildJobsException
from .framework import api_select
from .framework.utils import assert_lines_in
from .framework.abort_job import abort
from .cfg import ApiType
here = os.path.abspath(os.path.dirname(__file__))
@pytest.mark.not_apis(ApiType.SCRIPT)
def test_abort(api_type, capsys):
    """A job aborted mid-flow fails the parallel flow and is reported ABORTED.

    Declares three mock jobs: a quick success, a long-running job that is
    aborted out-of-band after ~2 seconds, and one that fails on its own. The
    parallel flow must raise FailedChildJobsException naming both unsuccessful
    jobs, and the captured console report must contain an ABORTED line for the
    aborted job.
    """
    with api_select.api(__file__, api_type, login=True) as api:
        api.flow_job()
        api.job('quick', exec_time=0.01, max_fails=0, expect_invocations=1, expect_order=1)
        # final_result='ABORTED': the mock framework expects this job to end aborted.
        api.job('wait10_abort', exec_time=10, max_fails=0, expect_invocations=1, expect_order=1, final_result='ABORTED')
        api.job('wait1_fail', exec_time=1, max_fails=1, expect_invocations=1, expect_order=1)
        # Schedule an out-of-band abort of 'wait10_abort' 2 seconds from now.
        abort(api, 'wait10_abort', 2)
        with raises(FailedChildJobsException) as exinfo:
            with parallel(api, timeout=40, job_name_prefix=api.job_name_prefix, report_interval=3) as ctrl:
                ctrl.invoke('quick')
                ctrl.invoke('wait10_abort')
                ctrl.invoke('wait1_fail')
        # Both the aborted and the failing job must be named in the exception.
        assert "wait10_abort" in str(exinfo.value)
        assert "wait1_fail" in str(exinfo.value)
        sout, _ = capsys.readouterr()
        assert_lines_in(
            api_type, sout,
            re.compile("^ABORTED: 'jenkinsflow_test__abort__wait10_abort' - build: .*/jenkinsflow_test__abort__wait10_abort.* after:"),
        )
| {
"content_hash": "bb3fdf7b017f8fcb111d90466bef5375",
"timestamp": "",
"source": "github",
"line_count": 40,
"max_line_length": 135,
"avg_line_length": 36.275,
"alnum_prop": 0.656788421778084,
"repo_name": "lechat/jenkinsflow",
"id": "da5160310159cedee040c071769b0c8276b66e99",
"size": "1589",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/abort_test.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "399"
},
{
"name": "HTML",
"bytes": "992"
},
{
"name": "JavaScript",
"bytes": "1410"
},
{
"name": "Python",
"bytes": "353496"
},
{
"name": "Shell",
"bytes": "801"
}
],
"symlink_target": ""
} |
An app for creating and managing your front-end pattern library.
1. Create patterns in Markdown files
2. Create structure by creating folders
3. Browseable interface
### Built with Gulp, Angular and Markdown/YAML
## 3 Separate Sub-Modules
* Mural Patterns (this Read Me)
* Mural API (<a href="../api/Readme.md" target="_blank">read me</a>)
* Mural Styles (<a href="../styles/Readme.md" target="_blank">read me</a>)
## How to install
Dependencies
* [NodeJS](http://nodejs.org/)
* Bower
`npm install -g bower`
1. Clone the repository
`git clone https://github.com/stephenst/mural.git`
2. Install NPM and bower packages
`npm install && bower install`
3. Launch the server
`gulp serve`
3. Build the documentation
`gulp Mural`
4. Open your browser and navigate to
`http://localhost:3000/mural/patterns/index.html`
## How Mural Patterns works
1. Patterns folder is watched by Gulp and JSON files are generated for each root pattern
2. AngularJS uses these JSON documents to show a browseable interface of the patterns
3. Inject your own CSS by editing `index.html` and add your own patterns
## Pattern (markdown) Formats
The meta information in these files go between lines with `---`
Name: Title of the Pattern Section
hidecode: true|false (if you have HTML code showing the example, place it under the bottom --- and it will be separated out.)
Description: This is the body copy that will display.
Subsections within Description: (use markdown)
What - What the item is.
Use When - When do you implement it.
HTML Code goes below the meta information.
```
---
name: Alert success
hidecode: true
description: >
### What
Page level information or service alert. Critical updates with a defined time period should be pushed using the alert box.
### Use when
For page level critical updates.
---
<div class="ui-alert ui-alert--success">
<div class="alert__title">This is a success alert</div>
<div class="alert__body">More body text</div>
<a href="#" class="alert_close"></a>
</div>
```
| {
"content_hash": "3ab7c8f254c7c658ffec9a6fde6135c0",
"timestamp": "",
"source": "github",
"line_count": 63,
"max_line_length": 130,
"avg_line_length": 33.61904761904762,
"alnum_prop": 0.6931067044381491,
"repo_name": "stephenst/mural",
"id": "07ed56d1a99cfa8fa8d15bf42eb1ef32a9a9bace",
"size": "2169",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Readme.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "346167"
},
{
"name": "Groff",
"bytes": "2072"
},
{
"name": "Groovy",
"bytes": "1321"
},
{
"name": "HTML",
"bytes": "118295"
},
{
"name": "JavaScript",
"bytes": "1395610"
},
{
"name": "Makefile",
"bytes": "5792"
},
{
"name": "PHP",
"bytes": "7869"
},
{
"name": "Python",
"bytes": "7780"
},
{
"name": "Shell",
"bytes": "439"
}
],
"symlink_target": ""
} |
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<meta http-equiv="Content-Type" content="text/xhtml;charset=UTF-8"/>
<meta http-equiv="X-UA-Compatible" content="IE=9"/>
<meta name="generator" content="Doxygen 1.8.11"/>
<title>TestWebProject: File List</title>
<link href="tabs.css" rel="stylesheet" type="text/css"/>
<script type="text/javascript" src="jquery.js"></script>
<script type="text/javascript" src="dynsections.js"></script>
<link href="search/search.css" rel="stylesheet" type="text/css"/>
<script type="text/javascript" src="search/searchdata.js"></script>
<script type="text/javascript" src="search/search.js"></script>
<script type="text/javascript">
$(document).ready(function() { init_search(); });
</script>
<link href="doxygen.css" rel="stylesheet" type="text/css" />
</head>
<body>
<div id="top"><!-- do not remove this div, it is closed by doxygen! -->
<div id="titlearea">
<table cellspacing="0" cellpadding="0">
<tbody>
<tr style="height: 56px;">
<td id="projectalign" style="padding-left: 0.5em;">
<div id="projectname">TestWebProject
 <span id="projectnumber">bounswegroup5</span>
</div>
</td>
</tr>
</tbody>
</table>
</div>
<!-- end header part -->
<!-- Generated by Doxygen 1.8.11 -->
<script type="text/javascript">
var searchBox = new SearchBox("searchBox", "search",false,'Search');
</script>
<div id="navrow1" class="tabs">
<ul class="tablist">
<li><a href="index.html"><span>Main Page</span></a></li>
<li><a href="namespaces.html"><span>Packages</span></a></li>
<li><a href="annotated.html"><span>Classes</span></a></li>
<li class="current"><a href="files.html"><span>Files</span></a></li>
<li>
<div id="MSearchBox" class="MSearchBoxInactive">
<span class="left">
<img id="MSearchSelect" src="search/mag_sel.png"
onmouseover="return searchBox.OnSearchSelectShow()"
onmouseout="return searchBox.OnSearchSelectHide()"
alt=""/>
<input type="text" id="MSearchField" value="Search" accesskey="S"
onfocus="searchBox.OnSearchFieldFocus(true)"
onblur="searchBox.OnSearchFieldFocus(false)"
onkeyup="searchBox.OnSearchFieldChange(event)"/>
</span><span class="right">
<a id="MSearchClose" href="javascript:searchBox.CloseResultsWindow()"><img id="MSearchCloseImg" border="0" src="search/close.png" alt=""/></a>
</span>
</div>
</li>
</ul>
</div>
<div id="navrow2" class="tabs2">
<ul class="tablist">
<li class="current"><a href="files.html"><span>File List</span></a></li>
</ul>
</div>
</div><!-- top -->
<!-- window showing the filter options -->
<div id="MSearchSelectWindow"
onmouseover="return searchBox.OnSearchSelectShow()"
onmouseout="return searchBox.OnSearchSelectHide()"
onkeydown="return searchBox.OnSearchSelectKey(event)">
</div>
<!-- iframe showing the search results (closed by default) -->
<div id="MSearchResultsWindow">
<iframe src="javascript:void(0)" frameborder="0"
name="MSearchResults" id="MSearchResults">
</iframe>
</div>
<div class="header">
<div class="headertitle">
<div class="title">File List</div> </div>
</div><!--header-->
<div class="contents">
<div class="textblock">Here is a list of all files with brief descriptions:</div><div class="directory">
<div class="levels">[detail level <span onclick="javascript:toggleLevel(1);">1</span><span onclick="javascript:toggleLevel(2);">2</span><span onclick="javascript:toggleLevel(3);">3</span><span onclick="javascript:toggleLevel(4);">4</span><span onclick="javascript:toggleLevel(5);">5</span>]</div><table class="directory">
<tr id="row_0_" class="even"><td class="entry"><span style="width:0px;display:inline-block;"> </span><span id="arr_0_" class="arrow" onclick="toggleFolder('0_')">▼</span><span id="img_0_" class="iconfopen" onclick="toggleFolder('0_')"> </span><a class="el" href="dir_68267d1309a1af8e8297ef4c3efbcdba.html" target="_self">src</a></td><td class="desc"></td></tr>
<tr id="row_0_0_"><td class="entry"><span style="width:16px;display:inline-block;"> </span><span id="arr_0_0_" class="arrow" onclick="toggleFolder('0_0_')">▼</span><span id="img_0_0_" class="iconfopen" onclick="toggleFolder('0_0_')"> </span><a class="el" href="dir_541eb0a6c58a7690acc5b848a4b1b724.html" target="_self">com</a></td><td class="desc"></td></tr>
<tr id="row_0_0_0_" class="even"><td class="entry"><span style="width:32px;display:inline-block;"> </span><span id="arr_0_0_0_" class="arrow" onclick="toggleFolder('0_0_0_')">▼</span><span id="img_0_0_0_" class="iconfopen" onclick="toggleFolder('0_0_0_')"> </span><a class="el" href="dir_fa108e2554a5743d0f58389782f7de5d.html" target="_self">example</a></td><td class="desc"></td></tr>
<tr id="row_0_0_0_0_"><td class="entry"><span style="width:48px;display:inline-block;"> </span><span id="arr_0_0_0_0_" class="arrow" onclick="toggleFolder('0_0_0_0_')">▼</span><span id="img_0_0_0_0_" class="iconfopen" onclick="toggleFolder('0_0_0_0_')"> </span><a class="el" href="dir_4e3ef058c7327a5301708bb894038bd8.html" target="_self">servlets</a></td><td class="desc"></td></tr>
<tr id="row_0_0_0_0_0_" class="even"><td class="entry"><span style="width:80px;display:inline-block;"> </span><span class="icondoc"></span><a class="el" href="_atakan_servlet_8java.html" target="_self">AtakanServlet.java</a></td><td class="desc"></td></tr>
<tr id="row_0_0_0_0_1_"><td class="entry"><span style="width:80px;display:inline-block;"> </span><span class="icondoc"></span><a class="el" href="_bugra_servlet_8java.html" target="_self">BugraServlet.java</a></td><td class="desc"></td></tr>
<tr id="row_0_0_0_0_2_" class="even"><td class="entry"><span style="width:80px;display:inline-block;"> </span><span class="icondoc"></span><a class="el" href="_burak_servlet_8java.html" target="_self">BurakServlet.java</a></td><td class="desc"></td></tr>
<tr id="row_0_0_0_0_3_"><td class="entry"><span style="width:80px;display:inline-block;"> </span><span class="icondoc"></span><a class="el" href="_hello_servlet_8java.html" target="_self">HelloServlet.java</a></td><td class="desc"></td></tr>
<tr id="row_0_0_0_0_4_" class="even"><td class="entry"><span style="width:80px;display:inline-block;"> </span><span class="icondoc"></span><a class="el" href="_kerim_servlet_8java.html" target="_self">KerimServlet.java</a></td><td class="desc"></td></tr>
<tr id="row_0_0_0_0_5_"><td class="entry"><span style="width:80px;display:inline-block;"> </span><span class="icondoc"></span><a class="el" href="_ozer2_servlet_8java.html" target="_self">Ozer2Servlet.java</a></td><td class="desc"></td></tr>
<tr id="row_0_0_0_0_6_" class="even"><td class="entry"><span style="width:80px;display:inline-block;"> </span><span class="icondoc"></span><a class="el" href="_sevda_servlet_8java.html" target="_self">SevdaServlet.java</a></td><td class="desc"></td></tr>
<tr id="row_0_0_0_0_7_"><td class="entry"><span style="width:80px;display:inline-block;"> </span><span class="icondoc"></span><a class="el" href="_umut_servlet_8java.html" target="_self">UmutServlet.java</a></td><td class="desc"></td></tr>
</table>
</div><!-- directory -->
</div><!-- contents -->
<!-- start footer part -->
<hr class="footer"/><address class="footer"><small>
Generated by  <a href="http://www.doxygen.org/index.html">
<img class="footer" src="doxygen.png" alt="doxygen"/>
</a> 1.8.11
</small></address>
</body>
</html>
| {
"content_hash": "29e7da13ecad5c78517bbcbecf9a385a",
"timestamp": "",
"source": "github",
"line_count": 112,
"max_line_length": 401,
"avg_line_length": 69.04464285714286,
"alnum_prop": 0.657312815207552,
"repo_name": "bounswe/bounswe2016group5",
"id": "58f3cd19c291ee54a5b015ddd273064dbec04249",
"size": "7733",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "TestWebProject/html/files.html",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "44337"
},
{
"name": "HTML",
"bytes": "1327433"
},
{
"name": "Java",
"bytes": "217041"
},
{
"name": "JavaScript",
"bytes": "277500"
},
{
"name": "Makefile",
"bytes": "508"
},
{
"name": "PostScript",
"bytes": "30598"
},
{
"name": "TeX",
"bytes": "96693"
}
],
"symlink_target": ""
} |
namespace DefiningClassesPartOne
{
    using System;

    /// <summary>
    /// Console demo that prints information for a set of GSM phones.
    /// </summary>
    public class GSMTest
    {
        // Display labels for the ten fields returned by GSM.GetPhoneInfo, in
        // the same order the data is produced. Replaces the previous ten-case
        // index switch; also fixes the "Manifacturer" output typo.
        private static readonly string[] InfoLabels =
        {
            "Manufacturer: ",
            "Model: ",
            "Price: ",
            "Owner: ",
            "Battery Model: ",
            "Battery Capacity (idle hours): ",
            "Battery Capacity (talk hours): ",
            "Battery Type: ",
            "Display Size: ",
            "Display Max. Colors: "
        };

        public static void Main()
        {
            var phones = new GSM[]
            {
                new GSM("Lumia550", "Microsoft", 120.80m, "Pesho Petrov", new Battery("KP1250", 500, 150, BatteryType.LiIon), new Display("1220x600", 65000)),
                new GSM("Phab", "Lenovo", 1200m, "Bacho Kiro", new Battery("AH-11", 480, 24), new Display("1280x720", 16000000)),
                new GSM("Galaxy S6", "Samsung")
            };

            foreach (var phone in phones)
            {
                Console.WriteLine(new string('-', 50));
                PrintPhoneInfo(phone);
            }

            Console.Write(new string('*', 17));
            Console.Write(" STATIC PROPERTY ");
            Console.WriteLine(new string('*', 17));
            PrintPhoneInfo(GSM.IPhone4S);
        }

        /// <summary>
        /// Prints each info field of the given phone: label in the current
        /// color, value in cyan, then the console color is set back to white.
        /// </summary>
        /// <param name="gsm">The phone whose info is printed.</param>
        public static void PrintPhoneInfo(GSM gsm)
        {
            var currentGsmData = gsm.GetPhoneInfo(gsm.ToString());

            for (int i = 0; i < InfoLabels.Length; i++)
            {
                Console.Write(InfoLabels[i]);
                Console.ForegroundColor = ConsoleColor.Cyan;
                Console.WriteLine(currentGsmData[i]);
                Console.ForegroundColor = ConsoleColor.White;
            }

            Console.WriteLine();
        }
    }
}
"content_hash": "9ee940cabf40ea29bcca4d6f625db293",
"timestamp": "",
"source": "github",
"line_count": 76,
"max_line_length": 158,
"avg_line_length": 34.5921052631579,
"alnum_prop": 0.39596804868771396,
"repo_name": "jorosoft/Telerik-Academy",
"id": "80901429e9c4a10bf46deec5038afc38b2445c9f",
"size": "2631",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Homeworks/C# OOP/01.DefiningClassesPartOne/DefiningClassesPartOne/GSMTest.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C#",
"bytes": "802179"
},
{
"name": "CSS",
"bytes": "28985"
},
{
"name": "HTML",
"bytes": "94575"
},
{
"name": "JavaScript",
"bytes": "84023"
},
{
"name": "XSLT",
"bytes": "3604"
}
],
"symlink_target": ""
} |
<!DOCTYPE html>
<html>
<head>
<title>Curriculum vitae</title>
<link rel="stylesheet" type="text/css" href="../css/cvcss.css" media="screen and (min-width: 661px) and (max-width:2800px)">
<link rel="stylesheet" type="text/css" href="../css/cvcss1.css" media="screen and (max-width: 660px)">
</head>
<body>
<header>
<center><h2>Martha Zuhey Fernández Quispe</h2></center>
</header>
<div class="text">
<p>
<b>Datos Personales</b><br><br>
<b>Nombre:</b> Martha Zuhey Fernández Quispe.<br>
<b>Vivienda:</b> Hospitalet de Llobregat, Av. Masnou. <br>
<b>Nacionalidad:</b> Peruana.<br>
<b>Fecha de nacimiento:</b> 21/02/1998.<br><br>
<hr>
<b>Experiencia Profesional</b><br><br>
Actualmente no tengo experiencia profesional en relación a lo que estudio.<br><br>
<hr>
<b>Estudios</b><br><br>
Titulo de bachillerato Humanístico en Jesuïtes Bellvitge Joan XXIII - 2015-2016.<br><br>
<hr>
<b>Idiomas</b><br><br>
<b>Castellano:</b> Lengua materna.<br>
<b>Catalán:</b> Leído, escrito y hablado de forma fluida.<br>
<b>Inglés:</b> Leído y escrito de forma fluida, hablado en nivel intermedio.<br><br>
<hr>
<b>Capacidades</b><br><br>
<b>Habilidades de liderazgo<br>
Propuesta rápida y flexible de ideas<br>
Buen trabajo de equipo<br>
Habilidades en redacción</b>
</p>
</div>
<div class="foto">
<img class="foto" src="../img/yop.jpg"><br><br>
<b>Contacto:</b> [email protected]<br>
</div>
</body>
</html> | {
"content_hash": "8c49884868507b559bea3843875d605a",
"timestamp": "",
"source": "github",
"line_count": 46,
"max_line_length": 125,
"avg_line_length": 32.19565217391305,
"alnum_prop": 0.6522619851451722,
"repo_name": "MarthaZFQ/MarthaZFQ.github.io",
"id": "e4307221d6793a2c0fd22fe05700707565965039",
"size": "1492",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cv_sobremi.html",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "1636"
},
{
"name": "HTML",
"bytes": "2655"
}
],
"symlink_target": ""
} |
// private
// This is a support class used internally by the Grid components.
// Drag zone for the column-header splitter bars: hooks the splitter elements
// (hd, hd2) into a DDProxy drag group scoped to this grid, using the view's
// shared resizeProxy element as the visible drag proxy.
Ext.grid.SplitDragZone = function(grid, hd, hd2){
    this.grid = grid;
    this.view = grid.getView();
    this.proxy = this.view.resizeProxy;
    Ext.grid.SplitDragZone.superclass.constructor.call(this, hd,
        "gridSplitters" + this.grid.getGridEl().id, {
        dragElId : Ext.id(this.proxy.dom), resizeFrame:false
    });
    this.setHandleElId(Ext.id(hd));
    this.setOuterHandleElId(Ext.id(hd2));
    this.scroll = false;
};
Ext.extend(Ext.grid.SplitDragZone, Ext.dd.DDProxy, {
    fly: Ext.Element.fly,

    // Before the drag starts: disable header clicks, stretch the proxy over
    // the grid body, and constrain the horizontal range so the column cannot
    // shrink below grid.minColumnWidth (and can grow by at most 1000px).
    b4StartDrag : function(x, y){
        this.view.headersDisabled = true;
        this.proxy.setHeight(this.view.mainWrap.getHeight());
        var w = this.cm.getColumnWidth(this.cellIndex);
        var minw = Math.max(w-this.grid.minColumnWidth, 0);
        this.resetConstraints();
        this.setXConstraint(minw, 1000);
        this.setYConstraint(0, 0);
        this.minX = x - minw;
        this.maxX = x + 1000;
        this.startPos = x;
        Ext.dd.DDProxy.prototype.b4StartDrag.call(this, x, y);
    },

    // Only start a drag when the mousedown hit a splitter element of a column
    // that is resizable and not fixed.
    handleMouseDown : function(e){
        // Fix: `ev` was previously assigned without `var`, leaking an implicit
        // global onto window.
        var ev = Ext.EventObject.setEvent(e);
        var t = this.fly(ev.getTarget());
        if(t.hasClass("x-grid-split")){
            this.cellIndex = this.view.getCellIndex(t.dom);
            this.split = t.dom;
            this.cm = this.grid.colModel;
            if(this.cm.isResizable(this.cellIndex) && !this.cm.isFixed(this.cellIndex)){
                Ext.grid.SplitDragZone.superclass.handleMouseDown.apply(this, arguments);
            }
        }
    },

    // After the drag: re-enable headers and apply the width delta to the column.
    endDrag : function(e){
        this.view.headersDisabled = false;
        var endX = Math.max(this.minX, Ext.lib.Event.getPageX(e));
        var diff = endX - this.startPos;
        this.view.onColumnSplitterMoved(this.cellIndex, this.cm.getColumnWidth(this.cellIndex)+diff);
    },

    autoOffset : function(){
        this.setDelta(0,0);
    }
});
"content_hash": "870307ac0b24bc443f49eec863ef4d9f",
"timestamp": "",
"source": "github",
"line_count": 58,
"max_line_length": 101,
"avg_line_length": 34.12068965517241,
"alnum_prop": 0.617988883274381,
"repo_name": "jywarren/cartagen",
"id": "513b9b50d1aa15ffe174363d7c57098831d17886",
"size": "2108",
"binary": false,
"copies": "12",
"ref": "refs/heads/main",
"path": "lib/jsdoc/templates/ext/lib/source/widgets/grid/ColumnSplitDD.js",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "ASP.NET",
"bytes": "93"
},
{
"name": "CSS",
"bytes": "387311"
},
{
"name": "HTML",
"bytes": "15474274"
},
{
"name": "Hack",
"bytes": "916"
},
{
"name": "Java",
"bytes": "743"
},
{
"name": "JavaScript",
"bytes": "5237579"
},
{
"name": "PHP",
"bytes": "13706"
},
{
"name": "Python",
"bytes": "1965"
},
{
"name": "Ruby",
"bytes": "25357"
},
{
"name": "Smarty",
"bytes": "18842"
},
{
"name": "Visual Basic .NET",
"bytes": "1724"
}
],
"symlink_target": ""
} |
package org.elasticsearch.transport.nio.channel;
import org.elasticsearch.transport.nio.NetworkBytesReference;
import org.elasticsearch.transport.nio.ESSelector;
import org.elasticsearch.transport.nio.SocketSelector;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.nio.ByteBuffer;
import java.nio.channels.ClosedChannelException;
import java.nio.channels.SocketChannel;
import java.util.Arrays;
/**
 * A socket-backed nio channel. Holds the remote address, a {@link ConnectFuture}
 * used to complete or await the connect handshake, and the read/write contexts
 * installed after the channel is registered with a {@link SocketSelector}.
 */
public class NioSocketChannel extends AbstractNioChannel<SocketChannel> {

    private final InetSocketAddress remoteAddress;
    private final ConnectFuture connectFuture = new ConnectFuture();
    // Assigned once in markRegistered; volatile so reads from other threads
    // observe the registration.
    private volatile SocketSelector socketSelector;
    private WriteContext writeContext;
    private ReadContext readContext;

    public NioSocketChannel(String profile, SocketChannel socketChannel) throws IOException {
        super(profile, socketChannel);
        this.remoteAddress = (InetSocketAddress) socketChannel.getRemoteAddress();
    }

    @Override
    public CloseFuture closeAsync() {
        // Drop any writes still queued for this channel before initiating close.
        clearQueuedWrites();
        return super.closeAsync();
    }

    @Override
    public void closeFromSelector() {
        // Even if the channel has already been closed we will clear any pending write operations just in case
        clearQueuedWrites();
        super.closeFromSelector();
    }

    @Override
    public SocketSelector getSelector() {
        return socketSelector;
    }

    @Override
    boolean markRegistered(ESSelector selector) {
        this.socketSelector = (SocketSelector) selector;
        return super.markRegistered(selector);
    }

    /**
     * Writes the given references to the socket and advances their read
     * indexes by the number of bytes the socket accepted.
     *
     * @param references buffers to write (a gathering write is used when
     *                   there is more than one)
     * @return the number of bytes written; may be 0 or negative, in which
     *         case the read indexes are left untouched
     * @throws IOException if the socket write fails
     */
    public int write(NetworkBytesReference[] references) throws IOException {
        int written;
        if (references.length == 1) {
            // Single buffer: plain write avoids allocating a ByteBuffer array.
            written = socketChannel.write(references[0].getReadByteBuffer());
        } else {
            ByteBuffer[] buffers = new ByteBuffer[references.length];
            for (int i = 0; i < references.length; ++i) {
                buffers[i] = references[i].getReadByteBuffer();
            }
            // Gathering write; may consume only part of the buffers.
            written = (int) socketChannel.write(buffers);
        }
        if (written <= 0) {
            return written;
        }

        // Advance the read indexes past whatever the socket accepted.
        NetworkBytesReference.vectorizedIncrementReadIndexes(Arrays.asList(references), written);

        return written;
    }

    /**
     * Reads from the socket into {@code reference}'s write buffer and
     * advances its write index.
     *
     * @return the number of bytes read, or -1 on end-of-stream (the write
     *         index is not advanced in that case)
     * @throws IOException if the socket read fails
     */
    public int read(NetworkBytesReference reference) throws IOException {
        int bytesRead = socketChannel.read(reference.getWriteByteBuffer());

        if (bytesRead == -1) {
            return bytesRead;
        }

        reference.incrementWrite(bytesRead);
        return bytesRead;
    }

    public void setContexts(ReadContext readContext, WriteContext writeContext) {
        this.readContext = readContext;
        this.writeContext = writeContext;
    }

    public WriteContext getWriteContext() {
        return writeContext;
    }

    public ReadContext getReadContext() {
        return readContext;
    }

    public InetSocketAddress getRemoteAddress() {
        return remoteAddress;
    }

    public boolean isConnectComplete() {
        return connectFuture.isConnectComplete();
    }

    // The channel can be written to / read from only while it is in the
    // REGISTERED state (state machine inherited from AbstractNioChannel).
    public boolean isWritable() {
        return state.get() == REGISTERED;
    }

    public boolean isReadable() {
        return state.get() == REGISTERED;
    }

    /**
     * This method will attempt to complete the connection process for this channel. It should be called for
     * new channels or for a channel that has produced a OP_CONNECT event. If this method returns true then
     * the connection is complete and the channel is ready for reads and writes. If it returns false, the
     * channel is not yet connected and this method should be called again when a OP_CONNECT event is
     * received.
     *
     * @return true if the connection process is complete
     * @throws IOException if an I/O error occurs
     */
    public boolean finishConnect() throws IOException {
        if (connectFuture.isConnectComplete()) {
            return true;
        } else if (connectFuture.connectFailed()) {
            // A prior attempt already failed: rethrow the recorded exception.
            Exception exception = connectFuture.getException();
            if (exception instanceof IOException) {
                throw (IOException) exception;
            } else {
                throw (RuntimeException) exception;
            }
        }

        boolean isConnected = socketChannel.isConnected();
        if (isConnected == false) {
            isConnected = internalFinish();
        }
        if (isConnected) {
            connectFuture.setConnectionComplete(this);
        }
        return isConnected;
    }

    public ConnectFuture getConnectFuture() {
        return connectFuture;
    }

    // Delegates to SocketChannel.finishConnect, recording any failure on the
    // connect future before propagating it.
    private boolean internalFinish() throws IOException {
        try {
            return socketChannel.finishConnect();
        } catch (IOException e) {
            connectFuture.setConnectionFailed(e);
            throw e;
        } catch (RuntimeException e) {
            connectFuture.setConnectionFailed(e);
            throw e;
        }
    }

    private void clearQueuedWrites() {
        // Even if the channel has already been closed we will clear any pending write operations just in case
        if (state.get() > UNREGISTERED) {
            SocketSelector selector = getSelector();
            // NOTE(review): queued writes are only cleared when running on the
            // selector thread; other threads presumably rely on the selector's
            // own close handling -- confirm against SocketSelector.
            if (selector != null && selector.isOnCurrentThread() && writeContext.hasQueuedWriteOps()) {
                writeContext.clearQueuedWriteOps(new ClosedChannelException());
            }
        }
    }
}
| {
"content_hash": "bb12ba1916fd50ea61dfd1cf5a1a97c1",
"timestamp": "",
"source": "github",
"line_count": 172,
"max_line_length": 110,
"avg_line_length": 31.83139534883721,
"alnum_prop": 0.6546118721461187,
"repo_name": "LeoYao/elasticsearch",
"id": "62404403de0282086ae5aecb5aa353f3dbdd5ab4",
"size": "6263",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "test/framework/src/main/java/org/elasticsearch/transport/nio/channel/NioSocketChannel.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "10878"
},
{
"name": "Groovy",
"bytes": "451"
},
{
"name": "HTML",
"bytes": "1427"
},
{
"name": "Java",
"bytes": "29436347"
},
{
"name": "Perl",
"bytes": "264378"
},
{
"name": "Perl6",
"bytes": "103207"
},
{
"name": "Python",
"bytes": "79156"
},
{
"name": "Ruby",
"bytes": "17776"
},
{
"name": "Shell",
"bytes": "79736"
}
],
"symlink_target": ""
} |
package com.hs.mail.imap.message.request.ext;
import com.hs.mail.imap.ImapSession;
import com.hs.mail.imap.ImapSession.State;
import com.hs.mail.imap.message.request.ImapRequest;
/**
*
* @author Won Chul Doh
* @since Apr 19, 2010
*
*/
public class GetQuotaRequest extends ImapRequest {

    private String quotaRoot;

    public GetQuotaRequest(String tag, String command, String quotaRoot) {
        super(tag, command);
        this.quotaRoot = quotaRoot;
    }

    public String getQuotaRoot() {
        return quotaRoot;
    }

    /**
     * GETQUOTA is only meaningful once the user is logged in, so the command
     * is valid in both the authenticated and the selected state.
     */
    @Override
    public boolean validForState(State state) {
        return state == State.AUTHENTICATED
                || state == State.SELECTED;
    }

}
| {
"content_hash": "c572bc0310e756c5bfa50462105c22d1",
"timestamp": "",
"source": "github",
"line_count": 32,
"max_line_length": 91,
"avg_line_length": 21.78125,
"alnum_prop": 0.7015781922525107,
"repo_name": "svn2github/hwmail-mirror",
"id": "636ad76afb91b05c47d36788fc48f05551df93b2",
"size": "1335",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "hedwig-server/src/main/java/com/hs/mail/imap/message/request/ext/GetQuotaRequest.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "5479"
},
{
"name": "CSS",
"bytes": "70352"
},
{
"name": "HTML",
"bytes": "17940"
},
{
"name": "Java",
"bytes": "1581229"
},
{
"name": "JavaScript",
"bytes": "587214"
},
{
"name": "Shell",
"bytes": "4392"
}
],
"symlink_target": ""
} |
// Private class extension; no additional properties or methods are declared.
@interface HLTabBarController ()

@end
@implementation HLTabBarController

- (void)viewDidLoad {
    [super viewDidLoad];

    // Tint the tab bar and give it a stretchable background image built from
    // the "NavBar" asset, capped at half the image size on every edge.
    UITabBar *tabBar = self.tabBar;
    tabBar.tintColor = [UIColor orangeColor];

    UIImage *backgroundImage = [UIImage imageNamed:@"NavBar"];
    CGFloat capWidth = backgroundImage.size.width * 0.5;
    CGFloat capHeight = backgroundImage.size.height * 0.5;
    UIEdgeInsets capInsets = UIEdgeInsetsMake(capHeight, capWidth, capHeight, capWidth);
    tabBar.backgroundImage = [backgroundImage resizableImageWithCapInsets:capInsets
                                                             resizingMode:UIImageResizingModeStretch];

    // Bold 13pt titles for every tab bar item via the appearance proxy.
    NSMutableDictionary *titleAttributes = [NSMutableDictionary dictionary];
    titleAttributes[NSFontAttributeName] = [UIFont boldSystemFontOfSize:13];
    [[UITabBarItem appearance] setTitleTextAttributes:titleAttributes
                                             forState:UIControlStateNormal];
}

@end
| {
"content_hash": "f4fcfc622df4a41d4ce61d0d29767b17",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 152,
"avg_line_length": 35.72727272727273,
"alnum_prop": 0.7531806615776081,
"repo_name": "HanKKK1515/NewsApp",
"id": "c889a1314878f7cad595884612fb8b45cb62db49",
"size": "958",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "简闻/简闻/Main/Controller/HLTabBarController.m",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "4555"
},
{
"name": "Objective-C",
"bytes": "1431593"
}
],
"symlink_target": ""
} |
-- Environment smoke checks: busted injects its DSL functions and its
-- assertion/mocking helper tables into every spec's environment. `file`
-- must be nil because the `file` executor is not exposed to spec files.
assert(type(file) == 'nil')
assert(type(describe) == 'function')
assert(type(context) == 'function')
assert(type(it) == 'function')
assert(type(spec) == 'function')
assert(type(test) == 'function')
assert(type(before_each) == 'function')
assert(type(after_each) == 'function')
assert(type(spy) == 'table')
assert(type(stub) == 'table')
assert(type(mock) == 'table')
assert(type(match) == 'table')
assert(type(assert) == 'table')

describe('Before each', function()
  local test_val = false

  before_each(function()
    test_val = true
  end)

  it('is called', function()
    assert(test_val)
  end)
end)

describe('After each', function()
  local test_val = false

  after_each(function()
    test_val = true
  end)

  -- The first test exists only so an after_each fires before the second
  -- test inspects the flag.
  it('runs once to fire an after_each and then', function() end)

  it('checks if after_each was called', function()
    assert(test_val)
  end)
end)

describe('Both before and after each', function()
  local test_val = 0

  before_each(function()
    test_val = test_val + 1
  end)

  after_each(function()
    test_val = test_val + 1
  end)

  it('checks if both were called', function() end)
  it('runs again just to be sure', function() end)

  -- Two completed tests contribute two hook increments each (4), plus the
  -- before_each of this third test makes 5.
  it('checks the value', function()
    assert(test_val == 5)
  end)
end)

describe('Before_each on describe blocks', function()
  local test_val = 0

  before_each(function()
    test_val = test_val + 1
  end)

  describe('A block', function()
    -- The parent's before_each also runs for tests in nested blocks.
    it('derps', function()
      assert(test_val == 1)
    end)
    it('herps', function()
      assert(test_val == 2)
    end)
  end)
end)

describe('Before_each on describe blocks, part II', function()
  local test_val = 0

  before_each(function()
    test_val = test_val + 1
  end)

  it('checks the value', function()
    assert.are.equal(1, test_val)
  end)

  describe('A block', function()
    before_each(function()
      test_val = test_val + 1
    end)

    it('derps', function() end) --add two: two before-eaches
    it('herps', function() end)

    -- 1 (first test) + 2 x 2 (derps/herps) + 2 (this test's hooks) = 7.
    it('checks the value', function()
      assert.equal(7, test_val)
    end)
  end)
end)

describe('A failing test', function()
  it('explodes', function()
    -- The failure is captured by has.error, so the spec itself passes.
    assert.has.error(function() assert(false, 'this should fail') end)
  end)
end)

describe('tagged tests #test', function()
  it('runs', function()
    assert(true)
  end)
end)

describe('Testing test order', function()
  local testorder, level = '', 0

  -- Append `desc` to the trace, indented two spaces per nesting level.
  local function report_level(desc)
    testorder = testorder .. string.rep(' ', level * 2) .. desc .. '\n'
  end

  describe('describe, level A', function()
    setup(function()
      report_level('setup A')
      level = level + 1
    end)

    teardown(function()
      level = level - 1
      report_level('teardown A')
    end)

    before_each(function()
      report_level('before_each A')
      level = level + 1
    end)

    after_each(function()
      level = level - 1
      report_level('after_each A')
    end)

    it('tests A one', function()
      report_level('test A one')
    end)

    it('tests A two', function()
      report_level('test A two')
    end)

    describe('describe level B', function()
      setup(function()
        report_level('setup B')
        level = level + 1
      end)

      teardown(function()
        level = level - 1
        report_level('teardown B')
      end)

      before_each(function()
        report_level('before_each B')
        level = level + 1
      end)

      after_each(function()
        level = level - 1
        report_level('after_each B')
      end)

      it('tests B one', function()
        report_level('test B one')
      end)

      it('tests B two', function()
        report_level('test B two')
      end)
    end)

    it('tests A three', function()
      report_level('test A three')
    end)
  end)

  describe('Test testorder', function()
    it('verifies order of execution', function()
      -- The long-bracket string is whitespace sensitive: every line must
      -- match the indentation produced by report_level exactly, so the
      -- literal is not indented with the surrounding code.
      local expected = [[setup A
  before_each A
    test A one
  after_each A
  before_each A
    test A two
  after_each A
  setup B
    before_each A
      before_each B
        test B one
      after_each B
    after_each A
    before_each A
      before_each B
        test B two
      after_each B
    after_each A
  teardown B
  before_each A
    test A three
  after_each A
teardown A
]]
      assert.is.equal(expected, testorder)
    end)
  end)
end)
describe('finally callback is called in case of success', function()
  local f = spy.new(function() end)

  it('write variable in finally', function()
    finally(f)
    assert.is_true(true)
  end)

  -- The spy records the invocation performed after the previous test passed.
  it('ensures finally was called', function()
    assert.spy(f).was_called(1)
  end)
end)

describe('tests environment', function()
  -- Globals created inside a describe block (and in its hooks) are visible
  -- to that block and its children, but insulated from sibling blocks.
  global = 'global'

  setup(function()
    globalsetup = 'globalsetup'
  end)

  teardown(function()
    globalteardown = 'globalteardown'
  end)

  before_each(function()
    globalbefore = 'globalbefore'
  end)

  after_each(function()
    globalafter = 'globalafter'
  end)

  it('cannot access globals which have not been created yet', function()
    -- after_each/teardown have not run before the first test.
    assert.equal(nil, globalafter)
    assert.equal(nil, globalteardown)
    notglobal = 'notglobal'
  end)

  it('can access globals', function()
    assert.equal('global', global)
    assert.equal('globalsetup', globalsetup)
    assert.equal('globalbefore', globalbefore)
    assert.equal('globalafter', globalafter)
    notglobal = 'notglobal'
  end)

  it('cannot access globals set in siblings', function()
    assert.equal(nil, notglobal)
  end)

  describe('can access parent globals', function()
    it('from child', function()
      assert.equal('global', global)
      assert.equal('globalsetup', globalsetup)
      assert.equal('globalbefore', globalbefore)
      assert.equal('globalafter', globalafter)
    end)
  end)

  describe('cannot access globals set in children', function()
    it('has a global', function()
      notglobal = 'notglobal'
    end)

    -- Runs at block-definition time, before the test above has executed.
    assert.are.equal(notglobal, nil)
  end)
end)

describe('tests clean environment', function()
  it('globals in previous describe are not available', function()
    assert.is_nil(global)
    assert.is_nil(globalsetup)
    assert.is_nil(globalbefore)
    assert.is_nil(globalafter)
    assert.is_nil(globalteardown)
  end)
end)

-- describe/it can be invoked with a string argument followed by a separate
-- function argument list thanks to Lua's paren-less call syntax.
describe 'tests syntactic sugar' (function()
  it 'works' (function()
    assert(true)
  end)
end)

describe('tests aliases', function()
  local test_val = 0

  context('runs context alias', function()
    setup(function()
      test_val = test_val + 1
    end)

    before_each(function()
      test_val = test_val + 1
    end)

    after_each(function()
      test_val = test_val + 1
    end)

    teardown(function()
      test_val = test_val + 1
    end)

    spec('runs spec alias', function()
      test_val = test_val + 1
    end)

    test('runs test alias', function()
      test_val = test_val + 1
    end)
  end)

  -- setup + teardown + 2 tests x (before_each + body + after_each) = 8.
  it('checks aliases were executed', function()
    assert.is_equal(8, test_val)
  end)
end)

describe('tests unsupported functions', function()
  it('it block does not have file executor', function()
    assert.is_nil(file)
  end)

  -- Inside an `it` block the structural DSL functions are replaced with
  -- stubs that raise descriptive errors.
  it('it block throws error on describe/context', function()
    assert.has_error(describe, "'describe' not supported inside current context block")
    assert.has_error(context, "'context' not supported inside current context block")
  end)

  it('it block throws error on insulate/expose', function()
    assert.has_error(insulate, "'insulate' not supported inside current context block")
    assert.has_error(expose, "'expose' not supported inside current context block")
  end)

  it('it block throws error on it/spec/test', function()
    assert.has_error(it, "'it' not supported inside current context block")
    assert.has_error(spec, "'spec' not supported inside current context block")
    assert.has_error(test, "'test' not supported inside current context block")
  end)

  it('it block throws error on setup/before_each/after_each/teardown', function()
    assert.has_error(setup, "'setup' not supported inside current context block")
    assert.has_error(before_each, "'before_each' not supported inside current context block")
    assert.has_error(after_each, "'after_each' not supported inside current context block")
    assert.has_error(teardown, "'teardown' not supported inside current context block")
  end)

  it('it block throws error on lazy/strict setup/teardown', function()
    assert.has_error(lazy_setup, "'lazy_setup' not supported inside current context block")
    assert.has_error(lazy_teardown, "'lazy_teardown' not supported inside current context block")
    assert.has_error(strict_setup, "'strict_setup' not supported inside current context block")
    assert.has_error(strict_teardown, "'strict_teardown' not supported inside current context block")
  end)

  it('it block throws error on randomize', function()
    assert.has_error(randomize, "'randomize' not supported inside current context block")
  end)

  it('finaly block throws error on pending', function()
    finally(function()
      assert.has_error(pending, "'pending' not supported inside current context block")
    end)
  end)
end)
describe('tests unsupported functions in setup/before_each/after_each/teardown', function()
  -- Shared body installed as every lifecycle hook below: inside hooks the
  -- structural DSL must be stubbed out with descriptive errors, and neither
  -- `file` nor `finally` is available.
  local function testUnsupported()
    assert.is_nil(file)
    assert.is_nil(finally)
    assert.has_error(randomize, "'randomize' not supported inside current context block")
    assert.has_error(describe, "'describe' not supported inside current context block")
    assert.has_error(context, "'context' not supported inside current context block")
    assert.has_error(insulate, "'insulate' not supported inside current context block")
    assert.has_error(expose, "'expose' not supported inside current context block")
    assert.has_error(pending, "'pending' not supported inside current context block")
    assert.has_error(it, "'it' not supported inside current context block")
    assert.has_error(spec, "'spec' not supported inside current context block")
    assert.has_error(test, "'test' not supported inside current context block")
    assert.has_error(setup, "'setup' not supported inside current context block")
    assert.has_error(before_each, "'before_each' not supported inside current context block")
    assert.has_error(after_each, "'after_each' not supported inside current context block")
    assert.has_error(teardown, "'teardown' not supported inside current context block")
    assert.has_error(lazy_setup, "'lazy_setup' not supported inside current context block")
    assert.has_error(lazy_teardown, "'lazy_teardown' not supported inside current context block")
    assert.has_error(strict_setup, "'strict_setup' not supported inside current context block")
    assert.has_error(strict_teardown, "'strict_teardown' not supported inside current context block")
  end

  setup(testUnsupported)
  teardown(testUnsupported)
  before_each(testUnsupported)
  after_each(testUnsupported)

  it('tests nothing, all tests performed by support functions', function()
  end)
end)

describe('tests strict setup/teardown', function()
  local setup_count = 0
  local teardown_count = 0

  describe('in describe with no tests', function()
    strict_setup(function()
      setup_count = setup_count + 1
    end)

    strict_teardown(function()
      teardown_count = teardown_count + 1
    end)
  end)

  -- Strict hooks fire even when their block contains no tests.
  it('executes setup/teardown in previous block with no tests', function()
    assert.is_equal(1, setup_count)
    assert.is_equal(1, teardown_count)
  end)
end)

describe('tests lazy setup/teardown not run if no tests found in block', function()
  lazy_setup(function()
    assert(false, 'setup should not execute since no tests')
  end)

  lazy_teardown(function()
    assert(false, 'teardown should not execute since no tests')
  end)
end)

describe('tests lazy setup/teardown in describe with no tests', function()
  local setup_count = 0
  local teardown_count = 0

  describe('with nested describe with no tests', function()
    lazy_setup(function()
      setup_count = setup_count + 1
    end)

    lazy_teardown(function()
      teardown_count = teardown_count + 1
    end)

    describe('with inner nested describe with no tests', function()
      lazy_setup(function()
        setup_count = setup_count + 1
      end)

      lazy_teardown(function()
        teardown_count = teardown_count + 1
      end)
    end)
  end)

  it('does not run setup/teardown', function()
    assert.is_equal(0, setup_count)
    assert.is_equal(0, teardown_count)
  end)
end)

describe('tests lazy setup/teardown with nested tests', function()
  local setup_count = 0
  local teardown_count = 0

  lazy_setup(function()
    setup_count = setup_count + 1
  end)

  lazy_teardown(function()
    teardown_count = teardown_count + 1
  end)

  describe('nested describe with tests', function()
    lazy_setup(function()
      setup_count = setup_count + 1
    end)

    lazy_teardown(function()
      teardown_count = teardown_count + 1
    end)

    -- Lazy setups run once, on demand, before the first test of the block.
    it('runs all setups', function()
      assert.is_equal(2, setup_count)
    end)

    it('runs setups only once', function()
      assert.is_equal(2, setup_count)
    end)

    it('runs teardown after all tests complete', function()
      assert.is_equal(0, teardown_count)
    end)
  end)

  describe('second nested describe', function()
    lazy_teardown(function()
      teardown_count = teardown_count + 1
    end)

    it('verify teardown ran after previous describe completes', function()
      assert.is_equal(1, teardown_count)
    end)
  end)

  describe('another nested describe with tests', function()
    lazy_setup(function()
      setup_count = setup_count + 1
    end)

    it('runs setup for new describe', function()
      assert.is_equal(3, setup_count)
    end)

    it('verify teardown ran after previous describe completes', function()
      assert.is_equal(2, teardown_count)
    end)
  end)
end)
| {
"content_hash": "fe7c0da74b883ef0b8afe20b1d128537",
"timestamp": "",
"source": "github",
"line_count": 537,
"max_line_length": 101,
"avg_line_length": 25.8268156424581,
"alnum_prop": 0.6655851178888168,
"repo_name": "istr/busted",
"id": "a726740877d20602bfe13be34feb74205a99d548",
"size": "13900",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "spec/core_spec.lua",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "93"
},
{
"name": "Lua",
"bytes": "208031"
},
{
"name": "MoonScript",
"bytes": "483"
},
{
"name": "Shell",
"bytes": "6341"
}
],
"symlink_target": ""
} |
"""The tests for the Sun component."""
from datetime import datetime, timedelta
from unittest.mock import patch
from pytest import mark
import homeassistant.components.sun as sun
from homeassistant.const import EVENT_STATE_CHANGED
import homeassistant.core as ha
from homeassistant.setup import async_setup_component
import homeassistant.util.dt as dt_util
async def test_setting_rising(hass):
    """Test retrieving sun setting and rising.

    Sets up the sun component at a fixed instant, then independently computes
    each "next event" with astral and compares it against the state attributes
    the component exposes.
    """
    utc_now = datetime(2016, 11, 1, 8, 0, 0, tzinfo=dt_util.UTC)
    with patch("homeassistant.helpers.condition.dt_util.utcnow", return_value=utc_now):
        await async_setup_component(
            hass, sun.DOMAIN, {sun.DOMAIN: {sun.CONF_ELEVATION: 0}}
        )

    await hass.async_block_till_done()
    state = hass.states.get(sun.ENTITY_ID)

    from astral import Astral

    astral = Astral()
    utc_today = utc_now.date()

    latitude = hass.config.latitude
    longitude = hass.config.longitude

    def first_event_after(event_utc):
        """Return the first event strictly after utc_now.

        Scans day by day starting from yesterday (mod = -1), mirroring the
        original six copy-pasted while-loops this helper replaces.
        """
        mod = -1
        while True:
            candidate = event_utc(utc_today + timedelta(days=mod))
            if candidate > utc_now:
                return candidate
            mod += 1

    next_dawn = first_event_after(lambda day: astral.dawn_utc(day, latitude, longitude))
    next_dusk = first_event_after(lambda day: astral.dusk_utc(day, latitude, longitude))
    # Solar midnight/noon depend only on longitude.
    next_midnight = first_event_after(lambda day: astral.solar_midnight_utc(day, longitude))
    next_noon = first_event_after(lambda day: astral.solar_noon_utc(day, longitude))
    next_rising = first_event_after(
        lambda day: astral.sunrise_utc(day, latitude, longitude)
    )
    next_setting = first_event_after(
        lambda day: astral.sunset_utc(day, latitude, longitude)
    )

    assert next_dawn == dt_util.parse_datetime(
        state.attributes[sun.STATE_ATTR_NEXT_DAWN]
    )
    assert next_dusk == dt_util.parse_datetime(
        state.attributes[sun.STATE_ATTR_NEXT_DUSK]
    )
    assert next_midnight == dt_util.parse_datetime(
        state.attributes[sun.STATE_ATTR_NEXT_MIDNIGHT]
    )
    assert next_noon == dt_util.parse_datetime(
        state.attributes[sun.STATE_ATTR_NEXT_NOON]
    )
    assert next_rising == dt_util.parse_datetime(
        state.attributes[sun.STATE_ATTR_NEXT_RISING]
    )
    assert next_setting == dt_util.parse_datetime(
        state.attributes[sun.STATE_ATTR_NEXT_SETTING]
    )
async def test_state_change(hass):
    """Test if the state changes at next setting/rising."""
    now = datetime(2016, 6, 1, 8, 0, 0, tzinfo=dt_util.UTC)
    with patch("homeassistant.helpers.condition.dt_util.utcnow", return_value=now):
        await async_setup_component(
            hass, sun.DOMAIN, {sun.DOMAIN: {sun.CONF_ELEVATION: 0}}
        )

    await hass.async_block_till_done()

    # The component advertises when the sun will next rise.
    next_rising = dt_util.parse_datetime(
        hass.states.get(sun.ENTITY_ID).attributes[sun.STATE_ATTR_NEXT_RISING]
    )
    assert next_rising is not None

    # Before that moment the sun is below the horizon ...
    assert hass.states.get(sun.ENTITY_ID).state == sun.STATE_BELOW_HORIZON

    # ... and just after it, the state flips above the horizon.
    hass.bus.async_fire(
        ha.EVENT_TIME_CHANGED, {ha.ATTR_NOW: next_rising + timedelta(seconds=5)}
    )
    await hass.async_block_till_done()
    assert hass.states.get(sun.ENTITY_ID).state == sun.STATE_ABOVE_HORIZON

    # Moving the configured location keeps the entity updating correctly.
    with patch("homeassistant.helpers.condition.dt_util.utcnow", return_value=now):
        await hass.config.async_update(longitude=hass.config.longitude + 90)
        await hass.async_block_till_done()

    assert hass.states.get(sun.ENTITY_ID).state == sun.STATE_ABOVE_HORIZON
async def test_norway_in_june(hass):
    """Test location in Norway where the sun doesn't set in summer."""
    # Tromsø-ish coordinates, well above the Arctic Circle.
    hass.config.latitude = 69.6
    hass.config.longitude = 18.8

    june = datetime(2016, 6, 1, tzinfo=dt_util.UTC)
    with patch("homeassistant.helpers.condition.dt_util.utcnow", return_value=june):
        assert await async_setup_component(
            hass, sun.DOMAIN, {sun.DOMAIN: {sun.CONF_ELEVATION: 0}}
        )

    state = hass.states.get(sun.ENTITY_ID)
    assert state is not None

    # Midnight sun: the next rising/setting are weeks away, in late July.
    expected_rising = datetime(2016, 7, 25, 23, 23, 39, tzinfo=dt_util.UTC)
    expected_setting = datetime(2016, 7, 26, 22, 19, 1, tzinfo=dt_util.UTC)
    assert (
        dt_util.parse_datetime(state.attributes[sun.STATE_ATTR_NEXT_RISING])
        == expected_rising
    )
    assert (
        dt_util.parse_datetime(state.attributes[sun.STATE_ATTR_NEXT_SETTING])
        == expected_setting
    )
    assert state.state == sun.STATE_ABOVE_HORIZON
@mark.skip
async def test_state_change_count(hass):
    """Count the number of state change events in a location."""
    # Skipped because it's a bit slow. Has been validated with
    # multiple latitudes and dates
    hass.config.latitude = 10
    hass.config.longitude = 0

    now = datetime(2016, 6, 1, tzinfo=dt_util.UTC)

    with patch("homeassistant.helpers.condition.dt_util.utcnow", return_value=now):
        assert await async_setup_component(
            hass, sun.DOMAIN, {sun.DOMAIN: {sun.CONF_ELEVATION: 0}}
        )

    events = []

    @ha.callback
    def state_change_listener(event):
        # Only count state changes for the sun entity itself.
        if event.data.get("entity_id") == "sun.sun":
            events.append(event)

    hass.bus.async_listen(EVENT_STATE_CHANGED, state_change_listener)
    await hass.async_block_till_done()

    # Advance simulated time one second at a time over a full day.
    for _ in range(24 * 60 * 60):
        now += timedelta(seconds=1)
        hass.bus.async_fire(ha.EVENT_TIME_CHANGED, {ha.ATTR_NOW: now})

    await hass.async_block_till_done()

    # NOTE(review): 721 bounds the entity to roughly one update every two
    # minutes over 24h -- confirm against the sun component's update cadence.
    assert len(events) < 721
| {
"content_hash": "f48f239a88d617fa6a63ebbac28db5bb",
"timestamp": "",
"source": "github",
"line_count": 192,
"max_line_length": 87,
"avg_line_length": 30.630208333333332,
"alnum_prop": 0.6340758374426118,
"repo_name": "leppa/home-assistant",
"id": "e04de7e2578ff514811d6dcbf0b466a912bfaf79",
"size": "5881",
"binary": false,
"copies": "3",
"ref": "refs/heads/dev",
"path": "tests/components/sun/test_init.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "18957740"
},
{
"name": "Shell",
"bytes": "6846"
}
],
"symlink_target": ""
} |
<?php
namespace Illuminate\Mail\Transport;
use Swift_Transport;
use Swift_Mime_Message;
use Swift_Events_EventListener;
use GuzzleHttp\ClientInterface;
class MandrillTransport implements Swift_Transport
{
    /**
     * Guzzle client instance.
     *
     * @var \GuzzleHttp\ClientInterface
     */
    protected $client;

    /**
     * The Mandrill API key.
     *
     * @var string
     */
    protected $key;

    /**
     * Create a new Mandrill transport instance.
     *
     * @param  \GuzzleHttp\ClientInterface  $client
     * @param  string  $key
     * @return void
     */
    public function __construct(ClientInterface $client, $key)
    {
        $this->client = $client;
        $this->key = $key;
    }

    /**
     * {@inheritdoc}
     */
    public function isStarted()
    {
        return true;
    }

    /**
     * {@inheritdoc}
     */
    public function start()
    {
        return true;
    }

    /**
     * {@inheritdoc}
     */
    public function stop()
    {
        return true;
    }

    /**
     * {@inheritdoc}
     *
     * Posts the raw MIME message to Mandrill's send-raw endpoint.
     */
    public function send(Swift_Mime_Message $message, &$failedRecipients = null)
    {
        $data = [
            'key' => $this->key,
            'to' => $this->getToAddresses($message),
            'raw_message' => (string) $message,
            'async' => false,
        ];

        // Guzzle 6 renamed the option used for form-encoded request bodies
        // from "body" to "form_params"; support both major versions.
        if (version_compare(ClientInterface::VERSION, '6') === 1) {
            $options = ['form_params' => $data];
        } else {
            $options = ['body' => $data];
        }

        return $this->client->post('https://mandrillapp.com/api/1.0/messages/send-raw.json', $options);
    }

    /**
     * Get all the addresses this message should be sent to.
     *
     * Note that Mandrill still respects CC, BCC headers in raw message itself.
     *
     * @param  Swift_Mime_Message  $message
     * @return array
     */
    protected function getToAddresses(Swift_Mime_Message $message)
    {
        $to = [];

        // Collect every recipient type; the raw message already carries the
        // CC/BCC headers, but Mandrill needs the complete delivery list.
        foreach ([$message->getTo(), $message->getCc(), $message->getBcc()] as $recipients) {
            if ($recipients) {
                $to = array_merge($to, array_keys($recipients));
            }
        }

        return $to;
    }

    /**
     * {@inheritdoc}
     */
    public function registerPlugin(Swift_Events_EventListener $plugin)
    {
        //
    }

    /**
     * Get the API key being used by the transport.
     *
     * @return string
     */
    public function getKey()
    {
        return $this->key;
    }

    /**
     * Set the API key being used by the transport.
     *
     * @param  string  $key
     * @return string
     */
    public function setKey($key)
    {
        return $this->key = $key;
    }
}
| {
"content_hash": "75c10997eda6c1fd05be8c4b640e8fbd",
"timestamp": "",
"source": "github",
"line_count": 139,
"max_line_length": 103,
"avg_line_length": 20.381294964028775,
"alnum_prop": 0.5139428168019767,
"repo_name": "jack-webster/framework",
"id": "b730478f9c8ea6197ed6fb9ef2d5dc72c03b79d8",
"size": "2833",
"binary": false,
"copies": "6",
"ref": "refs/heads/5.1",
"path": "src/Illuminate/Mail/Transport/MandrillTransport.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "2704"
},
{
"name": "PHP",
"bytes": "2831985"
},
{
"name": "Shell",
"bytes": "9458"
}
],
"symlink_target": ""
} |
"""
create_group(parent::Union{File,Group}, path::AbstractString; properties...)
Create a new `Group` at `path` under the `parent` object. Optional keyword
arguments include any keywords that that belong to
[`LinkCreateProperties`](@ref) or [`GroupCreateProperties`](@ref).
"""
function create_group(
parent::Union{File,Group},
path::AbstractString,
lcpl::LinkCreateProperties,
gcpl::GroupCreateProperties;
pv...
)
if !isempty(pv)
depwarn(
"Passing properties as positional and keyword arguments in the same call is deprecated.",
:create_group
)
setproperties!(gcpl; pv...)
end
return Group(API.h5g_create(parent, path, lcpl, gcpl, API.H5P_DEFAULT), file(parent))
end
# Keyword-only convenience form: builds the link/group creation property
# lists from the keywords, then dispatches to the explicit-property method.
function create_group(parent::Union{File,Group}, path::AbstractString; pv...)
    lcpl = _link_properties(path)
    gcpl = GroupCreateProperties()
    try
        # setproperties! consumes the keywords it recognises and returns the
        # remainder; any leftovers are not valid creation properties.
        pv = setproperties!(lcpl, gcpl; pv...)
        isempty(pv) || error("invalid keyword options $pv")
        return create_group(parent, path, lcpl, gcpl)
    finally
        # Property lists wrap HDF5 handles; release them even on error.
        close(lcpl)
        close(gcpl)
    end
end
"""
open_group(parent::Union{File,Group}, path::AbstratString)
Open an existing `Group` at `path` under the `parent` object.
"""
function open_group(
parent::Union{File,Group},
name::AbstractString,
gapl::GroupAccessProperties=GroupAccessProperties()
)
return Group(API.h5g_open(checkvalid(parent), name, gapl), file(parent))
end
# Get the root group
root(h5file::File) = open_group(h5file, "/")
root(obj::Union{Group,Dataset}) = open_group(file(obj), "/")
# Low-level group metadata, as returned by the H5G info call.
group_info(obj::Union{Group,File}) = API.h5g_get_info(checkvalid(obj))
# Number of objects directly contained in a group (or a file's root group).
Base.length(obj::Union{Group,File}) = Int(API.h5g_get_num_objs(checkvalid(obj)))
Base.isempty(x::Union{Group,File}) = length(x) == 0
# filename and name
name(obj::Union{File,Group,Dataset,Datatype}) = API.h5i_get_name(checkvalid(obj))
# iteration by objects
# Iterator state is `(n, prev_obj)`: the 1-based index of the next child and
# the object yielded on the previous step (closed here before opening the next
# one, so iterating a large group does not accumulate open HDF5 handles).
function Base.iterate(parent::Union{File,Group}, iter=(1, nothing))
    n, prev_obj = iter
    prev_obj ≢ nothing && close(prev_obj)
    n > length(parent) && return nothing
    # HDF5 indexing is 0-based, hence `n - 1`.
    obj = h5object(
        API.h5o_open_by_idx(
            checkvalid(parent), ".", idx_type(parent), order(parent), n - 1, API.H5P_DEFAULT
        ),
        parent
    )
    return (obj, (n + 1, obj))
end
# Return the object containing `obj`: the enclosing group, or the `File`
# itself when `obj` is (or sits directly under) the root group.
function Base.parent(obj::Union{File,Group,Dataset})
    f = file(obj)
    path = name(obj)
    # The root group's name is "/" (length 1); it is its own container,
    # so answer with the file.
    if length(path) == 1
        return f
    end
    parentname = dirname(path)
    if !isempty(parentname)
        # Reuse the already-computed parent path rather than calling
        # dirname(path) a second time (the original recomputed it here).
        return open_object(f, parentname)
    else
        return root(f)
    end
end
# Path manipulation
#
# Split `path` at its first '/' into `(head, tail)`.  A leading slash yields
# `("/", rest)` (the root group); a path with no slash yields `(path, "")`.
function split1(path::AbstractString)
    slash = findfirst('/', path)
    isnothing(slash) && return path, ""
    if slash == 1 # leading '/' denotes the root group
        return "/", path[2:end]
    end
    head_end = prevind(path, slash)   # last index before the separator
    tail_start = nextind(path, slash) # first index after the separator
    return path[1:head_end], path[tail_start:end]
end
# Walk `path` one component at a time, confirming each link exists before
# descending; H5Lexists on a multi-component path would fail outright if any
# intermediate component were missing.
function Base.haskey(
    parent::Union{File,Group},
    path::AbstractString,
    lapl::LinkAccessProperties=LinkAccessProperties()
)
    # recursively check each step of the path exists
    # see https://portal.hdfgroup.org/display/HDF5/H5L_EXISTS
    checkvalid(parent)
    first, rest = split1(path)
    if first == "/"
        # Absolute path: restart the walk from the file's root group.
        parent = root(parent)
    elseif !API.h5l_exists(parent, first, lapl)
        return false
    end
    exists = true
    if !isempty(rest)
        obj = parent[first]
        exists = haskey(obj, rest, lapl)
        # Close the intermediate handle opened purely for this check.
        close(obj)
    end
    return exists
end
# Names of the links directly contained in `x`, in the group's native
# index/order as given by idx_type/order.
function Base.keys(x::Union{Group,File})
    checkvalid(x)
    # Pre-size the output: the number of links equals length(x).
    children = sizehint!(String[], length(x))
    API.h5l_iterate(x, idx_type(x), order(x)) do _, name, _
        push!(children, unsafe_string(name))
        return API.herr_t(0) # zero return tells HDF5 to keep iterating
    end
    return children
end
# Remove the link at `path` under `parent`.
delete_object(
    parent::Union{File,Group},
    path::AbstractString,
    lapl::LinkAccessProperties=LinkAccessProperties()
) = API.h5l_delete(checkvalid(parent), path, lapl)
# Delete an object via its handle by unlinking its last path component from
# its parent group.
delete_object(obj::Object) = delete_object(parent(obj), ascii(split(name(obj), "/")[end])) # FIXME: remove ascii?
# Move links
# Relocate the link `src_name` under `src` to `dest_name` under `dest`.
move_link(
    src::Union{File,Group},
    src_name::AbstractString,
    dest::Union{File,Group},
    dest_name::AbstractString=src_name,
    lapl::LinkAccessProperties=LinkAccessProperties(),
    lcpl::LinkCreateProperties=LinkCreateProperties()
) = API.h5l_move(checkvalid(src), src_name, checkvalid(dest), dest_name, lcpl, lapl)
# Convenience overload: rename a link within the same parent.
move_link(
    parent::Union{File,Group},
    src_name::AbstractString,
    dest_name::AbstractString,
    lapl::LinkAccessProperties=LinkAccessProperties(),
    lcpl::LinkCreateProperties=LinkCreateProperties()
) = API.h5l_move(checkvalid(parent), src_name, parent, dest_name, lcpl, lapl)
"""
create_external(source::Union{HDF5.File, HDF5.Group}, source_relpath, target_filename, target_path;
lcpl_id=HDF5.API.H5P_DEFAULT, lapl_id=HDF5.H5P.DEFAULT)
Create an external link such that `source[source_relpath]` points to `target_path` within the file
with path `target_filename`; Calls `[H5Lcreate_external](https://www.hdfgroup.org/HDF5/doc/RM/RM_H5L.html#Link-CreateExternal)`.
"""
function create_external(
source::Union{File,Group},
source_relpath,
target_filename,
target_path;
lcpl_id=API.H5P_DEFAULT,
lapl_id=API.H5P_DEFAULT
)
API.h5l_create_external(
target_filename, target_path, source, source_relpath, lcpl_id, lapl_id
)
nothing
end
| {
"content_hash": "350d36e76ead2a3bb91d7efc31e7c230",
"timestamp": "",
"source": "github",
"line_count": 176,
"max_line_length": 128,
"avg_line_length": 31.318181818181817,
"alnum_prop": 0.6649129172714079,
"repo_name": "JuliaIO/HDF5.jl",
"id": "c5a480bd4b94dc44ed066adcb4497b7c6e6411e0",
"size": "5514",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/groups.jl",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Julia",
"bytes": "778505"
}
],
"symlink_target": ""
} |
using System.Linq;
using NSubstitute.Core.Arguments;
using NSubstitute.Specs.Infrastructure;
using NUnit.Framework;
namespace NSubstitute.Specs.Arguments
{
    /// <summary>
    /// Specs for <see cref="ArrayContentsArgumentMatcher"/>: element-wise matching
    /// of an array argument against an array of argument specifications, plus the
    /// ToString/Format output used when describing calls.
    /// </summary>
    public class ArrayContentsArgumentMatcherSpecs : ConcernFor<ArrayContentsArgumentMatcher>
    {
        // One stubbed specification per expected array element (set up in Context()).
        private IArgumentSpecification[] _argumentSpecifications;
        // The sample array argument the matcher is exercised with.
        private string[] _argument;
        [Test]
        public void Should_match_when_all_argument_specs_match()
        {
            Assert.That(sut.IsSatisfiedBy(_argument), Is.True);
        }
        [Test]
        public void Should_not_match_when_not_all_argument_specs_match()
        {
            // Break the second element so its spec (stubbed for "meh") no longer matches.
            _argument[1] = "doh";
            Assert.That(sut.IsSatisfiedBy(_argument), Is.False);
        }
        [Test]
        public void Should_not_match_when_length_of_arrays_differ()
        {
            Assert.That(sut.IsSatisfiedBy(new[] { _argument[0] }), Is.False);
        }
        [Test]
        public void Should_not_match_when_argument_is_null()
        {
            Assert.That(sut.IsSatisfiedBy(null), Is.False);
        }
        [Test]
        public void Should_incorporate_toString_of_all_specifications_in_toString()
        {
            var expected = _argumentSpecifications[0].ToString() + ", " + _argumentSpecifications[1].ToString();
            Assert.That(sut.ToString(), Is.EqualTo(expected));
        }
        [Test]
        public void Should_format_each_spec_and_argument_when_they_are_the_same_length()
        {
            _argumentSpecifications[0].stub(x => x.FormatArgument(_argument[0])).Return("first");
            _argumentSpecifications[1].stub(x => x.FormatArgument(_argument[1])).Return("second");
            var expected = "first, second";
            var result = sut.Format(_argument, true);
            Assert.That(result, Is.EqualTo(expected));
        }
        [Test]
        public void Should_handle_formatting_when_there_are_more_arguments_than_specs()
        {
            // Extra arguments beyond the specs fall back to the default formatter.
            _argumentSpecifications[0].stub(x => x.FormatArgument(_argument[0])).Return("first");
            _argumentSpecifications[1].stub(x => x.FormatArgument(_argument[1])).Return("second");
            var argsWithExtra = _argument.Concat(new[] { "doh" }).ToArray();
            var expected = "first, second, " + DefaultFormat("doh", true);
            var result = sut.Format(argsWithExtra, true);
            Assert.That(result, Is.EqualTo(expected));
        }
        [Test]
        public void Should_handle_formatting_when_there_are_less_arguments_than_specs()
        {
            // Specs without a corresponding argument are simply omitted.
            _argumentSpecifications[0].stub(x => x.FormatArgument(_argument[0])).Return("first");
            _argumentSpecifications[1].stub(x => x.FormatArgument(_argument[1])).Return("second");
            var lessArgsThanSpecs = new[] { _argument[0] };
            var expected = "first";
            var result = sut.Format(lessArgsThanSpecs, true);
            Assert.That(result, Is.EqualTo(expected));
        }
        // Shared fixture: two specs, each stubbed to accept the matching element.
        public override void Context()
        {
            _argument = new[] { "blah", "meh" };
            _argumentSpecifications = new[] { mock<IArgumentSpecification>(), mock<IArgumentSpecification>() };
            _argumentSpecifications[0].stub(x => x.IsSatisfiedBy(_argument[0])).Return(true);
            _argumentSpecifications[1].stub(x => x.IsSatisfiedBy(_argument[1])).Return(true);
        }
        // Mirrors the production fallback formatting for un-specced arguments.
        private string DefaultFormat(string text, bool highlight)
        {
            return new ArgumentFormatter().Format(text, highlight);
        }
        public override ArrayContentsArgumentMatcher CreateSubjectUnderTest()
        {
            return new ArrayContentsArgumentMatcher(_argumentSpecifications);
        }
    }
} | {
"content_hash": "33e2569af8739e4fdf5cfc7f3108dfec",
"timestamp": "",
"source": "github",
"line_count": 98,
"max_line_length": 112,
"avg_line_length": 37.89795918367347,
"alnum_prop": 0.6060850834679591,
"repo_name": "mrinaldi/NSubstitute",
"id": "b4a6f1cf60a42af98f929185690a73e6f23a27d1",
"size": "3716",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "Source/NSubstitute.Specs/Arguments/ArrayContentsArgumentMatcherSpecs.cs",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "475"
},
{
"name": "C#",
"bytes": "648230"
},
{
"name": "CSS",
"bytes": "20686"
},
{
"name": "F#",
"bytes": "10323"
},
{
"name": "HTML",
"bytes": "19127"
},
{
"name": "JavaScript",
"bytes": "1451"
},
{
"name": "Ruby",
"bytes": "18871"
},
{
"name": "Shell",
"bytes": "199"
}
],
"symlink_target": ""
} |
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1">
<title>coinductive-reals: Not compatible 👼</title>
<link rel="shortcut icon" type="image/png" href="../../../../../favicon.png" />
<link href="../../../../../bootstrap.min.css" rel="stylesheet">
<link href="../../../../../bootstrap-custom.css" rel="stylesheet">
<link href="//maxcdn.bootstrapcdn.com/font-awesome/4.2.0/css/font-awesome.min.css" rel="stylesheet">
<script src="../../../../../moment.min.js"></script>
<!-- HTML5 Shim and Respond.js IE8 support of HTML5 elements and media queries -->
<!-- WARNING: Respond.js doesn't work if you view the page via file:// -->
<!--[if lt IE 9]>
<script src="https://oss.maxcdn.com/html5shiv/3.7.2/html5shiv.min.js"></script>
<script src="https://oss.maxcdn.com/respond/1.4.2/respond.min.js"></script>
<![endif]-->
</head>
<body>
<div class="container">
<div class="navbar navbar-default" role="navigation">
<div class="container-fluid">
<div class="navbar-header">
<a class="navbar-brand" href="../../../../.."><i class="fa fa-lg fa-flag-checkered"></i> Coq bench</a>
</div>
<div id="navbar" class="collapse navbar-collapse">
<ul class="nav navbar-nav">
<li><a href="../..">clean / released</a></li>
<li class="active"><a href="">8.8.1 / coinductive-reals - 8.9.0</a></li>
</ul>
</div>
</div>
</div>
<div class="article">
<div class="row">
<div class="col-md-12">
<a href="../..">« Up</a>
<h1>
coinductive-reals
<small>
8.9.0
<span class="label label-info">Not compatible 👼</span>
</small>
</h1>
<p>📅 <em><script>document.write(moment("2022-10-16 04:20:23 +0000", "YYYY-MM-DD HH:mm:ss Z").fromNow());</script> (2022-10-16 04:20:23 UTC)</em><p>
<h2>Context</h2>
<pre># Packages matching: installed
# Name # Installed # Synopsis
base-bigarray base
base-num base Num library distributed with the OCaml compiler
base-ocamlbuild base OCamlbuild binary and libraries distributed with the OCaml compiler
base-threads base
base-unix base
camlp5 7.14 Preprocessor-pretty-printer of OCaml
conf-findutils 1 Virtual package relying on findutils
conf-perl 2 Virtual package relying on perl
coq 8.8.1 Formal proof management system
num 0 The Num library for arbitrary-precision integer and rational arithmetic
ocaml 4.02.3 The OCaml compiler (virtual package)
ocaml-base-compiler 4.02.3 Official 4.02.3 release
ocaml-config 1 OCaml Switch Configuration
ocamlfind 1.9.5 A library manager for OCaml
# opam file:
opam-version: "2.0"
maintainer: "[email protected]"
homepage: "http://www.cs.ru.nl/~milad/ETrees/coinductive-field/"
license: "LGPL"
build: [make "-j%{jobs}%"]
install: [make "install"]
remove: ["rm" "-R" "%{lib}%/coq/user-contrib/CoinductiveReals"]
depends: [
"ocaml"
"coq" {>= "8.9" & < "8.10~"}
"coq-qarith-stern-brocot" {>= "8.9" & < "8.10~"}
]
tags: [
"keyword: real numbers"
"keyword: co-inductive types"
"keyword: co-recursion"
"keyword: exact arithmetic"
"category: Mathematics/Arithmetic and Number Theory/Real numbers"
"date: 2007-04-24"
]
authors: [
"Milad Niqui <[email protected]> [http://www.cs.ru.nl/~milad]"
]
bug-reports: "https://github.com/coq-contribs/coinductive-reals/issues"
dev-repo: "git+https://github.com/coq-contribs/coinductive-reals.git"
synopsis: "Real numbers as coinductive ternary streams"
description: """
See the README file"""
flags: light-uninstall
url {
src: "https://github.com/coq-contribs/coinductive-reals/archive/v8.9.0.tar.gz"
checksum: "md5=fd1d8b1c54ca2d14b9a8a3492ba21f34"
}
</pre>
<h2>Lint</h2>
<dl class="dl-horizontal">
<dt>Command</dt>
<dd><code>true</code></dd>
<dt>Return code</dt>
<dd>0</dd>
</dl>
<h2>Dry install 🏜️</h2>
<p>Dry install with the current Coq version:</p>
<dl class="dl-horizontal">
<dt>Command</dt>
<dd><code>opam install -y --show-action coq-coinductive-reals.8.9.0 coq.8.8.1</code></dd>
<dt>Return code</dt>
<dd>5120</dd>
<dt>Output</dt>
<dd><pre>[NOTE] Package coq is already installed (current version is 8.8.1).
The following dependencies couldn't be met:
- coq-coinductive-reals -> coq >= 8.9 -> ocaml >= 4.05.0
base of this switch (use `--unlock-base' to force)
Your request can't be satisfied:
- No available version of coq satisfies the constraints
No solution found, exiting
</pre></dd>
</dl>
<p>Dry install without Coq/switch base, to test if the problem was incompatibility with the current Coq/OCaml version:</p>
<dl class="dl-horizontal">
<dt>Command</dt>
<dd><code>opam remove -y coq; opam install -y --show-action --unlock-base coq-coinductive-reals.8.9.0</code></dd>
<dt>Return code</dt>
<dd>0</dd>
</dl>
<h2>Install dependencies</h2>
<dl class="dl-horizontal">
<dt>Command</dt>
<dd><code>true</code></dd>
<dt>Return code</dt>
<dd>0</dd>
<dt>Duration</dt>
<dd>0 s</dd>
</dl>
<h2>Install 🚀</h2>
<dl class="dl-horizontal">
<dt>Command</dt>
<dd><code>true</code></dd>
<dt>Return code</dt>
<dd>0</dd>
<dt>Duration</dt>
<dd>0 s</dd>
</dl>
<h2>Installation size</h2>
<p>No files were installed.</p>
<h2>Uninstall 🧹</h2>
<dl class="dl-horizontal">
<dt>Command</dt>
<dd><code>true</code></dd>
<dt>Return code</dt>
<dd>0</dd>
<dt>Missing removes</dt>
<dd>
none
</dd>
<dt>Wrong removes</dt>
<dd>
none
</dd>
</dl>
</div>
</div>
</div>
<hr/>
<div class="footer">
<p class="text-center">
Sources are on <a href="https://github.com/coq-bench">GitHub</a> © Guillaume Claret 🐣
</p>
</div>
</div>
<script src="https://ajax.googleapis.com/ajax/libs/jquery/1.11.1/jquery.min.js"></script>
<script src="../../../../../bootstrap.min.js"></script>
</body>
</html>
| {
"content_hash": "8184f3f8b5da18a2962ec05d5c6c3abd",
"timestamp": "",
"source": "github",
"line_count": 177,
"max_line_length": 159,
"avg_line_length": 41.6271186440678,
"alnum_prop": 0.5503528773072747,
"repo_name": "coq-bench/coq-bench.github.io",
"id": "748522a8d431227799a9d8dadb455bab53ac72e9",
"size": "7393",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "clean/Linux-x86_64-4.02.3-2.0.6/released/8.8.1/coinductive-reals/8.9.0.html",
"mode": "33188",
"license": "mit",
"language": [],
"symlink_target": ""
} |
<?php
/**
* Created by PhpStorm.
* User: breiti
* Date: 14/06/16
* Time: 21:32
*/
namespace BreiteSeite\IP\IPv6;
class Address
{
    /**
     * Arbitrary-precision integer holding the 128-bit address value.
     *
     * @var \GMP
     */
    private $gmpAddress;
    /**
     * Address constructor.
     *
     * Private: instances are created via named factories such as
     * {@see createIPv4Mapped()}.
     *
     * @param \GMP $gmpAddress
     */
    private function __construct(\GMP $gmpAddress)
    {
        $this->gmpAddress = $gmpAddress;
    }
    /**
     * Build an IPv4-mapped IPv6 address from an IPv4 address: 80 zero bits,
     * followed by 16 one bits, followed by the 32-bit IPv4 address.
     */
    public static function createIPv4Mapped(\BreiteSeite\IP\IPv4\Address $ipv4Address) : self
    {
        $mappedIpBinaryString = str_repeat('0', 80) . str_repeat('1', 16) . $ipv4Address->getAsBinary();
        return new self(gmp_init($mappedIpBinaryString, 2));
    }
    /**
     * NOTE(review): the original doc said "16 characters", but the value is
     * zero-padded to the full 128 bits (see str_pad below).
     *
     * @return string returns the binary representation of the IP with leading zeros (128 characters)
     */
    public function getAsBinary(): string
    {
        return str_pad(gmp_strval($this->gmpAddress, 2), 128, '0', STR_PAD_LEFT);
    }
}
| {
"content_hash": "9cb15eeecfa8727a8afb17ccf46b3f15",
"timestamp": "",
"source": "github",
"line_count": 42,
"max_line_length": 104,
"avg_line_length": 21.547619047619047,
"alnum_prop": 0.6033149171270719,
"repo_name": "BreiteSeite/ip",
"id": "1f577bd7b696ad58d82b9e4a3a2fa9ed313869df",
"size": "905",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/IPv6/Address.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "PHP",
"bytes": "5320"
}
],
"symlink_target": ""
} |
import { Component, OnInit } from '@angular/core';
import { ActivatedRoute, Router } from '@angular/router';
import { Hero } from '../model/hero';
import { HeroDetailService } from './hero-detail.service';
@Component({
  selector: 'app-hero-detail',
  templateUrl: './hero-detail.component.html',
  styleUrls: ['./hero-detail.component.css'],
  providers: [HeroDetailService],
})
export class HeroDetailComponent implements OnInit {
  constructor(private heroDetailService: HeroDetailService, private route: ActivatedRoute, private router: Router) {}

  hero!: Hero;

  /** Load (or initialise) the hero whenever the route's `id` param changes. */
  ngOnInit(): void {
    this.route.paramMap.subscribe((params) => this.getHero(params.get('id')));
  }

  /** Fetch the hero for `id`; with no id, start a blank hero (the "add" case). */
  private getHero(id: string | null): void {
    if (!id) {
      this.hero = { id: 0, name: '' } as Hero;
      return;
    }
    this.heroDetailService.getHero(id).subscribe((fetched) => {
      if (!fetched) {
        // Unknown id: bail back to the list instead of showing an empty form.
        this.gotoList();
        return;
      }
      this.hero = fetched;
    });
  }

  /** Persist the current hero, then return to the list view. */
  save(): void {
    this.heroDetailService.saveHero(this.hero).subscribe(() => this.gotoList());
  }

  /** Abandon edits and return to the list view. */
  cancel() {
    this.gotoList();
  }

  /** Navigate one level up, relative to the current route. */
  gotoList() {
    this.router.navigate(['../'], { relativeTo: this.route });
  }
}
| {
"content_hash": "7db43ff59db1a93873c493516469b98d",
"timestamp": "",
"source": "github",
"line_count": 50,
"max_line_length": 117,
"avg_line_length": 23.86,
"alnum_prop": 0.6261525565800503,
"repo_name": "thymikee/jest-preset-angular",
"id": "e9c0420ecae02aaad8625e8d9b453635c375614f",
"size": "1193",
"binary": false,
"copies": "5",
"ref": "refs/heads/main",
"path": "examples/example-app-monorepo/apps/app1/src/app/hero/hero-detail.component.ts",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "5403"
},
{
"name": "HTML",
"bytes": "171"
},
{
"name": "JavaScript",
"bytes": "22487"
},
{
"name": "SCSS",
"bytes": "54"
},
{
"name": "Shell",
"bytes": "836"
},
{
"name": "TypeScript",
"bytes": "80734"
}
],
"symlink_target": ""
} |
package command
import (
"github.com/codegangsta/cli"
"github.com/denkhaus/cloudia/engine"
)
// NewKillCommand registers the "kill" CLI command, whose action calls
// Kill on all nodes, or the specified one(s).
func (c *Commander) NewKillCommand() {
	c.Register(cli.Command{
		Name: "kill",
		Usage: "Kill the containers",
		Action: func(ctx *cli.Context) {
			c.Execute(func(node engine.Node) error {
				return node.Kill()
			}, ctx)
		},
	})
}
| {
"content_hash": "8c0a15c03b0891e0d4715f2e1c9918dd",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 72,
"avg_line_length": 21.42105263157895,
"alnum_prop": 0.6781326781326781,
"repo_name": "denkhaus/cloudia",
"id": "f8bd6b43911b25ac34e70b7f9a8c187d446b3d6a",
"size": "407",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "command/kill_command.go",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Go",
"bytes": "56276"
}
],
"symlink_target": ""
} |
# Base image: Fedora 30 "build" variant for the ARM v7 Artik 10 board.
FROM balenalib/artik10-fedora:30-build

# Versions of Node.js and Yarn installed below.
ENV NODE_VERSION 15.14.0
ENV YARN_VERSION 1.22.4

# Fetch the Node release signing key (trying several keyservers), download and
# checksum-verify the Node.js armv7 tarball, install it under /usr/local,
# then download, GPG-verify, and install Yarn under /opt/yarn.
RUN for key in \
	6A010C5166006599AA17F08146C2130DFD2497F5 \
	; do \
		gpg --keyserver pgp.mit.edu --recv-keys "$key" || \
		gpg --keyserver keyserver.pgp.com --recv-keys "$key" || \
		gpg --keyserver ha.pool.sks-keyservers.net --recv-keys "$key" ; \
	done \
	&& curl -SLO "http://nodejs.org/dist/v$NODE_VERSION/node-v$NODE_VERSION-linux-armv7l.tar.gz" \
	&& echo "1cef461a73a124dd3f212e2b8230638f4d16b5cc0915425ffad8aabac050d9fb node-v$NODE_VERSION-linux-armv7l.tar.gz" | sha256sum -c - \
	&& tar -xzf "node-v$NODE_VERSION-linux-armv7l.tar.gz" -C /usr/local --strip-components=1 \
	&& rm "node-v$NODE_VERSION-linux-armv7l.tar.gz" \
	&& curl -fSLO --compressed "https://yarnpkg.com/downloads/$YARN_VERSION/yarn-v$YARN_VERSION.tar.gz" \
	&& curl -fSLO --compressed "https://yarnpkg.com/downloads/$YARN_VERSION/yarn-v$YARN_VERSION.tar.gz.asc" \
	&& gpg --batch --verify yarn-v$YARN_VERSION.tar.gz.asc yarn-v$YARN_VERSION.tar.gz \
	&& mkdir -p /opt/yarn \
	&& tar -xzf yarn-v$YARN_VERSION.tar.gz -C /opt/yarn --strip-components=1 \
	&& ln -s /opt/yarn/bin/yarn /usr/local/bin/yarn \
	&& ln -s /opt/yarn/bin/yarn /usr/local/bin/yarnpkg \
	&& rm yarn-v$YARN_VERSION.tar.gz.asc yarn-v$YARN_VERSION.tar.gz \
	&& npm config set unsafe-perm true -g --unsafe-perm \
	&& rm -rf /tmp/*

# Default command: just an informational message (images are meant to be
# extended with an explicit CMD).
CMD ["echo","'No CMD command was set in Dockerfile! Details about CMD command could be found in Dockerfile Guide section in our Docs. Here's the link: https://balena.io/docs"]

# Smoke-test the installed Node stack using balena's published test script.
RUN curl -SLO "https://raw.githubusercontent.com/balena-io-library/base-images/8accad6af708fca7271c5c65f18a86782e19f877/scripts/assets/tests/[email protected]" \
	&& echo "Running test-stack@node" \
	&& chmod +x [email protected] \
	&& bash [email protected] \
	&& rm -rf [email protected]

# Record image details for the `balena-info` command.
RUN [ ! -d /.balena/messages ] && mkdir -p /.balena/messages; echo $'Here are a few details about this Docker image (For more information please visit https://www.balena.io/docs/reference/base-images/base-images/): \nArchitecture: ARM v7 \nOS: Fedora 30 \nVariant: build variant \nDefault variable(s): UDEV=off \nThe following software stack is preinstalled: \nNode.js v15.14.0, Yarn v1.22.4 \nExtra features: \n- Easy way to install packages with `install_packages <package-name>` command \n- Run anywhere with cross-build feature (for ARM only) \n- Keep the container idling with `balena-idle` command \n- Show base image details with `balena-info` command' > /.balena/messages/image-info
RUN echo $'#!/bin/sh.real\nbalena-info\nrm -f /bin/sh\ncp /bin/sh.real /bin/sh\n/bin/sh "$@"' > /bin/sh-shim \
&& chmod +x /bin/sh-shim \
&& cp /bin/sh /bin/sh.real \
&& mv /bin/sh-shim /bin/sh | {
"content_hash": "c97d69cbe8afd64803b840ec341438f3",
"timestamp": "",
"source": "github",
"line_count": 41,
"max_line_length": 691,
"avg_line_length": 66.65853658536585,
"alnum_prop": 0.7087449688986461,
"repo_name": "nghiant2710/base-images",
"id": "ac44ea37466ad832e50a91fcb31203c3fa4b39d8",
"size": "2754",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "balena-base-images/node/artik10/fedora/30/15.14.0/build/Dockerfile",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "144558581"
},
{
"name": "JavaScript",
"bytes": "16316"
},
{
"name": "Shell",
"bytes": "368690"
}
],
"symlink_target": ""
} |
<?xml version="1.0" encoding="UTF-8"?>
<!-- GWT module definition for the Zeus web platform front-end.
     Compiled output is renamed to "zeus_platform". -->
<module rename-to='zeus_platform'>
	<inherits name='com.sencha.gxt.ui.GXT' />
	<inherits name="org.moxieapps.gwt.highcharts.Highcharts"/>
	<inherits name='com.google.gwt.place.Place' />
	<!-- Bind the shortcut-cell appearance interface to its default implementation. -->
	<replace-with class="com.taobao.zeus.web.platform.client.theme.shortcut.ShortcutCellDefaultAppearance">
		<when-type-is class="com.taobao.zeus.web.platform.client.widget.ShortcutCell.ShortcutCellAppearance" />
	</replace-with>
	<entry-point class='com.taobao.zeus.web.platform.client.PlatformEntry'/>
	<source path='client'/>
	<source path='shared'/>
	<!-- Optional single-permutation / locale overrides, left disabled: -->
	<!-- <set-property name="user.agent" value="safari"/> -->
	<!-- <set-property name="gxt.user.agent" value="safari5"/> -->
	<!-- <set-property name="user.locale" value="zh"/> -->
</module>
| {
"content_hash": "cc81f36120e08883f6ab9e8a6e281ac8",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 107,
"avg_line_length": 43.666666666666664,
"alnum_prop": 0.6946564885496184,
"repo_name": "wwzhe/dataworks-zeus",
"id": "301325e25fe1ddaba39f349898f83afb17e25a19",
"size": "786",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "web/src/main/java/com/taobao/zeus/web/platform/Platform.gwt.xml",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "156"
},
{
"name": "CSS",
"bytes": "134149"
},
{
"name": "HTML",
"bytes": "57754"
},
{
"name": "Java",
"bytes": "2276733"
},
{
"name": "JavaScript",
"bytes": "737755"
},
{
"name": "Protocol Buffer",
"bytes": "7710"
},
{
"name": "Ruby",
"bytes": "302"
},
{
"name": "Shell",
"bytes": "152"
}
],
"symlink_target": ""
} |
package org.simpleflatmapper.jdbc;
import org.simpleflatmapper.util.CheckedConsumer;
import java.lang.reflect.Type;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.Collection;
/**
 * A {@link Crud} bound to a {@link TransactionTemplate}: every operation
 * runs the corresponding delegate call inside a transaction on a connection
 * supplied by the template.
 *
 * @param <T> the target type
 * @param <K> the key type
 */
public class ConnectedCrud<T, K> {
    // Supplies connections and transaction demarcation for each call.
    private final TransactionTemplate transactionTemplate;
    // The connection-scoped CRUD implementation being wrapped.
    private final Crud<T, K> delegate;
    /**
     * @param transactionTemplate supplies connections/transactions per call
     * @param delegate the connection-scoped CRUD implementation to wrap
     */
    public ConnectedCrud(TransactionTemplate transactionTemplate, Crud<T, K> delegate) {
        this.transactionTemplate = transactionTemplate;
        this.delegate = delegate;
    }
    /**
     * insert value into the db through a connection obtained from the
     * transaction template.
     *
     * @param value the value
     * @throws SQLException if an error occurs
     */
    public void create(final T value) throws SQLException {
        transactionTemplate
                .doInTransaction(new SQLFunction<Connection, Object>() {
                    @Override
                    public Object apply(Connection connection) throws SQLException {
                        delegate.create(connection, value);
                        return null;
                    }
                });
    }
    /**
     * insert values into the db through a connection obtained from the
     * transaction template.
     *
     * @param values the values
     * @throws SQLException if an error occurs
     */
    public void create(final Collection<T> values) throws SQLException {
        transactionTemplate
                .doInTransaction(new SQLFunction<Connection, Object>() {
                    @Override
                    public Object apply(Connection connection) throws SQLException {
                        delegate.create(connection, values);
                        return null;
                    }
                });
    }
    /**
     * insert value into the db through a connection obtained from the
     * transaction template.
     * Callback keyConsumer with the generated key if one was.
     *
     * @param value the value
     * @param keyConsumer the key consumer
     * @param <RH> the type of keyConsumer
     * @return the keyConsumer
     * @throws SQLException if an error occurs
     */
    public <RH extends CheckedConsumer<? super K>> RH create(final T value, final RH keyConsumer) throws SQLException {
        transactionTemplate
                .doInTransaction(new SQLFunction<Connection, Object>() {
                    @Override
                    public Object apply(Connection connection) throws SQLException {
                        delegate.create(connection, value, keyConsumer);
                        return null;
                    }
                });
        return keyConsumer;
    }
    /**
     * insert values into the db through a connection obtained from the
     * transaction template.
     * Callback keyConsumer for the generated keys.
     *
     * @param values the values
     * @param keyConsumer the key consumer
     * @param <RH> the type of keyConsumer
     * @return the keyConsumer
     * @throws SQLException if an error occurs
     */
    public <RH extends CheckedConsumer<? super K>> RH create(final Collection<T> values, final RH keyConsumer) throws SQLException {
        transactionTemplate
                .doInTransaction(new SQLFunction<Connection, Object>() {
                    @Override
                    public Object apply(Connection connection) throws SQLException {
                        delegate.create(connection, values, keyConsumer);
                        return null;
                    }
                });
        return keyConsumer;
    }
    /**
     * retrieve the object with the specified key.
     *
     * @param key the key
     * @return the object or null if not found
     * @throws SQLException if an error occurs
     */
    public T read(final K key) throws SQLException {
        return
                transactionTemplate
                        .doInTransaction(new SQLFunction<Connection, T>() {
                            @Override
                            public T apply(Connection connection) throws SQLException {
                                return delegate.read(connection, key);
                            }
                        });
    }
    /**
     * retrieve the objects with the specified keys and pass them to the consumer.
     *
     * @param keys the keys
     * @param consumer the handler that is callback for each row
     * @param <RH> the type of consumer
     * @return the consumer
     * @throws SQLException if an error occurs
     */
    public <RH extends CheckedConsumer<? super T>> RH read(final Collection<K> keys, final RH consumer) throws SQLException {
        transactionTemplate
                .doInTransaction(new SQLFunction<Connection, Object>() {
                    @Override
                    public Object apply(Connection connection) throws SQLException {
                        delegate.read(connection, keys, consumer);
                        return null;
                    }
                });
        return consumer;
    }
    /**
     * update the object.
     *
     * @param value the object
     * @throws SQLException if an error occurs
     */
    public void update(final T value) throws SQLException {
        transactionTemplate
                .doInTransaction(new SQLFunction<Connection, Object>() {
                    @Override
                    public Object apply(Connection connection) throws SQLException {
                        delegate.update(connection, value);
                        return null;
                    }
                });
    }
    /**
     * update the objects.
     *
     * @param values the objects
     * @throws SQLException if an error occurs
     */
    public void update(final Collection<T> values) throws SQLException {
        transactionTemplate
                .doInTransaction(new SQLFunction<Connection, Object>() {
                    @Override
                    public Object apply(Connection connection) throws SQLException {
                        delegate.update(connection, values);
                        return null;
                    }
                });
    }
    /**
     * delete the object with the specified key.
     *
     * @param key the key
     * @throws SQLException if an error occurs
     */
    public void delete(final K key) throws SQLException {
        transactionTemplate
                .doInTransaction(new SQLFunction<Connection, Object>() {
                    @Override
                    public Object apply(Connection connection) throws SQLException {
                        delegate.delete(connection, key);
                        return null;
                    }
                });
    }
    /**
     * delete the objects with the specified keys.
     *
     * @param keys the keys
     * @throws SQLException if an error occurs
     */
    public void delete(final Collection<K> keys) throws SQLException {
        transactionTemplate
                .doInTransaction(new SQLFunction<Connection, Object>() {
                    @Override
                    public Object apply(Connection connection) throws SQLException {
                        delegate.delete(connection, keys);
                        return null;
                    }
                });
    }
    /**
     * UPSERT only supported on Mysql
     * @param value the value
     * @throws SQLException if an error occurs
     * @throws UnsupportedOperationException if the underlying database does not support upsert
     */
    public void createOrUpdate(final T value) throws SQLException {
        transactionTemplate
                .doInTransaction(new SQLFunction<Connection, Object>() {
                    @Override
                    public Object apply(Connection connection) throws SQLException {
                        delegate.createOrUpdate(connection, value);
                        return null;
                    }
                });
    }
    /**
     * UPSERT only supported on Mysql
     * @param values the values to upsert
     * @throws SQLException if an error occurs
     * @throws UnsupportedOperationException if the underlying database does not support upsert
     */
    public void createOrUpdate(final Collection<T> values) throws SQLException {
        transactionTemplate
                .doInTransaction(new SQLFunction<Connection, Object>() {
                    @Override
                    public Object apply(Connection connection) throws SQLException {
                        delegate.createOrUpdate(connection, values);
                        return null;
                    }
                });
    }
    /**
     * UPSERT only supported on Mysql and Postgres 9.5.
     * Used the callback with caution has Mysql will return an incremented id event for when no insert actually occurred.
     * @param value the value to upsert
     * @param keyConsumer generated key consumer
     * @param <RH> the keyConsumer type
     * @return the keyConsumer
     * @throws SQLException if an error occurs
     */
    public <RH extends CheckedConsumer<? super K>> RH createOrUpdate(final T value, final RH keyConsumer) throws SQLException {
        transactionTemplate
                .doInTransaction(new SQLFunction<Connection, Object>() {
                    @Override
                    public Object apply(Connection connection) throws SQLException {
                        delegate.createOrUpdate(connection, value, keyConsumer);
                        return null;
                    }
                });
        return keyConsumer;
    }
    /**
     * UPSERT only supported on Mysql and Postgres 9.5.
     * Used the callback with caution has Mysql will return an incremented id event for when no insert actually occurred.
     * @param values the values to insert
     * @param keyConsumer generated key consumer
     * @param <RH> the keyConsumer type
     * @return the keyConsumer
     * @throws SQLException if an error occurs
     */
    public <RH extends CheckedConsumer<? super K>> RH createOrUpdate(final Collection<T> values, final RH keyConsumer) throws SQLException {
        transactionTemplate
                .doInTransaction(new SQLFunction<Connection, Object>() {
                    @Override
                    public Object apply(Connection connection) throws SQLException {
                        delegate.createOrUpdate(connection, values, keyConsumer);
                        return null;
                    }
                });
        return keyConsumer;
    }
    /**
     * @return the wrapped connection-scoped {@link Crud}
     */
    public Crud<T, K> crud() {
        return delegate;
    }
    /**
     * Build a select query restricted by the given where clause, bound to the
     * transaction template.
     *
     * @param whereClause the SQL where clause
     * @param paramClass the type of the query parameter
     * @param <P> the parameter type
     * @return a query that executes within this instance's transactions
     */
    public <P> ConnectedSelectQuery<T, P> where(final String whereClause, final Type paramClass) {
        SelectQuery<T, P> selectQuery = delegate.where(whereClause, paramClass);
        return new ConnectedSelectQuery<T, P>(selectQuery, transactionTemplate);
    }
}
| {
"content_hash": "0ce689e7b98ffbb9d9e8e2a90c9a350d",
"timestamp": "",
"source": "github",
"line_count": 296,
"max_line_length": 140,
"avg_line_length": 34.7195945945946,
"alnum_prop": 0.5844117933249002,
"repo_name": "arnaudroger/SimpleFlatMapper",
"id": "05b105c3fd8ba11ed911122c26a84b23a0441771",
"size": "10277",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sfm-jdbc/src/main/java/org/simpleflatmapper/jdbc/ConnectedCrud.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Java",
"bytes": "4203603"
},
{
"name": "Kotlin",
"bytes": "4824"
},
{
"name": "PLpgSQL",
"bytes": "4730"
},
{
"name": "Shell",
"bytes": "10595"
},
{
"name": "XSLT",
"bytes": "1080"
}
],
"symlink_target": ""
} |
package org.mrgeo.mapalgebra.parser;
/**
 * Parser tree node that holds a single arbitrary Object value.
 * Simple mutable holder: {@link #setValue(Object)} stores the value and
 * {@link #getValue()} returns it unchanged.
 */
public class ParserVariableNode extends ParserNode
{
  // The wrapped value; null until setValue is called.
  private Object value;

  /**
   * @return the value held by this node (may be null)
   */
  public Object getValue()
  {
    return value;
  }

  /**
   * @param value the value to hold
   */
  public void setValue(Object value)
  {
    this.value = value;
  }
}
| {
"content_hash": "18f2aae9524def6fe9d4884d1e18f10a",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 50,
"avg_line_length": 13.722222222222221,
"alnum_prop": 0.6842105263157895,
"repo_name": "akarmas/mrgeo",
"id": "be445cbd0a6ac7d500683505843c5f0865436a8b",
"size": "849",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "mrgeo-core/src/main/java/org/mrgeo/mapalgebra/parser/ParserVariableNode.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "FreeMarker",
"bytes": "2033"
},
{
"name": "Java",
"bytes": "3051262"
},
{
"name": "Python",
"bytes": "111947"
},
{
"name": "Scala",
"bytes": "644165"
},
{
"name": "Shell",
"bytes": "76009"
}
],
"symlink_target": ""
} |
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>com.spotify</groupId>
<artifactId>foss-root</artifactId>
<version>5</version>
</parent>
<artifactId>docker-client</artifactId>
<version>8.11.8-SNAPSHOT</version>
<packaging>jar</packaging>
<name>docker-client</name>
<description>A docker client</description>
<url>https://github.com/spotify/docker-client</url>
<licenses>
<license>
<name>The Apache Software License, Version 2.0</name>
<url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>
<distribution>repo</distribution>
</license>
</licenses>
<scm>
<connection>scm:git:https://github.com/spotify/docker-client</connection>
<developerConnection>scm:git:[email protected]:spotify/docker-client</developerConnection>
<url>https://github.com/spotify/docker-client</url>
<tag>HEAD</tag>
</scm>
<developers>
<developer>
<id>dano</id>
<name>Daniel Norberg</name>
<email>[email protected]</email>
</developer>
<developer>
<id>ryan</id>
<name>Ryan Culbertson</name>
<email>[email protected]</email>
</developer>
<developer>
<id>drewc</id>
<name>Drew Csillag</name>
<email>[email protected]</email>
</developer>
<developer>
<id>philipcristiano</id>
<name>Philip Cristiano</name>
<email>[email protected]</email>
</developer>
<developer>
<id>rohan</id>
<name>Rohan Singh</name>
<email>[email protected]</email>
</developer>
<developer>
<id>dxia</id>
<name>David Xia</name>
<email>[email protected]</email>
</developer>
<developer>
<id>mattbrown</id>
<name>Matt Brown</name>
<email>[email protected]</email>
</developer>
</developers>
<repositories>
<repository>
<id>jcenter</id>
<url>http://jcenter.bintray.com</url>
</repository>
</repositories>
<dependencies>
<!--compile deps-->
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
<version>1.7.22</version>
</dependency>
<dependency>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
<version>20.0</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.jaxrs</groupId>
<artifactId>jackson-jaxrs-json-provider</artifactId>
<version>2.9.4</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.datatype</groupId>
<artifactId>jackson-datatype-guava</artifactId>
<version>2.9.4</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
<version>2.9.4</version>
</dependency>
<dependency>
<groupId>org.glassfish.jersey.core</groupId>
<artifactId>jersey-client</artifactId>
<version>2.22.2</version>
</dependency>
<dependency>
<groupId>org.glassfish.jersey.connectors</groupId>
<artifactId>jersey-apache-connector</artifactId>
<version>2.22.2</version>
</dependency>
<dependency>
<groupId>org.glassfish.jersey.media</groupId>
<artifactId>jersey-media-json-jackson</artifactId>
<version>2.22.2</version>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-compress</artifactId>
<version>1.9</version>
</dependency>
<dependency>
<groupId>commons-io</groupId>
<artifactId>commons-io</artifactId>
<version>2.5</version>
</dependency>
<dependency>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpclient</artifactId>
<version>4.5</version>
</dependency>
<dependency>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpcore</artifactId>
<version>4.4.5</version>
</dependency>
<dependency>
<groupId>com.github.jnr</groupId>
<artifactId>jnr-unixsocket</artifactId>
<version>0.18</version>
</dependency>
<dependency>
<groupId>commons-lang</groupId>
<artifactId>commons-lang</artifactId>
<version>2.6</version>
</dependency>
<dependency>
<groupId>org.bouncycastle</groupId>
<artifactId>bcpkix-jdk15on</artifactId>
<version>1.59</version>
</dependency>
<dependency>
<groupId>com.google.auto.value</groupId>
<artifactId>auto-value</artifactId>
<version>1.3</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>com.google.code.findbugs</groupId>
<artifactId>jsr305</artifactId>
<version>3.0.1</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>com.google.code.findbugs</groupId>
<artifactId>annotations</artifactId>
<version>3.0.1</version>
<scope>provided</scope>
</dependency>
<!-- TODO we should pull out the Google Cloud support to a new library -->
<dependency>
<groupId>com.google.auth</groupId>
<artifactId>google-auth-library-oauth2-http</artifactId>
<version>0.6.0</version>
<optional>true</optional>
<exclusions>
<exclusion>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
</exclusion>
</exclusions>
</dependency>
<!--test deps-->
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>4.12</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-core</artifactId>
<version>1.10.19</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.hamcrest</groupId>
<artifactId>hamcrest-library</artifactId>
<version>1.3</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>ch.qos.logback</groupId>
<artifactId>logback-classic</artifactId>
<version>1.2.1</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>com.google.jimfs</groupId>
<artifactId>jimfs</artifactId>
<version>1.0</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>joda-time</groupId>
<artifactId>joda-time</artifactId>
<version>2.8.2</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.awaitility</groupId>
<artifactId>awaitility</artifactId>
<version>2.0.0</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>com.squareup.okhttp3</groupId>
<artifactId>mockwebserver</artifactId>
<version>3.8.0</version>
<scope>test</scope>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-release-plugin</artifactId>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<version>3.5.1</version>
<configuration>
<source>1.7</source>
<target>1.7</target>
</configuration>
</plugin>
<plugin>
<artifactId>maven-checkstyle-plugin</artifactId>
<configuration>
<includeTestSourceDirectory>true</includeTestSourceDirectory>
<violationSeverity>warning</violationSeverity>
<excludes>**\/AutoValue_*.java</excludes>
</configuration>
</plugin>
<plugin>
<artifactId>maven-enforcer-plugin</artifactId>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-project-info-reports-plugin</artifactId>
<version>2.8</version>
</plugin>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>findbugs-maven-plugin</artifactId>
<version>3.0.1</version>
<configuration>
<effort>Max</effort>
<threshold>Low</threshold>
<xmlOutput>true</xmlOutput>
<findbugsXmlOutputDirectory>${project.build.directory}/findbugs
</findbugsXmlOutputDirectory>
<excludeFilterFile>${project.basedir}/findbugs-exclude.xml</excludeFilterFile>
</configuration>
<dependencies>
<dependency>
<groupId>org.apache.ant</groupId>
<artifactId>ant</artifactId>
<version>1.9.6</version>
</dependency>
</dependencies>
<executions>
<execution>
<phase>verify</phase>
<goals>
<goal>check</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-source-plugin</artifactId>
<version>2.4</version>
<executions>
<execution>
<id>attach-sources</id>
<goals>
<goal>jar</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-shade-plugin</artifactId>
<version>3.1.0</version>
<configuration>
<createSourcesJar>true</createSourcesJar>
<artifactSet>
<includes>
<include>com.fasterxml.jackson.**</include>
<include>javax.annotation:*</include>
<include>javax.ws.rs:*</include>
<include>org.glassfish.**</include>
<include>org.jvnet.hk2.**</include>
<include>com.github.jnr:*</include>
<include>org.ow2.asm:*</include>
<include>com.google.guava:**</include>
<include>org.apache.httpcomponents:httpclient</include>
<include>org.apache.httpcomponents:httpcore</include>
<include>asm:asm</include>
</includes>
</artifactSet>
<relocations>
<relocation>
<pattern>javax.annotation</pattern>
<shadedPattern>com.spotify.docker.client.shaded.javax.annotation</shadedPattern>
</relocation>
<relocation>
<pattern>javax.inject</pattern>
<shadedPattern>com.spotify.docker.client.shaded.javax.inject</shadedPattern>
</relocation>
<relocation>
<pattern>javax.ws.rs</pattern>
<shadedPattern>com.spotify.docker.client.shaded.javax.ws.rs</shadedPattern>
</relocation>
<relocation>
<pattern>org.glassfish</pattern>
<shadedPattern>com.spotify.docker.client.shaded.org.glassfish</shadedPattern>
</relocation>
<relocation>
<pattern>org.jvnet.hk2</pattern>
<shadedPattern>com.spotify.docker.client.shaded.org.jvnet.hk2</shadedPattern>
</relocation>
<relocation>
<pattern>com.fasterxml.jackson</pattern>
<shadedPattern>com.spotify.docker.client.shaded.com.fasterxml.jackson</shadedPattern>
</relocation>
<relocation>
<pattern>com.google.common</pattern>
<shadedPattern>com.spotify.docker.client.shaded.com.google.common</shadedPattern>
</relocation>
<relocation>
<pattern>org.apache.http</pattern>
<shadedPattern>com.spotify.docker.client.shaded.org.apache.http</shadedPattern>
</relocation>
<relocation>
<pattern>org.objectweb.asm</pattern>
<shadedPattern>com.spotify.docker.client.shaded.org.objectweb.asm</shadedPattern>
</relocation>
</relocations>
<shadedArtifactAttached>true</shadedArtifactAttached>
<transformers>
<transformer implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer" />
</transformers>
</configuration>
<executions>
<execution>
<phase>package</phase>
<goals>
<goal>shade</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<artifactId>maven-failsafe-plugin</artifactId>
</plugin>
<plugin>
<groupId>org.jacoco</groupId>
<artifactId>jacoco-maven-plugin</artifactId>
<version>0.7.5.201505241946</version>
<executions>
<execution>
<id>jacoco-initialize</id>
<goals>
<goal>prepare-agent</goal>
</goals>
</execution>
<execution>
<id>jacoco-site</id>
<phase>test</phase>
<goals>
<goal>report</goal>
</goals>
</execution>
</executions>
</plugin>
</plugins>
</build>
<reporting>
<plugins>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>findbugs-maven-plugin</artifactId>
<version>3.0.1</version>
<configuration>
<effort>Max</effort>
<failOnError>false</failOnError>
<threshold>Low</threshold>
<xmlOutput>true</xmlOutput>
<findbugsXmlOutputDirectory>${project.build.directory}/findbugs
</findbugsXmlOutputDirectory>
<excludeFilterFile>${project.basedir}/findbugs-exclude.xml</excludeFilterFile>
</configuration>
</plugin>
</plugins>
</reporting>
</project>
| {
"content_hash": "b5e08a5055f71c56125e0c488466274a",
"timestamp": "",
"source": "github",
"line_count": 426,
"max_line_length": 204,
"avg_line_length": 32.44600938967136,
"alnum_prop": 0.6003472724641875,
"repo_name": "rgrunber/docker-client",
"id": "1ef24341fd6fd1c5c6a18a9542fae75de9656d84",
"size": "13822",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pom.xml",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "772"
},
{
"name": "Java",
"bytes": "1137427"
},
{
"name": "Shell",
"bytes": "2967"
}
],
"symlink_target": ""
} |
package com.mangopay.entities;
import com.google.gson.annotations.SerializedName;
import com.mangopay.core.Address;
import com.mangopay.core.EntityBase;
import com.mangopay.core.PlatformCategorization;
import java.util.ArrayList;
/**
 * Client entity. Holds the platform-level configuration of an API client:
 * branding (theme/button colours, logo), contact email lists (tech, admin,
 * fraud, billing), and platform/headquarters details.
 * <p>
 * NOTE(review): all setters return void except
 * {@link #setHeadquartersPhoneNumber(String)}, which is fluent and returns
 * {@code this} — an API inconsistency kept for backward compatibility.
 */
public class Client extends EntityBase {

    /**
     * Client identifier.
     */
    @SerializedName("ClientId")
    private String clientId;

    /**
     * Name of this client.
     */
    @SerializedName("Name")
    private String name;

    /**
     * Your branding colour to use for theme pages.
     */
    @SerializedName("PrimaryThemeColour")
    private String primaryThemeColour;

    /**
     * Your branding colour to use for call to action buttons.
     */
    @SerializedName("PrimaryButtonColour")
    private String primaryButtonColour;

    /**
     * The URL of your MANGOPAY hosted logo.
     */
    @SerializedName("Logo")
    private String logo;

    /**
     * A list of email addresses to use when contacting you for technical issues/communications.
     */
    @SerializedName("TechEmails")
    private ArrayList<String> techEmails;

    /**
     * A list of email addresses to use when contacting you for admin/commercial issues/communications.
     */
    @SerializedName("AdminEmails")
    private ArrayList<String> adminEmails;

    /**
     * A list of email addresses to use when contacting you for fraud/compliance issues/communications.
     */
    @SerializedName("FraudEmails")
    private ArrayList<String> fraudEmails;

    /**
     * A list of email addresses to use when contacting you for billing issues/communications.
     */
    @SerializedName("BillingEmails")
    private ArrayList<String> billingEmails;

    /**
     * A description of what your platform does.
     */
    @SerializedName("PlatformDescription")
    private String platformDescription;

    /**
     * The categorization of the platform
     */
    @SerializedName("PlatformCategorization")
    private PlatformCategorization platformCategorization;

    /**
     * The URL for your website.
     */
    @SerializedName("PlatformURL")
    private String platformUrl;

    /**
     * The address of the company’s headquarters.
     */
    @SerializedName("HeadquartersAddress")
    private Address headquartersAddress;

    /**
     * The phone number of the client's headquarters
     */
    @SerializedName("HeadquartersPhoneNumber")
    private String headquartersPhoneNumber;

    /**
     * The tax (or VAT) number for your company.
     */
    @SerializedName("TaxNumber")
    private String taxNumber;

    /** @return the client identifier */
    public String getClientId() {
        return clientId;
    }

    /** @param clientId the client identifier to set */
    public void setClientId(String clientId) {
        this.clientId = clientId;
    }

    /** @return the client name */
    public String getName() {
        return name;
    }

    /** @param name the client name to set */
    public void setName(String name) {
        this.name = name;
    }

    /** @return the branding colour used for theme pages */
    public String getPrimaryThemeColour() {
        return primaryThemeColour;
    }

    /** @param primaryThemeColour the theme-page branding colour to set */
    public void setPrimaryThemeColour(String primaryThemeColour) {
        this.primaryThemeColour = primaryThemeColour;
    }

    /** @return the branding colour used for call-to-action buttons */
    public String getPrimaryButtonColour() {
        return primaryButtonColour;
    }

    /** @param primaryButtonColour the button branding colour to set */
    public void setPrimaryButtonColour(String primaryButtonColour) {
        this.primaryButtonColour = primaryButtonColour;
    }

    /** @return the URL of the MANGOPAY hosted logo */
    public String getLogo() {
        return logo;
    }

    /** @param logo the logo URL to set */
    public void setLogo(String logo) {
        this.logo = logo;
    }

    /** @return email addresses for technical issues/communications */
    public ArrayList<String> getTechEmails() {
        return techEmails;
    }

    /** @param techEmails the technical contact email addresses to set */
    public void setTechEmails(ArrayList<String> techEmails) {
        this.techEmails = techEmails;
    }

    /** @return email addresses for admin/commercial issues/communications */
    public ArrayList<String> getAdminEmails() {
        return adminEmails;
    }

    /** @param adminEmails the admin contact email addresses to set */
    public void setAdminEmails(ArrayList<String> adminEmails) {
        this.adminEmails = adminEmails;
    }

    /** @return email addresses for fraud/compliance issues/communications */
    public ArrayList<String> getFraudEmails() {
        return fraudEmails;
    }

    /** @param fraudEmails the fraud contact email addresses to set */
    public void setFraudEmails(ArrayList<String> fraudEmails) {
        this.fraudEmails = fraudEmails;
    }

    /** @return email addresses for billing issues/communications */
    public ArrayList<String> getBillingEmails() {
        return billingEmails;
    }

    /** @param billingEmails the billing contact email addresses to set */
    public void setBillingEmails(ArrayList<String> billingEmails) {
        this.billingEmails = billingEmails;
    }

    /** @return the description of what the platform does */
    public String getPlatformDescription() {
        return platformDescription;
    }

    /** @param platformDescription the platform description to set */
    public void setPlatformDescription(String platformDescription) {
        this.platformDescription = platformDescription;
    }

    /** @return the categorization of the platform */
    public PlatformCategorization getPlatformCategorization() {
        return platformCategorization;
    }

    /** @param platformCategorization the platform categorization to set */
    public void setPlatformCategorization(PlatformCategorization platformCategorization) {
        this.platformCategorization = platformCategorization;
    }

    /** @return the website URL */
    public String getPlatformUrl() {
        return platformUrl;
    }

    /** @param platformUrl the website URL to set */
    public void setPlatformUrl(String platformUrl) {
        this.platformUrl = platformUrl;
    }

    /** @return the headquarters address */
    public Address getHeadquartersAddress() {
        return headquartersAddress;
    }

    /** @param headquartersAddress the headquarters address to set */
    public void setHeadquartersAddress(Address headquartersAddress) {
        this.headquartersAddress = headquartersAddress;
    }

    /** @return the tax (or VAT) number */
    public String getTaxNumber() {
        return taxNumber;
    }

    /** @param taxNumber the tax (or VAT) number to set */
    public void setTaxNumber(String taxNumber) {
        this.taxNumber = taxNumber;
    }

    /** @return the headquarters phone number */
    public String getHeadquartersPhoneNumber() {
        return headquartersPhoneNumber;
    }

    /**
     * Fluent setter (unlike the other setters on this class).
     *
     * @param headquartersPhoneNumber the headquarters phone number to set
     * @return this Client, for chaining
     */
    public Client setHeadquartersPhoneNumber(String headquartersPhoneNumber) {
        this.headquartersPhoneNumber = headquartersPhoneNumber;
        return this;
    }
}
| {
"content_hash": "212b6380902355bebc8fa9982329e10e",
"timestamp": "",
"source": "github",
"line_count": 224,
"max_line_length": 103,
"avg_line_length": 24.665178571428573,
"alnum_prop": 0.6743891402714932,
"repo_name": "Mangopay/mangopay2-java-sdk",
"id": "a481657863ddbbc676c0bc6aa1e0d2a5d6cc258c",
"size": "5527",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/main/java/com/mangopay/entities/Client.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Java",
"bytes": "773138"
},
{
"name": "Shell",
"bytes": "478"
}
],
"symlink_target": ""
} |
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
/* FIXME: gdk_cairo_create() is deprecated nowadays */
#define GDK_DISABLE_DEPRECATION_WARNINGS
#include <stdlib.h>
#include <string.h>
#include <glib.h>
#include <gdk/gdkx.h>
#include <gtk/gtk.h>
#include <gst/gst.h>
#include <gst/video/videooverlay.h>
static GtkWidget *video_window = NULL;
static GstElement *sink = NULL;
static gulong embed_xid = 0;
static GdkRGBA trans_color;
static gboolean trans_color_set = FALSE;
/* Paint the overlay for @widget: white background, the colorkey colour over
 * the top 75% (where the video shows through), and a checkerboard over the
 * bottom quarter. Fixes a leak: the cairo context returned by
 * gdk_cairo_create() was never destroyed, leaking one cairo_t per redraw. */
static void
redraw_overlay (GtkWidget * widget)
{
  GtkAllocation allocation;
  GdkWindow *window = gtk_widget_get_window (widget);
  cairo_t *cr;

  cr = gdk_cairo_create (window);

  gtk_widget_get_allocation (widget, &allocation);

  /* clear the whole drawing area to white */
  cairo_set_source_rgb (cr, 1, 1, 1);
  cairo_rectangle (cr, 0, 0, allocation.width, allocation.height);
  cairo_fill (cr);

  if (trans_color_set) {
    guint x, y;
    guint h = allocation.height * 0.75;

    /* colorkey area: the video sink renders into pixels of this colour */
    gdk_cairo_set_source_rgba (cr, &trans_color);
    cairo_rectangle (cr, 0, 0, allocation.width, h);
    cairo_fill (cr);

    for (y = h; y < allocation.height; y++) {
      for (x = 0; x < allocation.width; x++) {
        if (((x & 1) || (y & 1)) && (x & 1) != (y & 1)) {
          /* NOTE(review): cairo_paint() fills the whole clip region, not a
           * single pixel; a 1x1 cairo_rectangle()+cairo_fill() may have
           * been intended -- confirm against the rendered output. */
          cairo_move_to (cr, x, y);
          cairo_paint (cr);
        }
      }
    }
  }

  /* gdk_cairo_create() returns a new context owned by the caller */
  cairo_destroy (cr);
}
/* "configure-event" handler: repaint the overlay after a resize.
 * Returns FALSE so GTK continues with its default configure handling. */
static gboolean
handle_resize_cb (GtkWidget * widget, GdkEventConfigure * event, gpointer data)
{
  redraw_overlay (widget);
  return FALSE;
}
/* "draw" handler: repaint the overlay on every expose.
 * Returns FALSE so other draw handlers still run. */
static gboolean
draw_cb (GtkWidget * widget, cairo_t * cr, gpointer data)
{
  redraw_overlay (widget);
  return FALSE;
}
/* "realize" handler: capture the native window XID into the global
 * embed_xid. main() later hands it to the video sink via
 * gst_video_overlay_set_window_handle(). */
static void
realize_cb (GtkWidget * widget, gpointer data)
{
  GdkWindow *window = gtk_widget_get_window (widget);

  /* This is here just for pedagogical purposes, GDK_WINDOW_XID will call it
   * as well */
  if (!gdk_window_ensure_native (window))
    g_error ("Couldn't create native window needed for GstVideoOverlay!");

  embed_xid = GDK_WINDOW_XID (window);
  g_print ("Window realize: video window XID = %lu\n", embed_xid);
}
/* Bus handler for state-changed messages. On READY->PAUSED of the pipeline,
 * query the sink's "colorkey" (0xRRGGBB packed gint, -1 when unset) and
 * store it in the global trans_color for the overlay redraw.
 *
 * Fix: trans_color is a GdkRGBA, whose channels are doubles in [0,1]. The
 * previous code stored GdkColor-style 16-bit channel values (and never set
 * alpha, which a static GdkRGBA zero-initialises to 0.0 = fully
 * transparent), so the colorkey area was painted wrong. */
static void
msg_state_changed (GstBus * bus, GstMessage * message, GstPipeline * pipeline)
{
  const GstStructure *s;

  s = gst_message_get_structure (message);

  /* We only care about state changed on the pipeline */
  if (s && GST_MESSAGE_SRC (message) == GST_OBJECT_CAST (pipeline)) {
    GstState old, new, pending;
    gint color;

    gst_message_parse_state_changed (message, &old, &new, &pending);

    switch (GST_STATE_TRANSITION (old, new)) {
      case GST_STATE_CHANGE_READY_TO_PAUSED:{
        g_object_get (G_OBJECT (sink), "colorkey", &color, NULL);
        if (color != -1) {
          /* unpack 0xRRGGBB and normalise each 8-bit channel to [0,1] */
          trans_color.red = ((color >> 16) & 0xff) / 255.0;
          trans_color.green = ((color >> 8) & 0xff) / 255.0;
          trans_color.blue = (color & 0xff) / 255.0;
          trans_color.alpha = 1.0;
          trans_color_set = TRUE;
        } else {
          trans_color_set = FALSE;
        }
        handle_resize_cb (video_window, NULL, NULL);
        break;
      }
      default:
        break;
    }
  }
}
/* "delete-event" handler: hide the window, stop the pipeline, and quit the
 * GTK main loop. user_data is the pipeline (wired up in main()). */
static void
window_closed (GtkWidget * widget, GdkEvent * event, gpointer user_data)
{
  GstElement *pipeline = user_data;

  g_print ("stopping\n");
  gtk_widget_hide (widget);
  gst_element_set_state (pipeline, GST_STATE_NULL);
  gtk_main_quit ();
}
/* Idle callback: start playback once the main loop is running. On failure
 * the pipeline is torn down and the UI loop is quit. Returns FALSE so the
 * idle source fires only once. */
static gboolean
start_pipeline (gpointer user_data)
{
  GstElement *pipeline = GST_ELEMENT (user_data);

  if (gst_element_set_state (pipeline, GST_STATE_PLAYING) ==
      GST_STATE_CHANGE_FAILURE) {
    gst_element_set_state (pipeline, GST_STATE_NULL);
    gst_object_unref (pipeline);
    gtk_main_quit ();
  }

  return FALSE;
}
/* Build a videotestsrc ! xvimagesink pipeline, embed the sink in a GTK
 * drawing area via its XID, and demonstrate colorkey-based overlay drawing.
 * The window must be realized (XID available) before the pipeline starts. */
int
main (int argc, char **argv)
{
  GtkWidget *window;
  GstElement *pipeline, *src;
  GstBus *bus;
  GstStateChangeReturn sret;
#if 0
  GstPropertyProbe *probe;
  GValueArray *arr;
#endif

  gst_init (&argc, &argv);
  gtk_init (&argc, &argv);

  /* prepare the pipeline */
  pipeline = gst_pipeline_new ("xvoverlay");
  src = gst_element_factory_make ("videotestsrc", NULL);
  sink = gst_element_factory_make ("xvimagesink", NULL);
  /* NOTE(review): neither factory result nor the link is NULL-checked; if
   * xvimagesink is unavailable this crashes later -- confirm acceptable for
   * a test program. */
  gst_bin_add_many (GST_BIN (pipeline), src, sink, NULL);
  gst_element_link (src, sink);

#define COLOR_GRAY 0x7F7F7F

  /* paint the colorkey ourselves (autopaint off) so the overlay code in
   * redraw_overlay() controls exactly where the video shows through */
  g_object_set (G_OBJECT (sink), "autopaint-colorkey", FALSE,
      "force-aspect-ratio", TRUE, "draw-borders", FALSE,
      "colorkey", COLOR_GRAY, NULL);

  /* check xvimagesink capabilities */
  sret = gst_element_set_state (pipeline, GST_STATE_READY);
  if (sret == GST_STATE_CHANGE_FAILURE) {
    g_printerr ("Can't set pipeline to READY\n");
    gst_object_unref (pipeline);
    return -1;
  }
#if 0
  probe = GST_PROPERTY_PROBE (sink);
  if (!probe) {
    g_printerr ("Can't probe sink\n");
    gst_element_set_state (pipeline, GST_STATE_NULL);
    gst_object_unref (pipeline);
    return -1;
  }
  arr =
      gst_property_probe_probe_and_get_values_name (probe,
      "autopaint-colorkey");
  if (!arr || !arr->n_values) {
    g_printerr ("Can't disable autopaint-colorkey property\n");
    gst_element_set_state (pipeline, GST_STATE_NULL);
    gst_object_unref (pipeline);
    return -1;
  }
  if (arr)
    g_value_array_free (arr);
#endif

  /* watch for the pipeline's state changes to pick up the actual colorkey */
  bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
  gst_bus_add_signal_watch_full (bus, G_PRIORITY_HIGH);
  g_signal_connect (bus, "message::state-changed",
      G_CALLBACK (msg_state_changed), pipeline);
  gst_object_unref (bus);

  /* prepare the ui */
  window = gtk_window_new (GTK_WINDOW_TOPLEVEL);
  g_signal_connect (G_OBJECT (window), "delete-event",
      G_CALLBACK (window_closed), (gpointer) pipeline);
  gtk_window_set_default_size (GTK_WINDOW (window), 320, 240);

  video_window = gtk_drawing_area_new ();
  g_signal_connect (G_OBJECT (video_window), "configure-event",
      G_CALLBACK (handle_resize_cb), NULL);
  g_signal_connect (G_OBJECT (video_window), "draw",
      G_CALLBACK (draw_cb), NULL);
  g_signal_connect (video_window, "realize", G_CALLBACK (realize_cb), NULL);
  gtk_container_add (GTK_CONTAINER (window), video_window);

  /* show the gui and play */
  gtk_widget_show_all (window);

  /* realize window now so that the video window gets created and we can
   * obtain its XID before the pipeline is started up and the videosink
   * asks for the XID of the window to render onto */
  gtk_widget_realize (window);

  /* we should have the XID now */
  g_assert (embed_xid != 0);

  /* we know what the video sink is in this case (xvimagesink), so we can
   * just set it directly here now (instead of waiting for a
   * prepare-window-handle element message in a sync bus handler and setting
   * it there) */
  g_print ("setting XID %lu\n", embed_xid);
  gst_video_overlay_set_window_handle (GST_VIDEO_OVERLAY (sink), embed_xid);

  /* start playback from inside the main loop (see start_pipeline) */
  g_idle_add (start_pipeline, pipeline);

  gtk_main ();

  gst_object_unref (pipeline);
  return 0;
}
| {
"content_hash": "0e76eb9c75d56398daf857c91b5838f7",
"timestamp": "",
"source": "github",
"line_count": 251,
"max_line_length": 89,
"avg_line_length": 27.294820717131476,
"alnum_prop": 0.6523135308714056,
"repo_name": "google/aistreams",
"id": "12b7732ee50362b9aedba316fe57bdcacf9a266e",
"size": "7721",
"binary": false,
"copies": "9",
"ref": "refs/heads/master",
"path": "third_party/gst-plugins-base/tests/icles/test-colorkey.c",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "77741"
},
{
"name": "C++",
"bytes": "626396"
},
{
"name": "Python",
"bytes": "41809"
},
{
"name": "Starlark",
"bytes": "56595"
}
],
"symlink_target": ""
} |
package org.karivar.utils;
import com.google.common.base.Splitter;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.io.InputStream;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.ResourceBundle;
/**
 * Loads hook configuration (JIRA issue types/statuses and issue link names)
 * from property files on the classpath, logging localized error messages on
 * failure.
 */
class PropertyReader {
    private final ResourceBundle messages;
    private Map<String, List<String>> issueTypesAndStatuses;
    private List<String> issueLinks;
    private final Logger logger = LoggerFactory.getLogger(PropertyReader.class);

    PropertyReader(ResourceBundle bundle) {
        messages = bundle;
    }

    /**
     * Loads a property file containing all JIRA issue types and their corresponding statuses
     * which allows code check-in.
     * @return a map containing issue types and their statuses.
     */
    Map<String, List<String>> getIssueTypesAndStatuses() {
        loadIssueTypesAndStatuses();
        return issueTypesAndStatuses;
    }

    /**
     * Loads a property file containing names of the JIRA link names which will be used to connect relevant issues
     * together to increase the level of traceability.
     * @return a list containing all JIRA link names.
     */
    List<String> getIssueLinks() {
        loadIssueLinks();
        return issueLinks;
    }

    private void loadIssueLinks() {
        issueLinks = Lists.newArrayList();
        Properties properties = loadPropertiesFile("issuelinks.properties");

        if (!properties.isEmpty()) {
            String values = properties.getProperty("issuelinks");
            issueLinks = Lists.newArrayList(Splitter.on(", ").split(values));
        }
    }

    private void loadIssueTypesAndStatuses() {
        issueTypesAndStatuses = Maps.newHashMap();
        Properties properties = loadPropertiesFile("issuetypes.properties");

        if (!properties.isEmpty()) {
            for (Object property : properties.keySet()) {
                String key = (String) property;
                String values = properties.getProperty(key);

                // Property keys use '_' where the JIRA issue type name has a space.
                if (key.contains("_")) {
                    key = key.replaceAll("_", " ");
                }

                List<String> items = Lists.newArrayList(Splitter.on(", ").split(values));
                issueTypesAndStatuses.put(key, items);
            }
        }
    }

    /**
     * Loads the given property file from the classpath.
     * <p>
     * Fixes over the previous version: a missing resource made
     * {@code Properties.load(null)} throw an uncaught, unlogged
     * NullPointerException (getResourceAsStream returns null when the
     * resource is absent), and the input stream was never closed.
     *
     * @param filename the classpath resource name
     * @return the loaded properties; empty when the file is missing or unreadable
     */
    private Properties loadPropertiesFile(String filename) {
        Properties properties = new Properties();
        InputStream reader = getClass().getClassLoader().getResourceAsStream(filename);

        if (reader == null) {
            logger.error(messages.getString("error.loadfile.io"), filename);
            return properties;
        }

        try {
            properties.load(reader);
        } catch (IOException e) {
            logger.error(messages.getString("error.loadfile.io"), filename);
        } catch (IllegalArgumentException e) {
            logger.error(messages.getString("error.loadfile.malformed"), filename);
        } finally {
            try {
                reader.close();
            } catch (IOException e) {
                // closing failure is not actionable; any loaded data is already in `properties`
            }
        }

        return properties;
    }
}
| {
"content_hash": "a9927c16c7197312a94219f855c5d7b2",
"timestamp": "",
"source": "github",
"line_count": 95,
"max_line_length": 114,
"avg_line_length": 31.094736842105263,
"alnum_prop": 0.6475964793500338,
"repo_name": "pgjerlow/git-jira-hook",
"id": "202620fe0172da691d933bf7d02963d25460cffc",
"size": "3150",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/main/java/org/karivar/utils/PropertyReader.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Java",
"bytes": "79423"
},
{
"name": "Shell",
"bytes": "7998"
}
],
"symlink_target": ""
} |
import { Component, OnInit, OnDestroy } from '@angular/core';
import { ActivatedRoute } from '@angular/router';
import { NgbActiveModal } from '@ng-bootstrap/ng-bootstrap';
import { JhiEventManager } from 'ng-jhipster';
import { Engine } from './engine.model';
import { EnginePopupService } from './engine-popup.service';
import { EngineService } from './engine.service';
@Component({
    selector: 'jhi-engine-delete-dialog',
    templateUrl: './engine-delete-dialog.component.html'
})
export class EngineDeleteDialogComponent {

    // Engine shown in the confirmation dialog; set by the popup service.
    engine: Engine;

    constructor(
        private engineService: EngineService,
        public activeModal: NgbActiveModal,
        private eventManager: JhiEventManager
    ) {
    }

    /** Close the dialog without deleting anything. */
    clear() {
        this.activeModal.dismiss('cancel');
    }

    /** Delete the engine, announce the change, and close the dialog. */
    confirmDelete(id: number) {
        this.engineService.delete(id).subscribe(() => {
            const event = {
                name: 'engineListModification',
                content: 'Deleted an engine'
            };
            this.eventManager.broadcast(event);
            this.activeModal.dismiss(true);
        });
    }
}
@Component({
    selector: 'jhi-engine-delete-popup',
    template: ''
})
export class EngineDeletePopupComponent implements OnInit, OnDestroy {

    // Subscription to route params; released in ngOnDestroy.
    routeSub: any;

    constructor(
        private route: ActivatedRoute,
        private enginePopupService: EnginePopupService
    ) {}

    /** Open the delete dialog for the engine id carried in the route. */
    ngOnInit() {
        this.routeSub = this.route.params.subscribe(({ id }) => {
            this.enginePopupService.open(EngineDeleteDialogComponent as Component, id);
        });
    }

    /** Release the route-params subscription. */
    ngOnDestroy() {
        this.routeSub.unsubscribe();
    }
}
| {
"content_hash": "4c88f83adc264576e77dec064533e16a",
"timestamp": "",
"source": "github",
"line_count": 64,
"max_line_length": 78,
"avg_line_length": 26.078125,
"alnum_prop": 0.6351108448172559,
"repo_name": "iMartinezMateu/gamecraft",
"id": "f3f1acf6b29977e20b0e5e733cfa4dc2b36b6ab0",
"size": "1669",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "gamecraft-gateway/src/main/webapp/app/entities/engine/engine-delete-dialog.component.ts",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "859"
},
{
"name": "CSS",
"bytes": "159197"
},
{
"name": "Dockerfile",
"bytes": "6073"
},
{
"name": "Gherkin",
"bytes": "179"
},
{
"name": "HTML",
"bytes": "650193"
},
{
"name": "Java",
"bytes": "3048026"
},
{
"name": "JavaScript",
"bytes": "114242"
},
{
"name": "Scala",
"bytes": "57845"
},
{
"name": "Shell",
"bytes": "853"
},
{
"name": "TypeScript",
"bytes": "761582"
}
],
"symlink_target": ""
} |
package com.microsoft.azure.management.compute.v2019_11_01;
import java.util.List;
import com.fasterxml.jackson.annotation.JsonProperty;
/**
 * The instance view of a virtual machine extension.
 * <p>
 * Plain data-transfer object serialized with Jackson; all {@code with*}
 * setters are fluent and return {@code this} for chaining.
 */
public class VirtualMachineExtensionInstanceView {
    /**
     * The virtual machine extension name.
     */
    @JsonProperty(value = "name")
    private String name;

    /**
     * Specifies the type of the extension; an example is
     * "CustomScriptExtension".
     */
    @JsonProperty(value = "type")
    private String type;

    /**
     * Specifies the version of the script handler.
     */
    @JsonProperty(value = "typeHandlerVersion")
    private String typeHandlerVersion;

    /**
     * The resource status information.
     */
    @JsonProperty(value = "substatuses")
    private List<InstanceViewStatus> substatuses;

    /**
     * The resource status information.
     */
    @JsonProperty(value = "statuses")
    private List<InstanceViewStatus> statuses;

    /**
     * Get the virtual machine extension name.
     *
     * @return the name value
     */
    public String name() {
        return this.name;
    }

    /**
     * Set the virtual machine extension name.
     *
     * @param name the name value to set
     * @return the VirtualMachineExtensionInstanceView object itself.
     */
    public VirtualMachineExtensionInstanceView withName(String name) {
        this.name = name;
        return this;
    }

    /**
     * Get specifies the type of the extension; an example is "CustomScriptExtension".
     *
     * @return the type value
     */
    public String type() {
        return this.type;
    }

    /**
     * Set specifies the type of the extension; an example is "CustomScriptExtension".
     *
     * @param type the type value to set
     * @return the VirtualMachineExtensionInstanceView object itself.
     */
    public VirtualMachineExtensionInstanceView withType(String type) {
        this.type = type;
        return this;
    }

    /**
     * Get specifies the version of the script handler.
     *
     * @return the typeHandlerVersion value
     */
    public String typeHandlerVersion() {
        return this.typeHandlerVersion;
    }

    /**
     * Set specifies the version of the script handler.
     *
     * @param typeHandlerVersion the typeHandlerVersion value to set
     * @return the VirtualMachineExtensionInstanceView object itself.
     */
    public VirtualMachineExtensionInstanceView withTypeHandlerVersion(String typeHandlerVersion) {
        this.typeHandlerVersion = typeHandlerVersion;
        return this;
    }

    /**
     * Get the resource status information (sub-statuses).
     *
     * @return the substatuses value
     */
    public List<InstanceViewStatus> substatuses() {
        return this.substatuses;
    }

    /**
     * Set the resource status information (sub-statuses).
     *
     * @param substatuses the substatuses value to set
     * @return the VirtualMachineExtensionInstanceView object itself.
     */
    public VirtualMachineExtensionInstanceView withSubstatuses(List<InstanceViewStatus> substatuses) {
        this.substatuses = substatuses;
        return this;
    }

    /**
     * Get the resource status information.
     *
     * @return the statuses value
     */
    public List<InstanceViewStatus> statuses() {
        return this.statuses;
    }

    /**
     * Set the resource status information.
     *
     * @param statuses the statuses value to set
     * @return the VirtualMachineExtensionInstanceView object itself.
     */
    public VirtualMachineExtensionInstanceView withStatuses(List<InstanceViewStatus> statuses) {
        this.statuses = statuses;
        return this;
    }

}
| {
"content_hash": "70c57d097c62168740febdd90523617f",
"timestamp": "",
"source": "github",
"line_count": 143,
"max_line_length": 102,
"avg_line_length": 25.867132867132867,
"alnum_prop": 0.6496350364963503,
"repo_name": "selvasingh/azure-sdk-for-java",
"id": "22aded80a8b7d76772e8011d775d4481fb73f084",
"size": "3929",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sdk/compute/mgmt-v2019_11_01/src/main/java/com/microsoft/azure/management/compute/v2019_11_01/VirtualMachineExtensionInstanceView.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Java",
"bytes": "29891970"
},
{
"name": "JavaScript",
"bytes": "6198"
},
{
"name": "PowerShell",
"bytes": "160"
},
{
"name": "Shell",
"bytes": "609"
}
],
"symlink_target": ""
} |
ACCEPTED
#### According to
The Catalogue of Life, 3rd January 2011
#### Published in
null
#### Original name
null
### Remarks
null | {
"content_hash": "556740075dd3ec3cbaa2f8add28f9e28",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 39,
"avg_line_length": 10.307692307692308,
"alnum_prop": 0.6940298507462687,
"repo_name": "mdoering/backbone",
"id": "30c679e1eff044530154602feb3e7b60fd9d7536",
"size": "180",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "life/Plantae/Magnoliophyta/Magnoliopsida/Ericales/Clethraceae/Clethra/Clethra tomentosa/README.md",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
#include "SGLQueue.hpp"
#include <stdio.h>
#include <stdlib.h>
#include <iostream>
// Construct an empty queue. The lock word starts at -1, the "unowned"
// sentinel; release ordering publishes the freshly allocated list.
SGLQueue::SGLQueue() {
	q = new std::list<int32_t>();
	lk.store(-1, std::memory_order::memory_order_release);
}
SGLQueue::~SGLQueue(){}
// Remove and return the oldest element (FIFO order) under the global lock.
// Returns the EMPTY sentinel when the queue holds no elements.
int32_t SGLQueue::dequeue(int tid) {
	lockAcquire(tid);
	int32_t result = EMPTY;
	if (!q->empty()) {
		result = q->front();
		q->pop_front();
	}
	lockRelease(tid);
	return result;
}
// Append val at the tail of the queue, serialized by the single global lock.
void SGLQueue::enqueue(int32_t val, int tid) {
	lockAcquire(tid);
	q->push_back(val);
	lockRelease(tid);
}
// Simple test and set lock
/// There are better ways to do this...
void SGLQueue::lockAcquire(int32_t tid){
int unlk = -1;
while(!lk.compare_exchange_strong(unlk, tid,std::memory_order::memory_order_acq_rel)){
unlk = -1; // compare_exchange puts the old value into unlk, so set it back
}
assert(lk.load()==tid);
}
// Release the lock by restoring the "unowned" sentinel (-1).
void SGLQueue::lockRelease(int32_t tid) {
	assert(lk == tid);  // only the current owner may unlock
	lk.store(-1, std::memory_order::memory_order_release);
}
// Construct a queue pre-populated with a copy of *contents.
// The lock word starts at -1 ("unowned").
SGLQueue::SGLQueue(std::list<int32_t>* contents) {
	lk.store(-1, std::memory_order::memory_order_release);
	// Range-construct the backing list directly from the source iterators.
	q = new std::list<int32_t>(contents->begin(), contents->end());
}
| {
"content_hash": "038fc9aaa6ce530e1419207385f5f8f5",
"timestamp": "",
"source": "github",
"line_count": 56,
"max_line_length": 87,
"avg_line_length": 21.5,
"alnum_prop": 0.6370431893687708,
"repo_name": "13ofClubs/parHarness",
"id": "6624a4e4d7160799ff44e31838519ec012a3032f",
"size": "1788",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cpp_harness/SGLQueue.cpp",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C++",
"bytes": "82346"
},
{
"name": "Makefile",
"bytes": "1194"
},
{
"name": "Python",
"bytes": "6636"
},
{
"name": "R",
"bytes": "5068"
},
{
"name": "Shell",
"bytes": "147"
}
],
"symlink_target": ""
} |
//$Id: Dog.java 14760 2008-06-11 07:33:15Z hardy.ferentschik $
package org.hibernate.test.annotations.id.sequences.entities;

import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.Table;
import javax.persistence.TableGenerator;

/**
 * Entity sharing the id-generator table described by the GENERATOR_TABLE
 * GeneratedIdTable, using the "Dog" pk column value as the discriminator
 * row for this entity's generated ids.
 *
 * @author Emmanuel Bernard
 */
@Entity
@Table(name = "tbl_dog")
@TableGenerator(name = "DogGen", table = "GENERATOR_TABLE", pkColumnName = "pkey",
		valueColumnName = "hi", pkColumnValue = "Dog", allocationSize = 10)
public class Dog {
	private Integer id;
	private String name;

	/** Table-generated primary key, drawn from the shared "DogGen" generator. */
	@Id
	@GeneratedValue(strategy = GenerationType.TABLE, generator = "DogGen")
	public Integer getId() {
		return this.id;
	}

	public void setId(Integer id) {
		this.id = id;
	}

	public String getName() {
		return this.name;
	}

	public void setName(String name) {
		this.name = name;
	}
}
| {
"content_hash": "dd5e2e0e5c167c1a878c3719fd1520bf",
"timestamp": "",
"source": "github",
"line_count": 41,
"max_line_length": 82,
"avg_line_length": 24.902439024390244,
"alnum_prop": 0.7384916748285995,
"repo_name": "HerrB92/obp",
"id": "0530828eb6bb3b99395b344d62248016b5aff850",
"size": "1021",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "OpenBeaconPackage/libraries/hibernate-release-4.2.7.SP1/project/hibernate-core/src/test/java/org/hibernate/test/annotations/id/sequences/entities/Dog.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "181658"
},
{
"name": "Groovy",
"bytes": "98685"
},
{
"name": "Java",
"bytes": "34621856"
},
{
"name": "JavaScript",
"bytes": "356255"
},
{
"name": "Shell",
"bytes": "194"
},
{
"name": "XSLT",
"bytes": "21372"
}
],
"symlink_target": ""
} |
namespace content {
class RenderViewHost;
}
class Profile;
namespace chromeos {
// Identifies which accessibility feature (or lifecycle event) a status
// notification refers to.
enum AccessibilityNotificationType {
  ACCESSIBILITY_MANAGER_SHUTDOWN,
  ACCESSIBILITY_TOGGLE_HIGH_CONTRAST_MODE,
  ACCESSIBILITY_TOGGLE_LARGE_CURSOR,
  ACCESSIBILITY_TOGGLE_SCREEN_MAGNIFIER,
  ACCESSIBILITY_TOGGLE_SPOKEN_FEEDBACK,
  ACCESSIBILITY_TOGGLE_VIRTUAL_KEYBOARD,
  ACCESSIBILITY_BRAILLE_DISPLAY_CONNECTION_STATE_CHANGED
};
// Payload handed to accessibility status callbacks: which feature changed,
// its new enabled state, the magnifier type (for the magnifier event only),
// and whether a user-visible notification should be shown.
struct AccessibilityStatusEventDetails {
  AccessibilityStatusEventDetails(
      AccessibilityNotificationType notification_type,
      bool enabled,
      ui::AccessibilityNotificationVisibility notify);
  AccessibilityStatusEventDetails(
      AccessibilityNotificationType notification_type,
      bool enabled,
      ui::MagnifierType magnifier_type,
      ui::AccessibilityNotificationVisibility notify);
  AccessibilityNotificationType notification_type;
  bool enabled;
  ui::MagnifierType magnifier_type;
  ui::AccessibilityNotificationVisibility notify;
};
// Callback plumbing for observers of accessibility status changes; callers
// keep the returned Subscription alive to stay registered.
typedef base::Callback<void(const AccessibilityStatusEventDetails&)>
    AccessibilityStatusCallback;
typedef base::CallbackList<void(const AccessibilityStatusEventDetails&)>
    AccessibilityStatusCallbackList;
typedef AccessibilityStatusCallbackList::Subscription
    AccessibilityStatusSubscription;
class ChromeVoxPanelWidgetObserver;
// AccessibilityManager changes the statuses of accessibility features
// watching profile notifications and pref-changes.
// TODO(yoshiki): merge MagnificationManager with AccessibilityManager.
class AccessibilityManager
    : public content::NotificationObserver,
      public extensions::api::braille_display_private::BrailleObserver,
      public extensions::ExtensionRegistryObserver,
      public ash::SessionStateObserver,
      public ash::ShellObserver,
      public input_method::InputMethodManager::Observer {
 public:
  // Creates an instance of AccessibilityManager, this should be called once,
  // because only one instance should exist at the same time.
  static void Initialize();
  // Deletes the existing instance of AccessibilityManager.
  static void Shutdown();
  // Returns the existing instance. If there is no instance, returns NULL.
  static AccessibilityManager* Get();
  // On a user's first login into a device, any a11y features enabled/disabled
  // by the user on the login screen are enabled/disabled in the user's profile.
  // This class watches for profile changes and copies settings into the user's
  // profile when it detects a login with a newly created profile.
  class PrefHandler {
   public:
    explicit PrefHandler(const char* pref_path);
    virtual ~PrefHandler();
    // Should be called from AccessibilityManager::SetProfile().
    void HandleProfileChanged(Profile* previous_profile,
                              Profile* current_profile);
   private:
    const char* pref_path_;
    DISALLOW_COPY_AND_ASSIGN(PrefHandler);
  };
  // Returns true when the accessibility menu should be shown.
  bool ShouldShowAccessibilityMenu();
  // Returns true when cursor compositing should be enabled.
  bool ShouldEnableCursorCompositing();
  // Enables or disables the large cursor.
  void EnableLargeCursor(bool enabled);
  // Returns true if the large cursor is enabled, or false if not.
  bool IsLargeCursorEnabled();
  // Enables or disable Sticky Keys.
  void EnableStickyKeys(bool enabled);
  // Returns true if Incognito mode is allowed, or false if not.
  bool IsIncognitoAllowed();
  // Returns true if the Sticky Keys is enabled, or false if not.
  bool IsStickyKeysEnabled();
  // Enables or disables spoken feedback. Enabling spoken feedback installs the
  // ChromeVox component extension.
  void EnableSpokenFeedback(bool enabled,
                            ui::AccessibilityNotificationVisibility notify);
  // Returns true if spoken feedback is enabled, or false if not.
  bool IsSpokenFeedbackEnabled();
  // Toggles whether Chrome OS spoken feedback is on or off.
  void ToggleSpokenFeedback(ui::AccessibilityNotificationVisibility notify);
  // Enables or disables the high contrast mode for Chrome.
  void EnableHighContrast(bool enabled);
  // Returns true if High Contrast is enabled, or false if not.
  bool IsHighContrastEnabled();
  // Enables or disables autoclick.
  void EnableAutoclick(bool enabled);
  // Returns true if autoclick is enabled.
  bool IsAutoclickEnabled();
  // Set the delay for autoclicking after stopping the cursor in milliseconds.
  void SetAutoclickDelay(int delay_ms);
  // Returns the autoclick delay in milliseconds.
  int GetAutoclickDelay() const;
  // Enables or disables the virtual keyboard.
  void EnableVirtualKeyboard(bool enabled);
  // Returns true if the virtual keyboard is enabled, otherwise false.
  bool IsVirtualKeyboardEnabled();
  // Returns true if a braille display is connected to the system, otherwise
  // false.
  bool IsBrailleDisplayConnected() const;
  // SessionStateObserver overrides:
  void ActiveUserChanged(const AccountId& account_id) override;
  // ShellObserver overrides:
  void OnAppTerminating() override;
  // Test hook: overrides the profile used for a11y state.
  void SetProfileForTest(Profile* profile);
  static void SetBrailleControllerForTest(
      extensions::api::braille_display_private::BrailleController* controller);
  // Enables/disables system sounds.
  void EnableSystemSounds(bool system_sounds_enabled);
  // Initiates play of shutdown sound and returns it's duration.
  base::TimeDelta PlayShutdownSound();
  // Injects ChromeVox scripts into given |render_view_host|.
  void InjectChromeVox(content::RenderViewHost* render_view_host);
  // Register a callback to be notified when the status of an accessibility
  // option changes.
  scoped_ptr<AccessibilityStatusSubscription> RegisterCallback(
      const AccessibilityStatusCallback& cb);
  // Notify registered callbacks of a status change in an accessibility setting.
  void NotifyAccessibilityStatusChanged(
      AccessibilityStatusEventDetails& details);
  // Notify accessibility when locale changes occur.
  void OnLocaleChanged();
  // Plays an earcon. Earcons are brief and distinctive sounds that indicate
  // when their mapped event has occurred. The sound key enums can be found in
  // chromeos/audio/chromeos_sounds.h.
  void PlayEarcon(int sound_key);
  // Called by our widget observer when the ChromeVoxPanel is closing.
  void OnChromeVoxPanelClosing();
  void OnChromeVoxPanelDestroying();
  // Profile having the a11y context.
  Profile* profile() { return profile_; }
  // Extension id of extension receiving keyboard events.
  void SetKeyboardListenerExtensionId(const std::string& id,
                                      content::BrowserContext* context);
  const std::string& keyboard_listener_extension_id() {
    return keyboard_listener_extension_id_;
  }
  // Whether keyboard listener extension gets to capture keys.
  void set_keyboard_listener_capture(bool val) {
    keyboard_listener_capture_ = val;
  }
  bool keyboard_listener_capture() { return keyboard_listener_capture_; }
 protected:
  AccessibilityManager();
  ~AccessibilityManager() override;
 private:
  // ChromeVox (spoken feedback) load/unload helpers for the user screen and
  // the lock screen, plus post-(un)load bookkeeping.
  void LoadChromeVox();
  void LoadChromeVoxToUserScreen(const base::Closure& done_cb);
  void LoadChromeVoxToLockScreen(const base::Closure& done_cb);
  void UnloadChromeVox();
  void UnloadChromeVoxFromLockScreen();
  void PostLoadChromeVox(Profile* profile);
  void PostUnloadChromeVox(Profile* profile);
  // Refresh the cached enabled-state of each feature from its preference.
  void UpdateLargeCursorFromPref();
  void UpdateStickyKeysFromPref();
  void UpdateSpokenFeedbackFromPref();
  void UpdateHighContrastFromPref();
  void UpdateAutoclickFromPref();
  void UpdateAutoclickDelayFromPref();
  void UpdateVirtualKeyboardFromPref();
  // Braille display connection tracking.
  void CheckBrailleState();
  void ReceiveBrailleDisplayState(
      scoped_ptr<extensions::api::braille_display_private::DisplayState> state);
  void UpdateBrailleImeState();
  void SetProfile(Profile* profile);
  void UpdateChromeOSAccessibilityHistograms();
  // content::NotificationObserver
  void Observe(int type,
               const content::NotificationSource& source,
               const content::NotificationDetails& details) override;
  // extensions::api::braille_display_private::BrailleObserver implementation.
  // Enables spoken feedback if a braille display becomes available.
  void OnBrailleDisplayStateChanged(
      const extensions::api::braille_display_private::DisplayState&
          display_state) override;
  void OnBrailleKeyEvent(
      const extensions::api::braille_display_private::KeyEvent& event) override;
  // ExtensionRegistryObserver implementation.
  void OnExtensionUnloaded(
      content::BrowserContext* browser_context,
      const extensions::Extension* extension,
      extensions::UnloadedExtensionInfo::Reason reason) override;
  void OnShutdown(extensions::ExtensionRegistry* registry) override;
  // InputMethodManager::Observer
  void InputMethodChanged(input_method::InputMethodManager* manager,
                          Profile* profile,
                          bool show_message) override;
  // Profile which has the current a11y context.
  Profile* profile_;
  // Whether ChromeVox is currently loaded on the lock screen / user screen.
  bool chrome_vox_loaded_on_lock_screen_;
  bool chrome_vox_loaded_on_user_screen_;
  content::NotificationRegistrar notification_registrar_;
  scoped_ptr<PrefChangeRegistrar> pref_change_registrar_;
  scoped_ptr<PrefChangeRegistrar> local_state_pref_change_registrar_;
  scoped_ptr<ash::ScopedSessionStateObserver> session_state_observer_;
  // One PrefHandler per feature pref; copies login-screen a11y settings into
  // newly created profiles (see PrefHandler above).
  PrefHandler large_cursor_pref_handler_;
  PrefHandler spoken_feedback_pref_handler_;
  PrefHandler high_contrast_pref_handler_;
  PrefHandler autoclick_pref_handler_;
  PrefHandler autoclick_delay_pref_handler_;
  PrefHandler virtual_keyboard_pref_handler_;
  // Cached feature state, kept in sync by the Update*FromPref() methods.
  bool large_cursor_enabled_;
  bool sticky_keys_enabled_;
  bool spoken_feedback_enabled_;
  bool high_contrast_enabled_;
  bool autoclick_enabled_;
  int autoclick_delay_ms_;
  bool virtual_keyboard_enabled_;
  ui::AccessibilityNotificationVisibility spoken_feedback_notification_;
  bool should_speak_chrome_vox_announcements_on_user_screen_;
  bool system_sounds_enabled_;
  AccessibilityStatusCallbackList callback_list_;
  bool braille_display_connected_;
  ScopedObserver<extensions::api::braille_display_private::BrailleController,
                 AccessibilityManager> scoped_braille_observer_;
  bool braille_ime_current_;
  ChromeVoxPanel* chromevox_panel_;
  scoped_ptr<ChromeVoxPanelWidgetObserver> chromevox_panel_widget_observer_;
  std::string keyboard_listener_extension_id_;
  bool keyboard_listener_capture_;
  // Listen to extension unloaded notifications.
  ScopedObserver<extensions::ExtensionRegistry,
                 extensions::ExtensionRegistryObserver>
      extension_registry_observer_;
  base::WeakPtrFactory<AccessibilityManager> weak_ptr_factory_;
  DISALLOW_COPY_AND_ASSIGN(AccessibilityManager);
};
} // namespace chromeos
#endif // CHROME_BROWSER_CHROMEOS_ACCESSIBILITY_ACCESSIBILITY_MANAGER_H_
| {
"content_hash": "f1f4bcdd6549b930c1b86753b69b2478",
"timestamp": "",
"source": "github",
"line_count": 316,
"max_line_length": 80,
"avg_line_length": 35.04746835443038,
"alnum_prop": 0.7601805869074492,
"repo_name": "ds-hwang/chromium-crosswalk",
"id": "50121fb757843bcbc5202c8b783ce18088aa5ff6",
"size": "12259",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "chrome/browser/chromeos/accessibility/accessibility_manager.h",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
namespace v8 {
namespace internal {
// CPU Registers.
//
// 1) We would prefer to use an enum, but enum values are assignment-
// compatible with int, which has caused code-generation bugs.
//
// 2) We would prefer to use a class instead of a struct but we don't like
// the register initialization to depend on the particular initialization
// order (which appears to be different on OS X, Linux, and Windows for the
// installed versions of C++ we tried). Using a struct permits C-style
// "initialization". Also, the Register objects cannot be const as this
// forces initialization stubs in MSVC, making us dependent on initialization
// order.
//
// 3) By not using an enum, we are possibly preventing the compiler from
// doing certain constant folds, which may significantly reduce the
// code generated for some assembly instructions (because they boil down
// to a few constants). If this is a problem, we could change the code
// such that we use an enum in optimized mode, and the struct in debug
// mode. This way we get the compile-time error checking in debug mode
// and best performance in optimized code.
//
struct Register {
  // Number of registers available to the register allocator. esp and ebp are
  // excluded (reserved for stack/frame use); see To/FromAllocationIndex.
  static const int kMaxNumAllocatableRegisters = 6;
  static int NumAllocatableRegisters() {
    return kMaxNumAllocatableRegisters;
  }
  // Total number of ia32 general-purpose registers.
  static const int kNumRegisters = 8;
  static inline const char* AllocationIndexToString(int index);
  static inline int ToAllocationIndex(Register reg);
  static inline Register FromAllocationIndex(int index);
  static Register from_code(int code) {
    DCHECK(code >= 0);
    DCHECK(code < kNumRegisters);
    Register r = { code };
    return r;
  }
  bool is_valid() const { return 0 <= code_ && code_ < kNumRegisters; }
  bool is(Register reg) const { return code_ == reg.code_; }
  // eax, ebx, ecx and edx are byte registers, the rest are not.
  bool is_byte_register() const { return code_ <= 3; }
  int code() const {
    DCHECK(is_valid());
    return code_;
  }
  // Single-bit mask for this register (1 << code), e.g. for register sets.
  int bit() const {
    DCHECK(is_valid());
    return 1 << code_;
  }
  // Unfortunately we can't make this private in a struct.
  int code_;
};
// Hardware encodings of the ia32 general-purpose registers, plus -1 as the
// "no register" sentinel.
const int kRegister_eax_Code = 0;
const int kRegister_ecx_Code = 1;
const int kRegister_edx_Code = 2;
const int kRegister_ebx_Code = 3;
const int kRegister_esp_Code = 4;
const int kRegister_ebp_Code = 5;
const int kRegister_esi_Code = 6;
const int kRegister_edi_Code = 7;
const int kRegister_no_reg_Code = -1;
const Register eax = { kRegister_eax_Code };
const Register ecx = { kRegister_ecx_Code };
const Register edx = { kRegister_edx_Code };
const Register ebx = { kRegister_ebx_Code };
const Register esp = { kRegister_esp_Code };
const Register ebp = { kRegister_ebp_Code };
const Register esi = { kRegister_esi_Code };
const Register edi = { kRegister_edi_Code };
const Register no_reg = { kRegister_no_reg_Code };
// Human-readable name for an allocation index (esp/ebp are not allocatable
// and therefore have no entry here).
inline const char* Register::AllocationIndexToString(int index) {
  DCHECK(index >= 0 && index < kMaxNumAllocatableRegisters);
  static const char* const kRegisterNames[] = {"eax", "ecx", "edx",
                                               "ebx", "esi", "edi"};
  return kRegisterNames[index];
}
// Maps a register code to its allocation index. esi (6) and edi (7) shift
// down by two to fill the gap left by the non-allocatable esp/ebp (4/5).
inline int Register::ToAllocationIndex(Register reg) {
  DCHECK(reg.is_valid() && !reg.is(esp) && !reg.is(ebp));
  int code = reg.code();
  return code < 6 ? code : code - 2;
}
// Inverse of ToAllocationIndex: indices 4 and 5 skip over the reserved
// esp/ebp codes to reach esi/edi.
inline Register Register::FromAllocationIndex(int index) {
  DCHECK(index >= 0 && index < kMaxNumAllocatableRegisters);
  return from_code(index < 4 ? index : index + 2);
}
// Wrapper for an ia32 XMM (SSE) register code. xmm0 is not allocatable
// (allocation indices start at xmm1), hence 7 allocatable of 8 total.
struct XMMRegister {
  static const int kMaxNumAllocatableRegisters = 7;
  static const int kMaxNumRegisters = 8;
  static int NumAllocatableRegisters() {
    return kMaxNumAllocatableRegisters;
  }
  // TODO(turbofan): Proper support for float32.
  static int NumAllocatableAliasedRegisters() {
    return NumAllocatableRegisters();
  }
  static int ToAllocationIndex(XMMRegister reg) {
    DCHECK(reg.code() != 0);
    return reg.code() - 1;  // shift down by one to skip xmm0
  }
  static XMMRegister FromAllocationIndex(int index) {
    DCHECK(index >= 0 && index < kMaxNumAllocatableRegisters);
    return from_code(index + 1);  // inverse of ToAllocationIndex
  }
  static XMMRegister from_code(int code) {
    XMMRegister result = { code };
    return result;
  }
  bool is_valid() const {
    return 0 <= code_ && code_ < kMaxNumRegisters;
  }
  int code() const {
    DCHECK(is_valid());
    return code_;
  }
  bool is(XMMRegister reg) const { return code_ == reg.code_; }
  static const char* AllocationIndexToString(int index) {
    DCHECK(index >= 0 && index < kMaxNumAllocatableRegisters);
    const char* const names[] = {
      "xmm1",
      "xmm2",
      "xmm3",
      "xmm4",
      "xmm5",
      "xmm6",
      "xmm7"
    };
    return names[index];
  }
  int code_;
};
// On ia32, double-precision values are held in XMM registers.
typedef XMMRegister DoubleRegister;
// The eight XMM registers with their hardware encodings, plus the -1
// "no register" sentinel.
const XMMRegister xmm0 = { 0 };
const XMMRegister xmm1 = { 1 };
const XMMRegister xmm2 = { 2 };
const XMMRegister xmm3 = { 3 };
const XMMRegister xmm4 = { 4 };
const XMMRegister xmm5 = { 5 };
const XMMRegister xmm6 = { 6 };
const XMMRegister xmm7 = { 7 };
const XMMRegister no_xmm_reg = { -1 };
// ia32 condition codes as encoded in conditional instructions (Jcc, SETcc).
// Each condition and its negation differ only in the low bit; NegateCondition
// below relies on this.
enum Condition {
  // any value < 0 is considered no_condition
  no_condition = -1,
  overflow = 0,
  no_overflow = 1,
  below = 2,
  above_equal = 3,
  equal = 4,
  not_equal = 5,
  below_equal = 6,
  above = 7,
  negative = 8,
  positive = 9,
  parity_even = 10,
  parity_odd = 11,
  less = 12,
  greater_equal = 13,
  less_equal = 14,
  greater = 15,
  // aliases
  carry = below,
  not_carry = above_equal,
  zero = equal,
  not_zero = not_equal,
  sign = negative,
  not_sign = positive
};
// Returns the equivalent of !cc.
// Condition codes come in paired opposites that differ only in the low bit
// (overflow/no_overflow = 0/1, below/above_equal = 2/3, ...), so negation is
// a single XOR. Negating the default no_condition (-1) yields another
// negative value (-2); as long as tests for no_condition check for
// condition < 0, this works as expected.
inline Condition NegateCondition(Condition cc) {
  int flipped = static_cast<int>(cc) ^ 1;
  return static_cast<Condition>(flipped);
}
// Commute a condition such that {a cond b == b cond' a}. Only the ordering
// comparisons change; equality-style and flag-test conditions commute to
// themselves (the default case).
inline Condition CommuteCondition(Condition cc) {
  switch (cc) {
    case below:         return above;
    case above:         return below;
    case above_equal:   return below_equal;
    case below_equal:   return above_equal;
    case less:          return greater;
    case greater:       return less;
    case greater_equal: return less_equal;
    case less_equal:    return greater_equal;
    default:            return cc;
  }
}
// -----------------------------------------------------------------------------
// Machine instruction Immediates
// An Immediate wraps a 32-bit constant operand together with its relocation
// mode (plain int, external reference, embedded object handle, Smi, address,
// or code-relative label offset).
class Immediate BASE_EMBEDDED {
 public:
  inline explicit Immediate(int x);
  inline explicit Immediate(const ExternalReference& ext);
  inline explicit Immediate(Handle<Object> handle);
  inline explicit Immediate(Smi* value);
  inline explicit Immediate(Address addr);
  static Immediate CodeRelativeOffset(Label* label) {
    return Immediate(label);
  }
  // The range predicates only hold for non-relocated immediates: a relocated
  // value may be patched later, so it cannot be range-checked here.
  bool is_zero() const { return x_ == 0 && RelocInfo::IsNone(rmode_); }
  bool is_int8() const {
    return -128 <= x_ && x_ < 128 && RelocInfo::IsNone(rmode_);
  }
  bool is_int16() const {
    return -32768 <= x_ && x_ < 32768 && RelocInfo::IsNone(rmode_);
  }
 private:
  inline explicit Immediate(Label* value);
  int x_;
  RelocInfo::Mode rmode_;
  friend class Operand;
  friend class Assembler;
  friend class MacroAssembler;
};
// -----------------------------------------------------------------------------
// Machine instruction Operands
// Scale factors for SIB (scale-index-base) addressing. The enum value is the
// log2 of the multiplier applied to the index register.
enum ScaleFactor {
  times_1 = 0,
  times_2 = 1,
  times_4 = 2,
  times_8 = 3,
  times_int_size = times_4,
  times_half_pointer_size = times_2,
  times_pointer_size = times_4,
  times_twice_pointer_size = times_8
};
// An Operand holds the raw ia32 encoding of an instruction operand: a
// register, or a memory reference built from base/index/scale/displacement.
class Operand BASE_EMBEDDED {
 public:
  // reg
  INLINE(explicit Operand(Register reg));
  // XMM reg
  INLINE(explicit Operand(XMMRegister xmm_reg));
  // [disp/r]
  INLINE(explicit Operand(int32_t disp, RelocInfo::Mode rmode));
  // [disp/r]
  INLINE(explicit Operand(Immediate imm));
  // [base + disp/r]
  explicit Operand(Register base, int32_t disp,
                   RelocInfo::Mode rmode = RelocInfo::NONE32);
  // [base + index*scale + disp/r]
  explicit Operand(Register base,
                   Register index,
                   ScaleFactor scale,
                   int32_t disp,
                   RelocInfo::Mode rmode = RelocInfo::NONE32);
  // [index*scale + disp/r]
  explicit Operand(Register index,
                   ScaleFactor scale,
                   int32_t disp,
                   RelocInfo::Mode rmode = RelocInfo::NONE32);
  // Absolute reference to an external (C++) variable.
  static Operand StaticVariable(const ExternalReference& ext) {
    return Operand(reinterpret_cast<int32_t>(ext.address()),
                   RelocInfo::EXTERNAL_REFERENCE);
  }
  // Indexed reference into an external (C++) array.
  static Operand StaticArray(Register index,
                             ScaleFactor scale,
                             const ExternalReference& arr) {
    return Operand(index, scale, reinterpret_cast<int32_t>(arr.address()),
                   RelocInfo::EXTERNAL_REFERENCE);
  }
  static Operand ForCell(Handle<Cell> cell) {
    AllowDeferredHandleDereference embedding_raw_address;
    return Operand(reinterpret_cast<int32_t>(cell.location()),
                   RelocInfo::CELL);
  }
  static Operand ForRegisterPlusImmediate(Register base, Immediate imm) {
    return Operand(base, imm.x_, imm.rmode_);
  }
  // Returns true if this Operand is a wrapper for the specified register.
  bool is_reg(Register reg) const;
  // Returns true if this Operand is a wrapper for one register.
  bool is_reg_only() const;
  // Asserts that this Operand is a wrapper for one register and returns the
  // register.
  Register reg() const;
 private:
  // Set the ModRM byte without an encoded 'reg' register. The
  // register is encoded later as part of the emit_operand operation.
  inline void set_modrm(int mod, Register rm);
  inline void set_sib(ScaleFactor scale, Register index, Register base);
  inline void set_disp8(int8_t disp);
  inline void set_dispr(int32_t disp, RelocInfo::Mode rmode);
  // Raw encoding: ModRM byte, optional SIB byte, optional 1- or 4-byte
  // displacement (hence at most 6 bytes).
  byte buf_[6];
  // The number of bytes in buf_.
  unsigned int len_;
  // Only valid if len_ > 4.
  RelocInfo::Mode rmode_;
  friend class Assembler;
  friend class MacroAssembler;
};
// -----------------------------------------------------------------------------
// A Displacement describes the 32bit immediate field of an instruction which
// may be used together with a Label in order to refer to a yet unknown code
// position. Displacements stored in the instruction stream are used to describe
// the instruction and to chain a list of instructions using the same Label.
// A Displacement contains 2 different fields:
//
// next field: position of next displacement in the chain (0 = end of list)
// type field: instruction type
//
// A next value of null (0) indicates the end of a chain (note that there can
// be no displacement at position zero, because there is always at least one
// instruction byte before the displacement).
//
// Displacement _data field layout
//
// |31.....2|1......0|
// | next   |  type  |
class Displacement BASE_EMBEDDED {
 public:
  enum Type {
    UNCONDITIONAL_JUMP,
    CODE_RELATIVE,
    OTHER
  };
  int data() const { return data_; }
  Type type() const { return TypeField::decode(data_); }
  // Points L at the next displacement in the chain, or unuses it when this
  // displacement is the end of the chain (next == 0).
  void next(Label* L) const {
    int n = NextField::decode(data_);
    n > 0 ? L->link_to(n) : L->Unuse();
  }
  void link_to(Label* L) { init(L, type()); }
  explicit Displacement(int data) { data_ = data; }
  Displacement(Label* L, Type type) { init(L, type); }
  void print() {
    PrintF("%s (%x) ", (type() == UNCONDITIONAL_JUMP ? "jmp" : "[other]"),
           NextField::decode(data_));
  }
 private:
  int data_;
  // Bit packing of data_: low 2 bits hold the Type, the remaining 30 bits
  // hold the next-displacement position.
  class TypeField: public BitField<Type, 0, 2> {};
  class NextField: public BitField<int, 2, 32-2> {};
  void init(Label* L, Type type);
};
class Assembler : public AssemblerBase {
private:
// We check before assembling an instruction that there is sufficient
// space to write an instruction and its relocation information.
// The relocation writer's position must be kGap bytes above the end of
// the generated instructions. This leaves enough space for the
// longest possible ia32 instruction, 15 bytes, and the longest possible
// relocation information encoding, RelocInfoWriter::kMaxLength == 16.
// (There is a 15 byte limit on ia32 instruction length that rules out some
// otherwise valid instructions.)
// This allows for a single, fast space check per instruction.
static const int kGap = 32;
public:
// Create an assembler. Instructions and relocation information are emitted
// into a buffer, with the instructions starting from the beginning and the
// relocation information starting from the end of the buffer. See CodeDesc
// for a detailed comment on the layout (globals.h).
//
// If the provided buffer is NULL, the assembler allocates and grows its own
// buffer, and buffer_size determines the initial buffer size. The buffer is
// owned by the assembler and deallocated upon destruction of the assembler.
//
// If the provided buffer is not NULL, the assembler uses the provided buffer
// for code generation and assumes its size to be buffer_size. If the buffer
// is too small, a fatal error occurs. No deallocation of the buffer is done
// upon destruction of the assembler.
// TODO(vitalyr): the assembler does not need an isolate.
Assembler(Isolate* isolate, void* buffer, int buffer_size);
virtual ~Assembler() { }
// GetCode emits any pending (non-emitted) code and fills the descriptor
// desc. GetCode() is idempotent; it returns the same result if no other
// Assembler functions are invoked in between GetCode() calls.
void GetCode(CodeDesc* desc);
// Read/Modify the code target in the branch/call instruction at pc.
inline static Address target_address_at(Address pc,
ConstantPoolArray* constant_pool);
inline static void set_target_address_at(Address pc,
ConstantPoolArray* constant_pool,
Address target,
ICacheFlushMode icache_flush_mode =
FLUSH_ICACHE_IF_NEEDED);
static inline Address target_address_at(Address pc, Code* code) {
ConstantPoolArray* constant_pool = code ? code->constant_pool() : NULL;
return target_address_at(pc, constant_pool);
}
static inline void set_target_address_at(Address pc,
Code* code,
Address target,
ICacheFlushMode icache_flush_mode =
FLUSH_ICACHE_IF_NEEDED) {
ConstantPoolArray* constant_pool = code ? code->constant_pool() : NULL;
set_target_address_at(pc, constant_pool, target);
}
// Return the code target address at a call site from the return address
// of that call in the instruction stream.
inline static Address target_address_from_return_address(Address pc);
// Return the code target address of the patch debug break slot
inline static Address break_address_from_return_address(Address pc);
// This sets the branch destination (which is in the instruction on x86).
// This is for calls and branches within generated code.
inline static void deserialization_set_special_target_at(
Address instruction_payload, Code* code, Address target) {
set_target_address_at(instruction_payload, code, target);
}
static const int kSpecialTargetSize = kPointerSize;
// Distance between the address of the code target in the call instruction
// and the return address
static const int kCallTargetAddressOffset = kPointerSize;
// Distance between start of patched return sequence and the emitted address
// to jump to.
static const int kPatchReturnSequenceAddressOffset = 1; // JMP imm32.
// Distance between start of patched debug break slot and the emitted address
// to jump to.
static const int kPatchDebugBreakSlotAddressOffset = 1; // JMP imm32.
static const int kCallInstructionLength = 5;
static const int kPatchDebugBreakSlotReturnOffset = kPointerSize;
static const int kJSReturnSequenceLength = 6;
// The debug break slot must be able to contain a call instruction.
static const int kDebugBreakSlotLength = kCallInstructionLength;
// One byte opcode for test al, 0xXX.
static const byte kTestAlByte = 0xA8;
// One byte opcode for nop.
static const byte kNopByte = 0x90;
// One byte opcode for a short unconditional jump.
static const byte kJmpShortOpcode = 0xEB;
// One byte prefix for a short conditional jump.
static const byte kJccShortPrefix = 0x70;
static const byte kJncShortOpcode = kJccShortPrefix | not_carry;
static const byte kJcShortOpcode = kJccShortPrefix | carry;
static const byte kJnzShortOpcode = kJccShortPrefix | not_zero;
static const byte kJzShortOpcode = kJccShortPrefix | zero;
// ---------------------------------------------------------------------------
// Code generation
//
// - function names correspond one-to-one to ia32 instruction mnemonics
// - unless specified otherwise, instructions operate on 32bit operands
// - instructions on 8bit (byte) operands/registers have a trailing '_b'
// - instructions on 16bit (word) operands/registers have a trailing '_w'
// - naming conflicts with C++ keywords are resolved via a trailing '_'
// NOTE ON INTERFACE: Currently, the interface is not very consistent
// in the sense that some operations (e.g. mov()) can be called in more
// the one way to generate the same instruction: The Register argument
// can in some cases be replaced with an Operand(Register) argument.
// This should be cleaned up and made more orthogonal. The questions
// is: should we always use Operands instead of Registers where an
// Operand is possible, or should we have a Register (overloaded) form
// instead? We must be careful to make sure that the selected instruction
// is obvious from the parameters to avoid hard-to-find code generation
// bugs.
// Insert the smallest number of nop instructions
// possible to align the pc offset to a multiple
// of m. m must be a power of 2.
void Align(int m);
void Nop(int bytes = 1);
// Aligns code to something that's optimal for a jump target for the platform.
void CodeTargetAlign();
// Stack
void pushad();
void popad();
void pushfd();
void popfd();
void push(const Immediate& x);
void push_imm32(int32_t imm32);
void push(Register src);
void push(const Operand& src);
void pop(Register dst);
void pop(const Operand& dst);
void enter(const Immediate& size);
void leave();
// Moves
void mov_b(Register dst, Register src) { mov_b(dst, Operand(src)); }
void mov_b(Register dst, const Operand& src);
void mov_b(Register dst, int8_t imm8) { mov_b(Operand(dst), imm8); }
void mov_b(const Operand& dst, int8_t src) { mov_b(dst, Immediate(src)); }
void mov_b(const Operand& dst, const Immediate& src);
void mov_b(const Operand& dst, Register src);
void mov_w(Register dst, const Operand& src);
void mov_w(const Operand& dst, int16_t src) { mov_w(dst, Immediate(src)); }
void mov_w(const Operand& dst, const Immediate& src);
void mov_w(const Operand& dst, Register src);
void mov(Register dst, int32_t imm32);
void mov(Register dst, const Immediate& x);
void mov(Register dst, Handle<Object> handle);
void mov(Register dst, const Operand& src);
void mov(Register dst, Register src);
void mov(const Operand& dst, const Immediate& x);
void mov(const Operand& dst, Handle<Object> handle);
void mov(const Operand& dst, Register src);
void movsx_b(Register dst, Register src) { movsx_b(dst, Operand(src)); }
void movsx_b(Register dst, const Operand& src);
void movsx_w(Register dst, Register src) { movsx_w(dst, Operand(src)); }
void movsx_w(Register dst, const Operand& src);
void movzx_b(Register dst, Register src) { movzx_b(dst, Operand(src)); }
void movzx_b(Register dst, const Operand& src);
void movzx_w(Register dst, Register src) { movzx_w(dst, Operand(src)); }
void movzx_w(Register dst, const Operand& src);
// Conditional moves
void cmov(Condition cc, Register dst, Register src) {
cmov(cc, dst, Operand(src));
}
void cmov(Condition cc, Register dst, const Operand& src);
// Flag management.
void cld();
// Repetitive string instructions.
void rep_movs();
void rep_stos();
void stos();
// Exchange
void xchg(Register dst, Register src);
void xchg(Register dst, const Operand& src);
// Arithmetics
void adc(Register dst, int32_t imm32);
void adc(Register dst, const Operand& src);
void add(Register dst, Register src) { add(dst, Operand(src)); }
void add(Register dst, const Operand& src);
void add(const Operand& dst, Register src);
void add(Register dst, const Immediate& imm) { add(Operand(dst), imm); }
void add(const Operand& dst, const Immediate& x);
void and_(Register dst, int32_t imm32);
void and_(Register dst, const Immediate& x);
void and_(Register dst, Register src) { and_(dst, Operand(src)); }
void and_(Register dst, const Operand& src);
void and_(const Operand& dst, Register src);
void and_(const Operand& dst, const Immediate& x);
void cmpb(Register reg, int8_t imm8) { cmpb(Operand(reg), imm8); }
void cmpb(const Operand& op, int8_t imm8);
void cmpb(Register reg, const Operand& op);
void cmpb(const Operand& op, Register reg);
void cmpb_al(const Operand& op);
void cmpw_ax(const Operand& op);
void cmpw(const Operand& op, Immediate imm16);
void cmp(Register reg, int32_t imm32);
void cmp(Register reg, Handle<Object> handle);
void cmp(Register reg0, Register reg1) { cmp(reg0, Operand(reg1)); }
void cmp(Register reg, const Operand& op);
void cmp(Register reg, const Immediate& imm) { cmp(Operand(reg), imm); }
void cmp(const Operand& op, const Immediate& imm);
void cmp(const Operand& op, Handle<Object> handle);
void dec_b(Register dst);
void dec_b(const Operand& dst);
void dec(Register dst);
void dec(const Operand& dst);
void cdq();
void idiv(Register src) { idiv(Operand(src)); }
void idiv(const Operand& src);
void div(Register src) { div(Operand(src)); }
void div(const Operand& src);
// Signed multiply instructions.
void imul(Register src); // edx:eax = eax * src.
void imul(Register dst, Register src) { imul(dst, Operand(src)); }
void imul(Register dst, const Operand& src); // dst = dst * src.
void imul(Register dst, Register src, int32_t imm32); // dst = src * imm32.
void imul(Register dst, const Operand& src, int32_t imm32);
void inc(Register dst);
void inc(const Operand& dst);
void lea(Register dst, const Operand& src);
// Unsigned multiply instruction.
void mul(Register src); // edx:eax = eax * reg.
void neg(Register dst);
void neg(const Operand& dst);
void not_(Register dst);
void not_(const Operand& dst);
void or_(Register dst, int32_t imm32);
void or_(Register dst, Register src) { or_(dst, Operand(src)); }
void or_(Register dst, const Operand& src);
void or_(const Operand& dst, Register src);
void or_(Register dst, const Immediate& imm) { or_(Operand(dst), imm); }
void or_(const Operand& dst, const Immediate& x);
void rcl(Register dst, uint8_t imm8);
void rcr(Register dst, uint8_t imm8);
void ror(Register dst, uint8_t imm8) { ror(Operand(dst), imm8); }
void ror(const Operand& dst, uint8_t imm8);
void ror_cl(Register dst) { ror_cl(Operand(dst)); }
void ror_cl(const Operand& dst);
void sar(Register dst, uint8_t imm8) { sar(Operand(dst), imm8); }
void sar(const Operand& dst, uint8_t imm8);
void sar_cl(Register dst) { sar_cl(Operand(dst)); }
void sar_cl(const Operand& dst);
void sbb(Register dst, const Operand& src);
void shld(Register dst, Register src) { shld(dst, Operand(src)); }
void shld(Register dst, const Operand& src);
void shl(Register dst, uint8_t imm8) { shl(Operand(dst), imm8); }
void shl(const Operand& dst, uint8_t imm8);
void shl_cl(Register dst) { shl_cl(Operand(dst)); }
void shl_cl(const Operand& dst);
void shrd(Register dst, Register src) { shrd(dst, Operand(src)); }
void shrd(Register dst, const Operand& src);
void shr(Register dst, uint8_t imm8) { shr(Operand(dst), imm8); }
void shr(const Operand& dst, uint8_t imm8);
void shr_cl(Register dst) { shr_cl(Operand(dst)); }
void shr_cl(const Operand& dst);
void sub(Register dst, const Immediate& imm) { sub(Operand(dst), imm); }
void sub(const Operand& dst, const Immediate& x);
void sub(Register dst, Register src) { sub(dst, Operand(src)); }
void sub(Register dst, const Operand& src);
void sub(const Operand& dst, Register src);
void test(Register reg, const Immediate& imm);
void test(Register reg0, Register reg1) { test(reg0, Operand(reg1)); }
void test(Register reg, const Operand& op);
void test_b(Register reg, const Operand& op);
void test(const Operand& op, const Immediate& imm);
void test_b(Register reg, uint8_t imm8);
void test_b(const Operand& op, uint8_t imm8);
void xor_(Register dst, int32_t imm32);
void xor_(Register dst, Register src) { xor_(dst, Operand(src)); }
void xor_(Register dst, const Operand& src);
void xor_(const Operand& dst, Register src);
void xor_(Register dst, const Immediate& imm) { xor_(Operand(dst), imm); }
void xor_(const Operand& dst, const Immediate& x);
// Bit operations.
void bt(const Operand& dst, Register src);
void bts(Register dst, Register src) { bts(Operand(dst), src); }
void bts(const Operand& dst, Register src);
void bsr(Register dst, Register src) { bsr(dst, Operand(src)); }
void bsr(Register dst, const Operand& src);
// Miscellaneous
void hlt();
void int3();
void nop();
void ret(int imm16);
// Label operations & relative jumps (PPUM Appendix D)
//
// Takes a branch opcode (cc) and a label (L) and generates
// either a backward branch or a forward branch and links it
// to the label fixup chain. Usage:
//
// Label L; // unbound label
// j(cc, &L); // forward branch to unbound label
// bind(&L); // bind label to the current pc
// j(cc, &L); // backward branch to bound label
// bind(&L); // illegal: a label may be bound only once
//
// Note: The same Label can be used for forward and backward branches
// but it may be bound only once.
void bind(Label* L); // binds an unbound label L to the current code position
// Calls
void call(Label* L);
void call(byte* entry, RelocInfo::Mode rmode);
int CallSize(const Operand& adr);
void call(Register reg) { call(Operand(reg)); }
void call(const Operand& adr);
int CallSize(Handle<Code> code, RelocInfo::Mode mode);
void call(Handle<Code> code,
RelocInfo::Mode rmode,
TypeFeedbackId id = TypeFeedbackId::None());
// Jumps
// unconditional jump to L
void jmp(Label* L, Label::Distance distance = Label::kFar);
void jmp(byte* entry, RelocInfo::Mode rmode);
void jmp(Register reg) { jmp(Operand(reg)); }
void jmp(const Operand& adr);
void jmp(Handle<Code> code, RelocInfo::Mode rmode);
// Conditional jumps
void j(Condition cc,
Label* L,
Label::Distance distance = Label::kFar);
void j(Condition cc, byte* entry, RelocInfo::Mode rmode);
void j(Condition cc, Handle<Code> code);
// Floating-point operations
void fld(int i);
void fstp(int i);
void fld1();
void fldz();
void fldpi();
void fldln2();
void fld_s(const Operand& adr);
void fld_d(const Operand& adr);
void fstp_s(const Operand& adr);
void fst_s(const Operand& adr);
void fstp_d(const Operand& adr);
void fst_d(const Operand& adr);
void fild_s(const Operand& adr);
void fild_d(const Operand& adr);
void fist_s(const Operand& adr);
void fistp_s(const Operand& adr);
void fistp_d(const Operand& adr);
// The fisttp instructions require SSE3.
void fisttp_s(const Operand& adr);
void fisttp_d(const Operand& adr);
void fabs();
void fchs();
void fcos();
void fsin();
void fptan();
void fyl2x();
void f2xm1();
void fscale();
void fninit();
void fadd(int i);
void fadd_i(int i);
void fsub(int i);
void fsub_i(int i);
void fmul(int i);
void fmul_i(int i);
void fdiv(int i);
void fdiv_i(int i);
void fisub_s(const Operand& adr);
void faddp(int i = 1);
void fsubp(int i = 1);
void fsubrp(int i = 1);
void fmulp(int i = 1);
void fdivp(int i = 1);
void fprem();
void fprem1();
void fxch(int i = 1);
void fincstp();
void ffree(int i = 0);
void ftst();
void fucomp(int i);
void fucompp();
void fucomi(int i);
void fucomip();
void fcompp();
void fnstsw_ax();
void fwait();
void fnclex();
void frndint();
void sahf();
void setcc(Condition cc, Register reg);
void cpuid();
// SSE instructions
void addss(XMMRegister dst, XMMRegister src) { addss(dst, Operand(src)); }
void addss(XMMRegister dst, const Operand& src);
void subss(XMMRegister dst, XMMRegister src) { subss(dst, Operand(src)); }
void subss(XMMRegister dst, const Operand& src);
void mulss(XMMRegister dst, XMMRegister src) { mulss(dst, Operand(src)); }
void mulss(XMMRegister dst, const Operand& src);
void divss(XMMRegister dst, XMMRegister src) { divss(dst, Operand(src)); }
void divss(XMMRegister dst, const Operand& src);
void ucomiss(XMMRegister dst, XMMRegister src) { ucomiss(dst, Operand(src)); }
void ucomiss(XMMRegister dst, const Operand& src);
void movaps(XMMRegister dst, XMMRegister src);
void shufps(XMMRegister dst, XMMRegister src, byte imm8);
void andps(XMMRegister dst, const Operand& src);
void andps(XMMRegister dst, XMMRegister src) { andps(dst, Operand(src)); }
void xorps(XMMRegister dst, const Operand& src);
void xorps(XMMRegister dst, XMMRegister src) { xorps(dst, Operand(src)); }
void orps(XMMRegister dst, const Operand& src);
void orps(XMMRegister dst, XMMRegister src) { orps(dst, Operand(src)); }
void addps(XMMRegister dst, const Operand& src);
void addps(XMMRegister dst, XMMRegister src) { addps(dst, Operand(src)); }
void subps(XMMRegister dst, const Operand& src);
void subps(XMMRegister dst, XMMRegister src) { subps(dst, Operand(src)); }
void mulps(XMMRegister dst, const Operand& src);
void mulps(XMMRegister dst, XMMRegister src) { mulps(dst, Operand(src)); }
void divps(XMMRegister dst, const Operand& src);
void divps(XMMRegister dst, XMMRegister src) { divps(dst, Operand(src)); }
// SSE2 instructions
void cvttss2si(Register dst, const Operand& src);
void cvttss2si(Register dst, XMMRegister src) {
cvttss2si(dst, Operand(src));
}
void cvttsd2si(Register dst, const Operand& src);
void cvttsd2si(Register dst, XMMRegister src) {
cvttsd2si(dst, Operand(src));
}
void cvtsd2si(Register dst, XMMRegister src);
void cvtsi2sd(XMMRegister dst, Register src) { cvtsi2sd(dst, Operand(src)); }
void cvtsi2sd(XMMRegister dst, const Operand& src);
void cvtss2sd(XMMRegister dst, const Operand& src);
void cvtss2sd(XMMRegister dst, XMMRegister src) {
cvtss2sd(dst, Operand(src));
}
void cvtsd2ss(XMMRegister dst, const Operand& src);
void cvtsd2ss(XMMRegister dst, XMMRegister src) {
cvtsd2ss(dst, Operand(src));
}
void addsd(XMMRegister dst, XMMRegister src) { addsd(dst, Operand(src)); }
void addsd(XMMRegister dst, const Operand& src);
void subsd(XMMRegister dst, XMMRegister src) { subsd(dst, Operand(src)); }
void subsd(XMMRegister dst, const Operand& src);
void mulsd(XMMRegister dst, XMMRegister src) { mulsd(dst, Operand(src)); }
void mulsd(XMMRegister dst, const Operand& src);
void divsd(XMMRegister dst, XMMRegister src) { divsd(dst, Operand(src)); }
void divsd(XMMRegister dst, const Operand& src);
void xorpd(XMMRegister dst, XMMRegister src);
void sqrtsd(XMMRegister dst, XMMRegister src) { sqrtsd(dst, Operand(src)); }
void sqrtsd(XMMRegister dst, const Operand& src);
void andpd(XMMRegister dst, XMMRegister src);
void orpd(XMMRegister dst, XMMRegister src);
void ucomisd(XMMRegister dst, XMMRegister src) { ucomisd(dst, Operand(src)); }
void ucomisd(XMMRegister dst, const Operand& src);
enum RoundingMode {
kRoundToNearest = 0x0,
kRoundDown = 0x1,
kRoundUp = 0x2,
kRoundToZero = 0x3
};
void roundsd(XMMRegister dst, XMMRegister src, RoundingMode mode);
void movmskpd(Register dst, XMMRegister src);
void movmskps(Register dst, XMMRegister src);
void cmpltsd(XMMRegister dst, XMMRegister src);
void pcmpeqd(XMMRegister dst, XMMRegister src);
void movdqa(XMMRegister dst, const Operand& src);
void movdqa(const Operand& dst, XMMRegister src);
void movdqu(XMMRegister dst, const Operand& src);
void movdqu(const Operand& dst, XMMRegister src);
// Dispatches to the aligned (movdqa) or unaligned (movdqu) 128-bit move,
// depending on the alignment guarantee the caller can make for |src|.
void movdq(bool aligned, XMMRegister dst, const Operand& src) {
  if (!aligned) {
    movdqu(dst, src);
    return;
  }
  movdqa(dst, src);
}
void movd(XMMRegister dst, Register src) { movd(dst, Operand(src)); }
void movd(XMMRegister dst, const Operand& src);
void movd(Register dst, XMMRegister src) { movd(Operand(dst), src); }
void movd(const Operand& dst, XMMRegister src);
void movsd(XMMRegister dst, XMMRegister src) { movsd(dst, Operand(src)); }
void movsd(XMMRegister dst, const Operand& src);
void movsd(const Operand& dst, XMMRegister src);
void movss(XMMRegister dst, const Operand& src);
void movss(const Operand& dst, XMMRegister src);
void movss(XMMRegister dst, XMMRegister src) { movss(dst, Operand(src)); }
void extractps(Register dst, XMMRegister src, byte imm8);
void pand(XMMRegister dst, XMMRegister src);
void pxor(XMMRegister dst, XMMRegister src);
void por(XMMRegister dst, XMMRegister src);
void ptest(XMMRegister dst, XMMRegister src);
void pslld(XMMRegister reg, int8_t shift);
void psrld(XMMRegister reg, int8_t shift);
void psllq(XMMRegister reg, int8_t shift);
void psllq(XMMRegister dst, XMMRegister src);
void psrlq(XMMRegister reg, int8_t shift);
void psrlq(XMMRegister dst, XMMRegister src);
void pshufd(XMMRegister dst, XMMRegister src, uint8_t shuffle);
void pextrd(Register dst, XMMRegister src, int8_t offset) {
pextrd(Operand(dst), src, offset);
}
void pextrd(const Operand& dst, XMMRegister src, int8_t offset);
void pinsrd(XMMRegister dst, Register src, int8_t offset) {
pinsrd(dst, Operand(src), offset);
}
void pinsrd(XMMRegister dst, const Operand& src, int8_t offset);
// Parallel XMM operations.
void movntdqa(XMMRegister dst, const Operand& src);
void movntdq(const Operand& dst, XMMRegister src);
// AVX instructions
void vfmadd132sd(XMMRegister dst, XMMRegister src1, XMMRegister src2) {
vfmadd132sd(dst, src1, Operand(src2));
}
void vfmadd213sd(XMMRegister dst, XMMRegister src1, XMMRegister src2) {
vfmadd213sd(dst, src1, Operand(src2));
}
void vfmadd231sd(XMMRegister dst, XMMRegister src1, XMMRegister src2) {
vfmadd231sd(dst, src1, Operand(src2));
}
void vfmadd132sd(XMMRegister dst, XMMRegister src1, const Operand& src2) {
vfmasd(0x99, dst, src1, src2);
}
void vfmadd213sd(XMMRegister dst, XMMRegister src1, const Operand& src2) {
vfmasd(0xa9, dst, src1, src2);
}
void vfmadd231sd(XMMRegister dst, XMMRegister src1, const Operand& src2) {
vfmasd(0xb9, dst, src1, src2);
}
void vfmsub132sd(XMMRegister dst, XMMRegister src1, XMMRegister src2) {
vfmsub132sd(dst, src1, Operand(src2));
}
void vfmsub213sd(XMMRegister dst, XMMRegister src1, XMMRegister src2) {
vfmsub213sd(dst, src1, Operand(src2));
}
void vfmsub231sd(XMMRegister dst, XMMRegister src1, XMMRegister src2) {
vfmsub231sd(dst, src1, Operand(src2));
}
void vfmsub132sd(XMMRegister dst, XMMRegister src1, const Operand& src2) {
vfmasd(0x9b, dst, src1, src2);
}
void vfmsub213sd(XMMRegister dst, XMMRegister src1, const Operand& src2) {
vfmasd(0xab, dst, src1, src2);
}
void vfmsub231sd(XMMRegister dst, XMMRegister src1, const Operand& src2) {
vfmasd(0xbb, dst, src1, src2);
}
void vfnmadd132sd(XMMRegister dst, XMMRegister src1, XMMRegister src2) {
vfnmadd132sd(dst, src1, Operand(src2));
}
void vfnmadd213sd(XMMRegister dst, XMMRegister src1, XMMRegister src2) {
vfnmadd213sd(dst, src1, Operand(src2));
}
void vfnmadd231sd(XMMRegister dst, XMMRegister src1, XMMRegister src2) {
vfnmadd231sd(dst, src1, Operand(src2));
}
void vfnmadd132sd(XMMRegister dst, XMMRegister src1, const Operand& src2) {
vfmasd(0x9d, dst, src1, src2);
}
void vfnmadd213sd(XMMRegister dst, XMMRegister src1, const Operand& src2) {
vfmasd(0xad, dst, src1, src2);
}
void vfnmadd231sd(XMMRegister dst, XMMRegister src1, const Operand& src2) {
vfmasd(0xbd, dst, src1, src2);
}
void vfnmsub132sd(XMMRegister dst, XMMRegister src1, XMMRegister src2) {
vfnmsub132sd(dst, src1, Operand(src2));
}
void vfnmsub213sd(XMMRegister dst, XMMRegister src1, XMMRegister src2) {
vfnmsub213sd(dst, src1, Operand(src2));
}
void vfnmsub231sd(XMMRegister dst, XMMRegister src1, XMMRegister src2) {
vfnmsub231sd(dst, src1, Operand(src2));
}
void vfnmsub132sd(XMMRegister dst, XMMRegister src1, const Operand& src2) {
vfmasd(0x9f, dst, src1, src2);
}
void vfnmsub213sd(XMMRegister dst, XMMRegister src1, const Operand& src2) {
vfmasd(0xaf, dst, src1, src2);
}
void vfnmsub231sd(XMMRegister dst, XMMRegister src1, const Operand& src2) {
vfmasd(0xbf, dst, src1, src2);
}
void vfmasd(byte op, XMMRegister dst, XMMRegister src1, const Operand& src2);
void vfmadd132ss(XMMRegister dst, XMMRegister src1, XMMRegister src2) {
vfmadd132ss(dst, src1, Operand(src2));
}
void vfmadd213ss(XMMRegister dst, XMMRegister src1, XMMRegister src2) {
vfmadd213ss(dst, src1, Operand(src2));
}
void vfmadd231ss(XMMRegister dst, XMMRegister src1, XMMRegister src2) {
vfmadd231ss(dst, src1, Operand(src2));
}
void vfmadd132ss(XMMRegister dst, XMMRegister src1, const Operand& src2) {
vfmass(0x99, dst, src1, src2);
}
void vfmadd213ss(XMMRegister dst, XMMRegister src1, const Operand& src2) {
vfmass(0xa9, dst, src1, src2);
}
void vfmadd231ss(XMMRegister dst, XMMRegister src1, const Operand& src2) {
vfmass(0xb9, dst, src1, src2);
}
void vfmsub132ss(XMMRegister dst, XMMRegister src1, XMMRegister src2) {
vfmsub132ss(dst, src1, Operand(src2));
}
void vfmsub213ss(XMMRegister dst, XMMRegister src1, XMMRegister src2) {
vfmsub213ss(dst, src1, Operand(src2));
}
void vfmsub231ss(XMMRegister dst, XMMRegister src1, XMMRegister src2) {
vfmsub231ss(dst, src1, Operand(src2));
}
void vfmsub132ss(XMMRegister dst, XMMRegister src1, const Operand& src2) {
vfmass(0x9b, dst, src1, src2);
}
void vfmsub213ss(XMMRegister dst, XMMRegister src1, const Operand& src2) {
vfmass(0xab, dst, src1, src2);
}
void vfmsub231ss(XMMRegister dst, XMMRegister src1, const Operand& src2) {
vfmass(0xbb, dst, src1, src2);
}
void vfnmadd132ss(XMMRegister dst, XMMRegister src1, XMMRegister src2) {
vfnmadd132ss(dst, src1, Operand(src2));
}
void vfnmadd213ss(XMMRegister dst, XMMRegister src1, XMMRegister src2) {
vfnmadd213ss(dst, src1, Operand(src2));
}
void vfnmadd231ss(XMMRegister dst, XMMRegister src1, XMMRegister src2) {
vfnmadd231ss(dst, src1, Operand(src2));
}
void vfnmadd132ss(XMMRegister dst, XMMRegister src1, const Operand& src2) {
vfmass(0x9d, dst, src1, src2);
}
void vfnmadd213ss(XMMRegister dst, XMMRegister src1, const Operand& src2) {
vfmass(0xad, dst, src1, src2);
}
void vfnmadd231ss(XMMRegister dst, XMMRegister src1, const Operand& src2) {
vfmass(0xbd, dst, src1, src2);
}
void vfnmsub132ss(XMMRegister dst, XMMRegister src1, XMMRegister src2) {
vfnmsub132ss(dst, src1, Operand(src2));
}
void vfnmsub213ss(XMMRegister dst, XMMRegister src1, XMMRegister src2) {
vfnmsub213ss(dst, src1, Operand(src2));
}
void vfnmsub231ss(XMMRegister dst, XMMRegister src1, XMMRegister src2) {
vfnmsub231ss(dst, src1, Operand(src2));
}
void vfnmsub132ss(XMMRegister dst, XMMRegister src1, const Operand& src2) {
vfmass(0x9f, dst, src1, src2);
}
void vfnmsub213ss(XMMRegister dst, XMMRegister src1, const Operand& src2) {
vfmass(0xaf, dst, src1, src2);
}
void vfnmsub231ss(XMMRegister dst, XMMRegister src1, const Operand& src2) {
vfmass(0xbf, dst, src1, src2);
}
void vfmass(byte op, XMMRegister dst, XMMRegister src1, const Operand& src2);
void vaddsd(XMMRegister dst, XMMRegister src1, XMMRegister src2) {
vaddsd(dst, src1, Operand(src2));
}
void vaddsd(XMMRegister dst, XMMRegister src1, const Operand& src2) {
vsd(0x58, dst, src1, src2);
}
void vsubsd(XMMRegister dst, XMMRegister src1, XMMRegister src2) {
vsubsd(dst, src1, Operand(src2));
}
void vsubsd(XMMRegister dst, XMMRegister src1, const Operand& src2) {
vsd(0x5c, dst, src1, src2);
}
void vmulsd(XMMRegister dst, XMMRegister src1, XMMRegister src2) {
vmulsd(dst, src1, Operand(src2));
}
void vmulsd(XMMRegister dst, XMMRegister src1, const Operand& src2) {
vsd(0x59, dst, src1, src2);
}
void vdivsd(XMMRegister dst, XMMRegister src1, XMMRegister src2) {
vdivsd(dst, src1, Operand(src2));
}
void vdivsd(XMMRegister dst, XMMRegister src1, const Operand& src2) {
vsd(0x5e, dst, src1, src2);
}
void vsd(byte op, XMMRegister dst, XMMRegister src1, const Operand& src2);
// Prefetch src position into cache level.
// Level 1, 2 or 3 specifies CPU cache level. Level 0 specifies a
// non-temporal
void prefetch(const Operand& src, int level);
// TODO(lrn): Need SFENCE for movnt?
// Check the code size generated from label to here.
// Returns the number of bytes emitted between the position |label| was bound
// to and the current end of the instruction stream (pc_offset()).
int SizeOfCodeGeneratedSince(Label* label) {
return pc_offset() - label->pos();
}
// Mark address of the ExitJSFrame code.
void RecordJSReturn();
// Mark address of a debug break slot.
void RecordDebugBreakSlot();
// Record a comment relocation entry that can be used by a disassembler.
// Use --code-comments to enable, or provide "force = true" flag to always
// write a comment.
void RecordComment(const char* msg, bool force = false);
// Writes a single byte or word of data in the code stream. Used for
// inline tables, e.g., jump-tables.
void db(uint8_t data);
void dd(uint32_t data);
// Check if there is less than kGap bytes available in the buffer.
// If this is the case, we need to grow the buffer before emitting
// an instruction or relocation information.
inline bool buffer_overflow() const {
return pc_ >= reloc_info_writer.pos() - kGap;
}
// Get the number of bytes available in the buffer.
inline int available_space() const { return reloc_info_writer.pos() - pc_; }
static bool IsNop(Address addr);
PositionsRecorder* positions_recorder() { return &positions_recorder_; }
int relocation_writer_size() {
return (buffer_ + buffer_size_) - reloc_info_writer.pos();
}
// Avoid overflows for displacements etc.
static const int kMaximalBufferSize = 512*MB;
byte byte_at(int pos) { return buffer_[pos]; }
void set_byte_at(int pos, byte value) { buffer_[pos] = value; }
// Allocate a constant pool of the correct size for the generated code.
Handle<ConstantPoolArray> NewConstantPool(Isolate* isolate);
// Generate the constant pool for the generated code.
void PopulateConstantPool(ConstantPoolArray* constant_pool);
protected:
void emit_sse_operand(XMMRegister reg, const Operand& adr);
void emit_sse_operand(XMMRegister dst, XMMRegister src);
void emit_sse_operand(Register dst, XMMRegister src);
void emit_sse_operand(XMMRegister dst, Register src);
byte* addr_at(int pos) { return buffer_ + pos; }
private:
// Reads the 32-bit word stored in the code buffer at offset |pos|
// (resolved through addr_at).
uint32_t long_at(int pos) {
return *reinterpret_cast<uint32_t*>(addr_at(pos));
}
// Overwrites the 32-bit word in the code buffer at offset |pos| with |x|.
void long_at_put(int pos, uint32_t x) {
*reinterpret_cast<uint32_t*>(addr_at(pos)) = x;
}
// code emission
void GrowBuffer();
inline void emit(uint32_t x);
inline void emit(Handle<Object> handle);
inline void emit(uint32_t x,
RelocInfo::Mode rmode,
TypeFeedbackId id = TypeFeedbackId::None());
inline void emit(Handle<Code> code,
RelocInfo::Mode rmode,
TypeFeedbackId id = TypeFeedbackId::None());
inline void emit(const Immediate& x);
inline void emit_w(const Immediate& x);
// Emit the code-object-relative offset of the label's position
inline void emit_code_relative_offset(Label* label);
// instruction generation
void emit_arith_b(int op1, int op2, Register dst, int imm8);
// Emit a basic arithmetic instruction (i.e. first byte of the family is 0x81)
// with a given destination expression and an immediate operand. It attempts
// to use the shortest encoding possible.
// sel specifies the /n in the modrm byte (see the Intel PRM).
void emit_arith(int sel, Operand dst, const Immediate& x);
void emit_operand(Register reg, const Operand& adr);
void emit_farith(int b1, int b2, int i);
// Emit vex prefix
enum SIMDPrefix { kNone = 0x0, k66 = 0x1, kF3 = 0x2, kF2 = 0x3 };
enum VectorLength { kL128 = 0x0, kL256 = 0x4, kLIG = kL128 };
enum VexW { kW0 = 0x0, kW1 = 0x80, kWIG = kW0 };
// VEX-prefix leading-opcode ("m-mmmm") map selector.  Per the Intel SDM the
// escape maps are encoded as 0F -> 1, 0F38 -> 2, 0F3A -> 3.  The previous
// line assigned 0x2 to both k0F38 and k0F3A, which would make any
// 0F3A-map instruction emit the wrong opcode map.
enum LeadingOpcode { k0F = 0x1, k0F38 = 0x2, k0F3A = 0x3 };
inline void emit_vex_prefix(XMMRegister v, VectorLength l, SIMDPrefix pp,
LeadingOpcode m, VexW w);
// labels
void print(Label* L);
void bind_to(Label* L, int pos);
// displacements
inline Displacement disp_at(Label* L);
inline void disp_at_put(Label* L, Displacement disp);
inline void emit_disp(Label* L, Displacement::Type type);
inline void emit_near_disp(Label* L);
// record reloc info for current pc_
void RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data = 0);
friend class CodePatcher;
friend class EnsureSpace;
// code generation
RelocInfoWriter reloc_info_writer;
PositionsRecorder positions_recorder_;
friend class PositionsRecorder;
};
// Helper class that ensures that there is enough space for generating
// instructions and relocation information. The constructor makes
// sure that there is enough space and (in debug mode) the destructor
// checks that we did not generate too much.
class EnsureSpace BASE_EMBEDDED {
public:
// Grows the assembler's buffer up front if fewer than kGap bytes remain,
// so the emit that follows this guard cannot overflow.
explicit EnsureSpace(Assembler* assembler) : assembler_(assembler) {
if (assembler_->buffer_overflow()) assembler_->GrowBuffer();
#ifdef DEBUG
// Remember how much room was available so the destructor can verify how
// much was actually emitted.
space_before_ = assembler_->available_space();
#endif
}
#ifdef DEBUG
// Debug-only check: the code emitted while this guard was live must fit
// within the kGap slack reserved by the constructor.
~EnsureSpace() {
int bytes_generated = space_before_ - assembler_->available_space();
DCHECK(bytes_generated < assembler_->kGap);
}
#endif
private:
// The assembler whose buffer is being guarded.
Assembler* assembler_;
#ifdef DEBUG
// Free bytes at construction time (DEBUG builds only).
int space_before_;
#endif
};
} } // namespace v8::internal
#endif // V8_IA32_ASSEMBLER_IA32_H_
| {
"content_hash": "8aadb4daf6aa443661ed11b149cf89f1",
"timestamp": "",
"source": "github",
"line_count": 1363,
"max_line_length": 80,
"avg_line_length": 34.69479090242113,
"alnum_prop": 0.680729133625156,
"repo_name": "shelsonjava/TeaJS",
"id": "b913f7afc8e2af773ea019e5ab81ac22b0ca4e14",
"size": "49188",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "deps/v8/src/ia32/assembler-ia32.h",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "214549"
},
{
"name": "C++",
"bytes": "1754108"
},
{
"name": "JavaScript",
"bytes": "497549"
},
{
"name": "Perl",
"bytes": "1102"
},
{
"name": "Python",
"bytes": "55611"
},
{
"name": "Slash",
"bytes": "591"
}
],
"symlink_target": ""
} |
// XCTest case for the serial API queue manager.  The interface declares no
// members here; test methods are presumably defined in the corresponding
// implementation file (not visible in this chunk).
@interface BoxSerialAPIQueueManagerTests : XCTestCase
@end
| {
"content_hash": "a104f7da427d2b3f84191c98fb54a71a",
"timestamp": "",
"source": "github",
"line_count": 3,
"max_line_length": 53,
"avg_line_length": 20,
"alnum_prop": 0.85,
"repo_name": "CleanShavenApps/box-ios-sdk-v2",
"id": "0fc217bf0c43b13eaf77f79aa4f145f218c7a67c",
"size": "216",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "BoxSDKTests/QueueManagers/BoxSerialAPIQueueManagerTests.h",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "1004"
},
{
"name": "Objective-C",
"bytes": "744710"
},
{
"name": "Ruby",
"bytes": "5959"
},
{
"name": "Shell",
"bytes": "2822"
}
],
"symlink_target": ""
} |
import os

# Suite name, with any configured suffix appended.
config.name = 'GWP-ASan' + config.name_suffix

# Tests live alongside this configuration file.
config.test_source_root = os.path.dirname(__file__)

# Extensions lit treats as test files.
config.suffixes = ['.c', '.cpp', '.test']

# Base C flags; every platform except Android also links librt.
base_cflags = [config.target_cflags]
if not config.android:
    base_cflags.append("-lrt")

base_cxxflags = base_cflags + config.cxx_mode_flags + ["-std=c++11"]

sanitizer_flags = [
    "-fsanitize=scudo",
    "-g",
    "-fno-omit-frame-pointer",
    "-mno-omit-leaf-frame-pointer",
]


def build_invocation(compile_flags):
    # Render a compiler command-line fragment for use as a lit substitution.
    return " " + " ".join([config.clang] + compile_flags) + " "


# Compiler substitutions used by the test sources.
config.substitutions.append(("%clang ", build_invocation(base_cflags)))
config.substitutions.append(
    ("%clang_gwp_asan ", build_invocation(base_cflags + sanitizer_flags)))
config.substitutions.append(
    ("%clangxx_gwp_asan ", build_invocation(base_cxxflags + sanitizer_flags)))

# Default runtime options for lit tests: GWP-ASan enabled, sampling every
# allocation.
default_gwp_asan_options = 'Enabled=1:SampleRate=1'
config.environment['GWP_ASAN_OPTIONS'] = default_gwp_asan_options
default_gwp_asan_options += ':'
config.substitutions.append(('%env_gwp_asan_options=',
                             'env GWP_ASAN_OPTIONS=' + default_gwp_asan_options))

# GWP-ASan tests are currently supported on Linux only.
if config.host_os not in ['Linux']:
    config.unsupported = True
| {
"content_hash": "423b5bfed721787d9e49d887b94f2847",
"timestamp": "",
"source": "github",
"line_count": 43,
"max_line_length": 97,
"avg_line_length": 33.55813953488372,
"alnum_prop": 0.6812196812196812,
"repo_name": "endlessm/chromium-browser",
"id": "a1b2551c2f9daf9372c3b7a9f197e1dfa41e5c92",
"size": "1461",
"binary": false,
"copies": "9",
"ref": "refs/heads/master",
"path": "third_party/llvm/compiler-rt/test/gwp_asan/lit.cfg.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
/*************************************************************************/
/* transform.h */
/*************************************************************************/
/* This file is part of: */
/* GODOT ENGINE */
/* https://godotengine.org */
/*************************************************************************/
/* Copyright (c) 2007-2020 Juan Linietsky, Ariel Manzur. */
/* Copyright (c) 2014-2020 Godot Engine contributors (cf. AUTHORS.md). */
/* */
/* Permission is hereby granted, free of charge, to any person obtaining */
/* a copy of this software and associated documentation files (the */
/* "Software"), to deal in the Software without restriction, including */
/* without limitation the rights to use, copy, modify, merge, publish, */
/* distribute, sublicense, and/or sell copies of the Software, and to */
/* permit persons to whom the Software is furnished to do so, subject to */
/* the following conditions: */
/* */
/* The above copyright notice and this permission notice shall be */
/* included in all copies or substantial portions of the Software. */
/* */
/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */
/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */
/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.*/
/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */
/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */
/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */
/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
/*************************************************************************/
#ifndef GODOT_TRANSFORM_H
#define GODOT_TRANSFORM_H
#ifdef __cplusplus
extern "C" {
#endif
#include <stdint.h>
/* Byte size of the engine-side Transform; must match the core build. */
#define GODOT_TRANSFORM_SIZE 48
#ifndef GODOT_CORE_API_GODOT_TRANSFORM_TYPE_DEFINED
#define GODOT_CORE_API_GODOT_TRANSFORM_TYPE_DEFINED
/* Opaque, fixed-size storage for a Transform. The engine owns the layout;
 * GDNative clients must only pass pointers to it through the API below. */
typedef struct {
	uint8_t _dont_touch_that[GODOT_TRANSFORM_SIZE];
} godot_transform;
#endif
// reduce extern "C" nesting for VS2013
#ifdef __cplusplus
}
#endif
#include <gdnative/basis.h>
#include <gdnative/gdnative.h>
#include <gdnative/variant.h>
#include <gdnative/vector3.h>
#ifdef __cplusplus
extern "C" {
#endif
/* Constructors: each writes a new transform into *r_dest. */
void GDAPI godot_transform_new_with_axis_origin(godot_transform *r_dest, const godot_vector3 *p_x_axis, const godot_vector3 *p_y_axis, const godot_vector3 *p_z_axis, const godot_vector3 *p_origin);
void GDAPI godot_transform_new(godot_transform *r_dest, const godot_basis *p_basis, const godot_vector3 *p_origin);
void GDAPI godot_transform_new_with_quat(godot_transform *r_dest, const godot_quat *p_quat);
/* Component accessors. */
godot_basis GDAPI godot_transform_get_basis(const godot_transform *p_self);
void GDAPI godot_transform_set_basis(godot_transform *p_self, const godot_basis *p_v);
godot_vector3 GDAPI godot_transform_get_origin(const godot_transform *p_self);
void GDAPI godot_transform_set_origin(godot_transform *p_self, const godot_vector3 *p_v);
godot_string GDAPI godot_transform_as_string(const godot_transform *p_self);
/* Derived transforms: all return a new value, leaving *p_self untouched. */
godot_transform GDAPI godot_transform_inverse(const godot_transform *p_self);
godot_transform GDAPI godot_transform_affine_inverse(const godot_transform *p_self);
godot_transform GDAPI godot_transform_orthonormalized(const godot_transform *p_self);
godot_transform GDAPI godot_transform_rotated(const godot_transform *p_self, const godot_vector3 *p_axis, const godot_real p_phi);
godot_transform GDAPI godot_transform_scaled(const godot_transform *p_self, const godot_vector3 *p_scale);
godot_transform GDAPI godot_transform_translated(const godot_transform *p_self, const godot_vector3 *p_ofs);
godot_transform GDAPI godot_transform_looking_at(const godot_transform *p_self, const godot_vector3 *p_target, const godot_vector3 *p_up);
/* Applying the transform (and its inverse) to planes, vectors and AABBs. */
godot_plane GDAPI godot_transform_xform_plane(const godot_transform *p_self, const godot_plane *p_v);
godot_plane GDAPI godot_transform_xform_inv_plane(const godot_transform *p_self, const godot_plane *p_v);
void GDAPI godot_transform_new_identity(godot_transform *r_dest);
godot_bool GDAPI godot_transform_operator_equal(const godot_transform *p_self, const godot_transform *p_b);
godot_transform GDAPI godot_transform_operator_multiply(const godot_transform *p_self, const godot_transform *p_b);
godot_vector3 GDAPI godot_transform_xform_vector3(const godot_transform *p_self, const godot_vector3 *p_v);
godot_vector3 GDAPI godot_transform_xform_inv_vector3(const godot_transform *p_self, const godot_vector3 *p_v);
godot_aabb GDAPI godot_transform_xform_aabb(const godot_transform *p_self, const godot_aabb *p_v);
godot_aabb GDAPI godot_transform_xform_inv_aabb(const godot_transform *p_self, const godot_aabb *p_v);
#ifdef __cplusplus
}
#endif
#endif // GODOT_TRANSFORM_H
| {
"content_hash": "990bc8ebefa62f2dd7361a786f0e3160",
"timestamp": "",
"source": "github",
"line_count": 111,
"max_line_length": 197,
"avg_line_length": 47.98198198198198,
"alnum_prop": 0.6389410439354112,
"repo_name": "Paulloz/godot",
"id": "bc51438b17601fabf94b53363f7ec0040be7f626",
"size": "5326",
"binary": false,
"copies": "13",
"ref": "refs/heads/master",
"path": "modules/gdnative/include/gdnative/transform.h",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "50004"
},
{
"name": "C#",
"bytes": "176259"
},
{
"name": "C++",
"bytes": "18569070"
},
{
"name": "GLSL",
"bytes": "1271"
},
{
"name": "Java",
"bytes": "495377"
},
{
"name": "JavaScript",
"bytes": "14680"
},
{
"name": "Makefile",
"bytes": "451"
},
{
"name": "Objective-C",
"bytes": "2645"
},
{
"name": "Objective-C++",
"bytes": "173262"
},
{
"name": "Python",
"bytes": "336142"
},
{
"name": "Shell",
"bytes": "19610"
}
],
"symlink_target": ""
} |
// ======================================================================
/*!
* \file NFmiProducer.h
* \brief Implementation of class NFmiProducer
*/
// ======================================================================
/*!
* \class NFmiProducer
*
* Undocumented
*
*/
// ======================================================================
#pragma once
#include "NFmiIndividual.h"
//! Identifies a data producer by numeric id and name (both held by NFmiIndividual)
class NFmiProducer : public NFmiIndividual
{
 public:
  NFmiProducer();
  NFmiProducer(const NFmiProducer& theProducer);
  explicit NFmiProducer(unsigned long theIdent, const NFmiString& theName = "Kennel");
  NFmiProducer& operator=(const NFmiProducer& theProducer);
  bool operator==(const NFmiProducer& theProducer) const;
  virtual const char* ClassName() const;
 private:
};  // class NFmiProducer
// ----------------------------------------------------------------------
/*!
 * Void constructor
 */
// ----------------------------------------------------------------------
inline NFmiProducer::NFmiProducer() {}
// ----------------------------------------------------------------------
/*!
 * Constructor
 *
 * \param theIdent The numeric producer id
 * \param theName The producer name (defaults to "Kennel")
 */
// ----------------------------------------------------------------------
inline NFmiProducer::NFmiProducer(unsigned long theIdent, const NFmiString& theName)
    : NFmiIndividual(theIdent, theName)
{
}
// ----------------------------------------------------------------------
/*!
 * Copy constructor
 *
 * \param theProducer The object being copied
 */
// ----------------------------------------------------------------------
inline NFmiProducer::NFmiProducer(const NFmiProducer& theProducer) : NFmiIndividual(theProducer) {}
// ----------------------------------------------------------------------
/*!
 * Equality comparison
 *
 * Note: only the numeric ids are compared; the names are ignored.
 *
 * \param theProducer The object to compare with
 * \return True if the objects are equal
 */
// ----------------------------------------------------------------------
inline bool NFmiProducer::operator==(const NFmiProducer& theProducer) const
{
  return GetIdent() == theProducer.GetIdent();
}
// ----------------------------------------------------------------------
/*!
 * Assignment operator
 *
 * \param theProducer The object being copied
 * \return The object assigned to
 */
// ----------------------------------------------------------------------
inline NFmiProducer& NFmiProducer::operator=(const NFmiProducer& theProducer)
{
  NFmiIndividual::operator=(theProducer);
  return *this;
}
// ----------------------------------------------------------------------
/*!
 * \return The literal class name "NFmiProducer"
 */
// ----------------------------------------------------------------------
inline const char* NFmiProducer::ClassName() const
{
  return "NFmiProducer";
}
// ======================================================================
| {
"content_hash": "053a2a925a26570e028c831e1b45740a",
"timestamp": "",
"source": "github",
"line_count": 105,
"max_line_length": 99,
"avg_line_length": 27.36190476190476,
"alnum_prop": 0.4263835711799513,
"repo_name": "fmidev/smartmet-library-newbase",
"id": "f5eea45e34667a35e4ee15d250ec72ebbb499703",
"size": "2873",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "newbase/NFmiProducer.h",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "12228"
},
{
"name": "C++",
"bytes": "5092170"
},
{
"name": "CMake",
"bytes": "5524"
},
{
"name": "Makefile",
"bytes": "4159"
}
],
"symlink_target": ""
} |
# Look up the Ensembl Compara GenomeDB record for human, assembly NCBI36,
# via the (previously obtained) GenomeDB adaptor.
my $this_genome_db = $genome_db_adaptor->fetch_by_name_assembly("Homo sapiens", "NCBI36");
| {
"content_hash": "46dd57c94586780386043805dc71e15a",
"timestamp": "",
"source": "github",
"line_count": 1,
"max_line_length": 90,
"avg_line_length": 91,
"alnum_prop": 0.7142857142857143,
"repo_name": "dbolser-ebi/ensembl-compara",
"id": "fd2900196e57f4dfda15177e73301931ee955f38",
"size": "745",
"binary": false,
"copies": "1",
"ref": "refs/heads/release/75",
"path": "docs/tutorial/examples/tut_genomedb2.pl",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "12586"
},
{
"name": "Perl",
"bytes": "5317199"
},
{
"name": "Shell",
"bytes": "6045"
},
{
"name": "Tcl",
"bytes": "2577"
}
],
"symlink_target": ""
} |
package org.apache.camel.component.jetty.rest;
import java.io.BufferedReader;
import java.io.InputStream;
import java.io.InputStreamReader;
import org.apache.camel.Exchange;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.component.jetty.BaseJettyTest;
import org.apache.camel.component.mock.MockEndpoint;
import org.apache.camel.model.dataformat.JsonDataFormat;
import org.apache.camel.model.dataformat.JsonLibrary;
import org.apache.camel.model.rest.RestBindingMode;
import org.junit.Test;
/**
 * Verifies that with REST binding mode OFF, the declared input/output types
 * ("contract") on the route still drive JSON&lt;-&gt;POJO conversion via the
 * registered transformers, so the mock endpoint receives a {@code UserPojoEx}
 * and the HTTP response contains the JSON produced from it.
 */
public class RestJettyBindingModeOffWithContractTest extends BaseJettyTest {
    @Test
    public void testBindingModeOffWithContract() throws Exception {
        MockEndpoint mock = getMockEndpoint("mock:input");
        mock.expectedMessageCount(1);
        mock.message(0).body().isInstanceOf(UserPojoEx.class);
        String body = "{\"id\": 123, \"name\": \"Donald Duck\"}";
        Object answer = template.requestBodyAndHeader("http://localhost:" + getPort() + "/users/new", body, Exchange.CONTENT_TYPE, "application/json");
        assertNotNull(answer);
        // The Jetty component returns the response as a stream; read it fully.
        BufferedReader reader = new BufferedReader(new InputStreamReader((InputStream)answer));
        String line;
        String answerString = "";
        while ((line = reader.readLine()) != null) {
            answerString += line;
        }
        // The processor below sets active=true, so the JSON response must show it.
        assertTrue("Unexpected response: " + answerString, answerString.contains("\"active\":true"));
        assertMockEndpointsSatisfied();
        // The message body reaching the mock must already be the unmarshalled POJO.
        Object obj = mock.getReceivedExchanges().get(0).getIn().getBody();
        assertEquals(UserPojoEx.class, obj.getClass());
        UserPojoEx user = (UserPojoEx)obj;
        assertNotNull(user);
        assertEquals(123, user.getId());
        assertEquals("Donald Duck", user.getName());
        assertEquals(true, user.isActive());
    }
    @Override
    protected RouteBuilder createRouteBuilder() throws Exception {
        return new RouteBuilder() {
            @Override
            public void configure() throws Exception {
                restConfiguration().component("jetty").host("localhost").port(getPort()).bindingMode(RestBindingMode.off);
                // Jackson data format used by both transformer directions.
                JsonDataFormat jsondf = new JsonDataFormat();
                jsondf.setLibrary(JsonLibrary.Jackson);
                jsondf.setAllowUnmarshallType(true);
                jsondf.setUnmarshalType(UserPojoEx.class);
                transformer()
                    .fromType("json")
                    .toType(UserPojoEx.class)
                    .withDataFormat(jsondf);
                transformer()
                    .fromType(UserPojoEx.class)
                    .toType("json")
                    .withDataFormat(jsondf);
                rest("/users/")
                    // REST binding does nothing
                    .post("new")
                    .route()
                        // contract advice converts between JSON and UserPojoEx directly
                        .inputType(UserPojoEx.class)
                        .outputType("json")
                        .process(ex -> {
                            ex.getIn().getBody(UserPojoEx.class).setActive(true);
                        })
                        .to("mock:input");
            }
        };
    }
}
| {
"content_hash": "8941ed943c65795c915726539069d9a8",
"timestamp": "",
"source": "github",
"line_count": 81,
"max_line_length": 151,
"avg_line_length": 40.111111111111114,
"alnum_prop": 0.6026469682979378,
"repo_name": "Fabryprog/camel",
"id": "ea1dd70fb229e3d26335462b810bd3f74ce554cc",
"size": "4051",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "components/camel-jetty/src/test/java/org/apache/camel/component/jetty/rest/RestJettyBindingModeOffWithContractTest.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Apex",
"bytes": "6521"
},
{
"name": "Batchfile",
"bytes": "2353"
},
{
"name": "CSS",
"bytes": "17204"
},
{
"name": "Elm",
"bytes": "10852"
},
{
"name": "FreeMarker",
"bytes": "8015"
},
{
"name": "Groovy",
"bytes": "14479"
},
{
"name": "HTML",
"bytes": "909437"
},
{
"name": "Java",
"bytes": "82182194"
},
{
"name": "JavaScript",
"bytes": "102432"
},
{
"name": "Makefile",
"bytes": "513"
},
{
"name": "Shell",
"bytes": "17240"
},
{
"name": "TSQL",
"bytes": "28835"
},
{
"name": "Tcl",
"bytes": "4974"
},
{
"name": "Thrift",
"bytes": "6979"
},
{
"name": "XQuery",
"bytes": "546"
},
{
"name": "XSLT",
"bytes": "271473"
}
],
"symlink_target": ""
} |
<?php
namespace Oro\Bundle\WorkflowBundle\Tests\Unit\Helper;
use Oro\Bundle\ActionBundle\Model\Attribute;
use Oro\Bundle\EntityBundle\ORM\DoctrineHelper;
use Oro\Bundle\SecurityBundle\Acl\Group\AclGroupProviderInterface;
use Oro\Bundle\WorkflowBundle\Acl\AclManager;
use Oro\Bundle\WorkflowBundle\Entity\WorkflowDefinition;
use Oro\Bundle\WorkflowBundle\Entity\WorkflowItem;
use Oro\Bundle\WorkflowBundle\Entity\WorkflowStep;
use Oro\Bundle\WorkflowBundle\Helper\WorkflowDataHelper;
use Oro\Bundle\WorkflowBundle\Model\Step;
use Oro\Bundle\WorkflowBundle\Model\StepManager;
use Oro\Bundle\WorkflowBundle\Model\Transition;
use Oro\Bundle\WorkflowBundle\Model\TransitionManager;
use Oro\Bundle\WorkflowBundle\Model\Workflow;
use Oro\Bundle\WorkflowBundle\Model\WorkflowManager;
use Oro\Bundle\WorkflowBundle\Restriction\RestrictionManager;
use Oro\Bundle\WorkflowBundle\Tests\Unit\Model\Stub\EntityWithWorkflow;
use Symfony\Component\Routing\Generator\UrlGeneratorInterface;
use Symfony\Component\Security\Core\Authorization\AuthorizationCheckerInterface;
use Symfony\Contracts\Translation\TranslatorInterface;
/**
 * Unit tests for WorkflowDataHelper::getEntityWorkflowsData().
 *
 * All collaborators are mocked: authorization always grants, the router
 * fakes URLs as "route/param1/param2", and workflows, steps and transitions
 * are built from plain array fixtures, so only the helper's output
 * structure is under test.
 */
class WorkflowDataHelperTest extends \PHPUnit\Framework\TestCase
{
    /** @var AuthorizationCheckerInterface|\PHPUnit\Framework\MockObject\MockObject */
    protected $authorizationChecker;
    /** @var TranslatorInterface|\PHPUnit\Framework\MockObject\MockObject */
    protected $translator;
    /** @var UrlGeneratorInterface|\PHPUnit\Framework\MockObject\MockObject */
    protected $router;
    /** @var AclGroupProviderInterface|\PHPUnit\Framework\MockObject\MockObject */
    private $aclGroupProvider;
    /** @var WorkflowDataHelper */
    protected $workflowDataHelper;
    /**
     * {@inheritdoc}
     */
    public function setUp()
    {
        $this->authorizationChecker = $this->createMock(AuthorizationCheckerInterface::class);
        $this->translator = $this->createMock(TranslatorInterface::class);
        $this->router = $this->createMock(UrlGeneratorInterface::class);
        $this->aclGroupProvider = $this->createMock(AclGroupProviderInterface::class);
        $this->aclGroupProvider->expects($this->any())
            ->method('getGroup')
            ->willReturn(AclGroupProviderInterface::DEFAULT_SECURITY_GROUP);
        // Grant everything: transition availability is driven solely by the fixtures.
        $this->authorizationChecker->expects($this->any())
            ->method('isGranted')
            ->willReturn(true);
        // Fake URL generation as "route/param1/param2/..." for easy assertions.
        $this->router->expects($this->any())
            ->method('generate')
            ->willReturnCallback(function ($route, array $params) {
                return sprintf('%s/%s', $route, implode('/', $params));
            });
    }
    /**
     * @SuppressWarnings(PHPMD.ExcessiveMethodLength)
     *
     * @return array
     */
    public function workflowsDataProvider()
    {
        return [
            'workflows' => [
                'workflowsData' => [
                    [
                        'name' => 'started_flow',
                        'transitions' =>
                            [
                                [
                                    'name' => 'open',
                                    'isStart' => false,
                                    'hasForm' => true,
                                    'isAvailable' => true,
                                ],
                                [
                                    'name' => 'close',
                                    'isStart' => false,
                                    'hasForm' => false,
                                    'isAvailable' => true,
                                ],
                                [
                                    'name' => 'disallowed',
                                    'isStart' => false,
                                    'hasForm' => false,
                                    'isAvailable' => true,
                                ],
                                [
                                    'name' => 'unavailable',
                                    'isStart' => false,
                                    'hasForm' => false,
                                    'isAvailable' => false,
                                ],
                            ],
                        'allowed' => ['open', 'close', 'unavailable'],
                        'isStarted' => true,
                    ],
                    [
                        'name' => 'unstarted_flow',
                        'transitions' => [
                            [
                                'name' => 'start',
                                'isStart' => true,
                                'hasForm' => false,
                                'isAvailable' => true,
                            ],
                        ],
                        'allowed' => ['start'],
                        'isStarted' => false,
                    ],
                    [
                        'name' => 'flow_which_cannot_be_started',
                        'transitions' => [
                            [
                                'name' => TransitionManager::DEFAULT_START_TRANSITION_NAME,
                                'isStart' => false,
                                'hasForm' => false,
                                'isAvailable' => false,
                            ],
                        ],
                        'allowed' => [],
                        'isStartStep' => true,
                        'isStarted' => false,
                    ],
                ],
                'expected' => [
                    [
                        'name' => 'started_flow',
                        'label' => 'Started_flow',
                        'isStarted' => true,
                        'workflowItemId' => 1,
                        'transitionsData' => [
                            [
                                'name' => 'open',
                                'label' => null,
                                'isStart' => false,
                                'hasForm' => true,
                                'displayType' => null,
                                'message' => '',
                                'frontendOptions' => null,
                                'transitionUrl' => 'oro_api_workflow_transit/open/1',
                                'dialogUrl' => 'oro_workflow_widget_transition_form/open/1',
                            ],
                            [
                                'name' => 'close',
                                'label' => null,
                                'isStart' => false,
                                'hasForm' => false,
                                'displayType' => null,
                                'message' => '',
                                'frontendOptions' => null,
                                'transitionUrl' => 'oro_api_workflow_transit/close/1',
                            ],
                        ],
                    ],
                    [
                        'name' => 'unstarted_flow',
                        'label' => 'Unstarted_flow',
                        'isStarted' => false,
                        'workflowItemId' => null,
                        'transitionsData' => [
                            [
                                'name' => 'start',
                                'label' => null,
                                'isStart' => true,
                                'hasForm' => false,
                                'displayType' => null,
                                'message' => '',
                                'frontendOptions' => null,
                                'transitionUrl' => 'oro_api_workflow_start/unstarted_flow/start/',
                            ],
                        ],
                    ],
                    [
                        'name' => 'flow_which_cannot_be_started',
                        'label' => 'Flow_which_cannot_be_started',
                        'isStarted' => false,
                        'workflowItemId' => null,
                        'transitionsData' => [],
                    ],
                ],
            ],
        ];
    }
    /**
     * @dataProvider workflowsDataProvider
     *
     * @param $workflowsData
     * @param $expected
     */
    public function testGetEntityWorkflowsData($workflowsData, $expected)
    {
        $entity = new EntityWithWorkflow();
        $workflowDataHelper = new WorkflowDataHelper(
            $this->getWorkflowManager($entity, $workflowsData),
            $this->authorizationChecker,
            $this->translator,
            $this->router,
            $this->aclGroupProvider
        );
        $this->assertEquals($expected, $workflowDataHelper->getEntityWorkflowsData($entity));
    }
    /**
     * Builds a Transition mock with fixed name/isStart/hasForm/isAvailable answers.
     *
     * @param string $name
     * @param bool $isStart
     * @param bool $hasForm
     * @param bool $isAvailable
     *
     * @return Transition
     */
    protected function getTransition($name, $isStart = false, $hasForm = false, $isAvailable = true)
    {
        $transition = $this->getMockBuilder(Transition::class)
            ->disableOriginalConstructor()
            ->getMock();
        $transition->expects($this->any())
            ->method('getName')
            ->willReturn($name);
        $transition->expects($this->any())
            ->method('isAvailable')
            ->willReturn($isAvailable);
        $transition->expects($this->any())
            ->method('isStart')
            ->willReturn($isStart);
        $transition->expects($this->any())
            ->method('hasForm')
            ->willReturn($hasForm);
        /** @var Transition $transition */
        return $transition;
    }
    /**
     * @param object $entity
     * @param array $workflowsData ['name' => string, 'transitions' => array, 'allowed' => array]
     *
     * @return WorkflowManager
     */
    protected function getWorkflowManager($entity, array $workflowsData)
    {
        $workflows = array_map(
            function (array $workflow) {
                return $this->getWorkflow(
                    $workflow['name'],
                    $workflow['transitions'],
                    $workflow['allowed'],
                    $workflow['isStartStep'] ?? false
                );
            },
            $workflowsData
        );
        $workflowManager = $this->getMockBuilder(WorkflowManager::class)->disableOriginalConstructor()->getMock();
        $workflowManager
            ->expects($this->any())
            ->method('getApplicableWorkflows')
            ->willReturn($workflows);
        // Started workflows get a WorkflowItem with id=1; unstarted ones get null.
        $workflowItemMap = array_map(
            function (array $workflow) use ($entity) {
                $workflowItem = new WorkflowItem();
                $workflowItem->setId(1);
                $workflowItem->setCurrentStep((new WorkflowStep())->setName('Start'));
                return [$entity, $workflow['name'], $workflow['isStarted'] ? $workflowItem : null];
            },
            $workflowsData
        );
        $workflowManager
            ->expects($this->any())
            ->method('getWorkflowItem')
            ->willReturnMap($workflowItemMap);
        /** @var WorkflowManager $workflowManager */
        return $workflowManager;
    }
    /**
     * Get mocked instance of TransitionManager with test transition definitions
     *
     * @param array $transitionsData ['name' => string, 'isStart' => bool, 'hasForm' => bool, 'isAvailable' => bool]
     *
     * @return TransitionManager
     */
    protected function getTransitionManager(array $transitionsData)
    {
        $extractTransitionsMap = array_map(
            function ($transition) {
                return [
                    $transition['name'],
                    $this->getTransition(
                        $transition['name'],
                        $transition['isStart'],
                        $transition['hasForm'],
                        $transition['isAvailable']
                    ),
                ];
            },
            $transitionsData
        );
        $transitionManager = $this->getMockBuilder(TransitionManager::class)
            ->disableOriginalConstructor()
            ->getMock();
        $transitionManager->expects($this->any())
            ->method('extractTransition')
            ->willReturnMap($extractTransitionsMap);
        $startTransitions = array_filter(
            array_column($extractTransitionsMap, 1),
            function (Transition $transition) {
                return $transition->isStart();
            }
        );
        $transitionManager->expects($this->any())
            ->method('getStartTransitions')
            ->willReturn($startTransitions);
        $defaultTransitions = array_filter(
            array_column($extractTransitionsMap, 1),
            function (Transition $transition) {
                return $transition->getName() === TransitionManager::DEFAULT_START_TRANSITION_NAME;
            }
        );
        $transitionManager->expects($this->any())
            ->method('getDefaultStartTransition')
            ->willReturn(reset($defaultTransitions));
        /** @var TransitionManager $transitionManager */
        return $transitionManager;
    }
    /**
     * Builds a real Workflow wired with a single 'Start' step and mocked managers.
     *
     * @param string $workflowName
     * @param array $transitionsData
     * @param array $allowed
     * @param bool $isStartStep
     *
     * @return Workflow
     */
    protected function getWorkflow($workflowName, array $transitionsData, array $allowed, bool $isStartStep)
    {
        $step = new Step();
        $step->setName('Start');
        $step->setAllowedTransitions($allowed);
        $stepManager = new StepManager([$step]);
        if ($isStartStep) {
            $stepManager->setStartStepName($step->getName());
        }
        /** @var AclManager $aclManager */
        $aclManager = $this->getMockBuilder(AclManager::class)
            ->disableOriginalConstructor()
            ->getMock();
        /** @var DoctrineHelper $doctrineHelper */
        $doctrineHelper = $this->getMockBuilder(DoctrineHelper::class)
            ->disableOriginalConstructor()
            ->getMock();
        /** @var RestrictionManager|\PHPUnit\Framework\MockObject\MockObject $restrictionManager */
        $restrictionManager = $this->getMockBuilder(RestrictionManager::class)
            ->disableOriginalConstructor()
            ->getMock();
        $definition = new WorkflowDefinition();
        $definition->setName($workflowName);
        $definition->setLabel(ucfirst($workflowName));
        $workflow = new Workflow(
            $doctrineHelper,
            $aclManager,
            $restrictionManager,
            $stepManager,
            $attributeManager = null,
            $this->getTransitionManager($transitionsData)
        );
        $workflow->setDefinition($definition);
        // The helper resolves the entity through the 'entity' attribute.
        $entityAttribute = new Attribute();
        $entityAttribute->setName('entity');
        $workflow->getAttributeManager()->setAttributes([$entityAttribute]);
        $workflow->getAttributeManager()->setEntityAttributeName($entityAttribute->getName());
        return $workflow;
    }
}
| {
"content_hash": "7e61d4fa37f760a81d4f5a6942207f64",
"timestamp": "",
"source": "github",
"line_count": 406,
"max_line_length": 116,
"avg_line_length": 37.522167487684726,
"alnum_prop": 0.47210187737954573,
"repo_name": "orocrm/platform",
"id": "4f44ac5aa1167d0188a33a5dd8e51819f5ef53eb",
"size": "15234",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/Oro/Bundle/WorkflowBundle/Tests/Unit/Helper/WorkflowDataHelperTest.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "618485"
},
{
"name": "Gherkin",
"bytes": "158217"
},
{
"name": "HTML",
"bytes": "1648915"
},
{
"name": "JavaScript",
"bytes": "3326127"
},
{
"name": "PHP",
"bytes": "37828618"
}
],
"symlink_target": ""
} |
import sys
import os
import subprocess
import time
import getpass
# DEBUG: when True, commands are printed instead of executed (dry-run).
DEBUG=False
# VERBOSE: when True, commands and HTTP headers are echoed as they run.
VERBOSE=False
# Shared curl flags: skip TLS verification and persist cookies across calls.
cookies = "--insecure --cookie-jar .cookies.txt --cookie .cookies.txt"
# Directory containing this script; used to locate sibling tools (plcquery.py, exp/).
PREFIX=os.path.dirname(os.path.realpath(__file__))
def cmd_exists(cmd):
    """Return True when the shell can resolve `cmd` via `type`, else False.

    The lookup's output is swallowed; only the exit status is inspected.
    """
    status = subprocess.call("type " + cmd,
                             shell=True,
                             stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE)
    return status == 0
def confirm_cmd_exists(cmd):
    """Abort the whole script (exit 1) with guidance if `cmd` is not in PATH."""
    if not cmd_exists(cmd):
        print "Error: This script depends on '%s'." % cmd
        print "We could not find '%s' in PATH. Please update PATH or" % cmd
        print "install the package for '%s' on your system." % cmd
        sys.exit(1)
def system(cmd):
    """Run `cmd` in a shell and return its exit status.

    Honors the module-level DEBUG (print only, pretend success) and
    VERBOSE (echo the command before running) flags.
    """
    ## NOTE: use this rather than os.system() to catch
    ## KeyboardInterrupt correctly.
    if DEBUG:
        print cmd
        # simulate success without running the command.
        return 0
    if VERBOSE:
        print cmd
    return subprocess.call(cmd, stdout=sys.stdout,
                           stderr=sys.stderr, shell=True)
REBOOT_MESSAGE="""
NOTE: please send this message to [email protected]:
Around %(ts)s we rebooted this server due to the system not responding:
%(hostname)s
Once the reboot completes, all services on this system should return to normal.
"""
def usage():
    """Return the multi-line usage text shown by the OptionParser help."""
    return """
usage:
    All commands take a host specification. A host spec is a FQHN, or a
    shorter pattern. For example, "mlab1.nuq01", or "mlab1d.nuq01"
    without quotes are valid host specs and may be used interchangably.
drac.py <host spec>
    Take hostname argument and print out associated PCU information.
    <hostname> may be a pattern, such as '*.site.measurement-lab.org'.
    Acts like the original 'drac-password.py' script.
drac.py reboot <drac host spec>
    Use DRAC to reboot <hostname>
drac.py shell <drac host spec>
    Take the drac-hostname argument and log into the DRAC interface via
    SSH. Then, control is returned to the user to enter DRAC commands
    in the shell. i.e. reboot, or get system info, etc.
drac.py console5 <drac host spec>
drac.py console6 <drac host spec>
    Take the drac-hostname argument and open the JavaWebStart Virtual
    Console. This depends upon correct configuration of JavaWebStart,
    which is platform dependent. Check that 'javaws' is in your path.
    console5 is for DRAC5
        ams01, ams02, atl01, dfw01, ham01, iad01, lax01, lga01, lga02,
        lhr01, mia01, nuq01, ord01, par01, sea01,
    console6 is for iDRAC6
        arn01, ath01, ath02, dub01, hnd01, mad01, mil01, syd01, syd02,
        tpe01, vie01, wlg01,
    unknown
        svg01,
    unsupported (hpilo)
        trn01
    Not all systems have been tested. There may not be 100% coverage
    for MLab DRAC's.
drac.py getsysinfo <drac host spec>
    Take the hostname argument and log into the DRAC interface via
    SSH. Then run 'racadm getsysinfo'.
    <hostname> may be a pattern, such as '*.site.measurement-lab.org'.
drac.py resetpassword <drac host spec> <newpassword>
    Take the drac-hostname and set a new password.
    The current password is taken from the PCU entry in the PLC
    database. Then, this command will log into the DRAC interface
    and reset the password there. Finally, it will update PLC's PCU
    entry.
"""
def parse_options():
    """Parse command-line flags and positional arguments.

    Returns a 5-tuple (command, host_spec, newpasswd, options, args):
      1 positional arg  -> command defaults to "list", arg is the host spec;
      2 positional args -> (command, host_spec);
      3 positional args -> (command, host_spec, newpasswd).
    Exits with help text when no positional arguments are given.
    """
    from optparse import OptionParser
    parser = OptionParser(usage=usage())
    parser.set_defaults(promptpassword=False,
                        user="admin",
                        verbose=False,
                        debug=False)
    parser.add_option("-v", "--verbose", dest="verbose", action="store_true",
                      help="Verbose mode: print extra details.")
    parser.add_option("-n", "--dryrun", dest="debug", action="store_true",
                      help="Debug mode: perform no updates.")
    parser.add_option("-u", "--user", dest="user",
                      metavar="admin",
                      help=("The DRAC username. Should be used with '-p'"))
    parser.add_option("-p", "--promptpassword", dest="promptpassword",
                      action="store_true",
                      help=("Prompt for DRAC password rather than querying "+
                            "PLC. This is useful if you do not have a PLC "+
                            "account"))
    (options, args) = parser.parse_args()
    if len(args) == 0:
        parser.print_help()
        sys.exit(1)
    # Defaults when fewer positional arguments are supplied.
    command = "list"
    host_spec = None
    newpasswd = None
    if len(args) == 1:
        host_spec = args[0]
    elif len(args) == 2:
        command = args[0]
        host_spec = args[1]
    elif len(args) == 3:
        command = args[0]
        host_spec = args[1]
        newpasswd = args[2]
    return (command, host_spec, newpasswd, options, args)
def hspec_to_pcu(host_spec):
    """Translate a host spec into the FQDN of its PCU (DRAC) interface.

    Accepted forms:
      short: "mlab1.nuq01"  or "mlab1d.nuq01"
          -> "mlab1d.nuq01.measurement-lab.org"
      long:  "mlab1.nuq01.measurement-lab.org" (or the "mlab1d..." form)
          -> "mlab1d.nuq01.measurement-lab.org"
    Any other shape (e.g. a pattern like '*.site.measurement-lab.org')
    is returned unchanged.

    Fix: the original ended with an unreachable `return None` after an
    exhaustive if/elif/else; that dead code is removed.
    """
    fields = host_spec.split(".")
    suffix = "measurement-lab.org"
    if len(fields) == 2:  # short form.
        if fields[0][-1] == 'd':  # already a pcu name.
            return host_spec + "." + suffix
        return "%sd.%s." % (fields[0], fields[1]) + suffix
    if len(fields) == 4:  # long form
        if fields[0][-1] == 'd':  # already a pcu name.
            return host_spec
        fields[0] = fields[0] + "d"
        return ".".join(fields)
    # Unrecognized shapes pass through untouched.
    return host_spec
def drac_formatLoginRequest(username, passwd):
    """Build the URL-encoded body for a DRAC web-login POST.

    Backslashes are doubled first, then each character in the DRAC
    special set is rewritten as '@0' + two lowercase hex digits of its
    code point (DRAC's own escaping scheme, not standard percent-encoding).
    """
    special = '@(),:?=&#+%'

    def encode(value):
        value = value.replace("\\", "\\\\")
        pieces = []
        for ch in value:
            if ch in special:
                pieces.append("@0%02x" % ord(ch))
            else:
                pieces.append(ch)
        return "".join(pieces)

    return 'user=' + encode(username) + '&password=' + encode(passwd)
def drac_getLoginURL(console):
    """Return the login URL path for the given console type.

    'console5' -> DRAC5 CGI login; 'console6' -> iDRAC6 login endpoint.
    Exits the script for any other value.
    """
    if console == "console5":
        login_url = "cgi-bin/webcgi/login"
    elif console == "console6":
        login_url = "data/login"
    else:
        print "unknown console type: %s" % console
        sys.exit(1)
    return login_url
def drac_login(login_url, postData, hostname, output):
    """POST `postData` to https://hostname/login_url via curl; response goes to `output`.

    Returns True/False per run_curl's header check (200/302 = success).
    """
    ret = run_curl(hostname, login_url,
                   output, "-d '%s'" % postData)
    return ret
def run_curl(hostname, url, output, extra_args=""):
    """Fetch https://hostname/url with curl, saving the body to `output`.

    Response headers are dumped to /tmp/out.headers and the call is judged
    successful only when curl exits 0 AND the headers contain
    "200 OK" or "302 Found". Cookies persist via the module-level flags.
    """
    cmd_fmt = "curl -D /tmp/out.headers %s -s %s -o %s 'https://%s/%s'"
    ret = system(cmd_fmt % (extra_args, cookies, output, hostname, url))
    if ret != 0:
        return False
    if DEBUG:
        # if DEBUG is true, out.headers will not exist, and it doesn't matter
        return True
    headers = open("/tmp/out.headers", 'r').read().strip()
    if VERBOSE:
        print headers
    if "200 OK" in headers or "302 Found" in headers:
        return True
    return False
def drac_downloadConsoleJNLP(console, user, passwd, hostname, jnlp_output):
    """Log into the DRAC web UI and download the Virtual Console JNLP file.

    Dispatches to the DRAC5 or iDRAC6 variant based on `console`.
    Returns True on success, False when login or download fails.
    """
    # Millisecond timestamp, used by the DRACs as a cache-busting session id.
    date_s=int((time.time())*1000)
    postData = drac_formatLoginRequest(user, passwd)
    login_url = drac_getLoginURL(console)
    login_output = "/tmp/out.login"
    print "Logging in.."
    login_ok = drac_login(login_url, postData, hostname, login_output)
    if not login_ok:
        print "Failed to login to %s" % hostname
        return False
    if VERBOSE: system("cat "+login_output); time.sleep(10)
    print "Getting *.jnlp for Java Web Start."
    if console == "console5":
        return drac5_downloadConsoleJNLP(hostname, date_s, jnlp_output)
    elif console == "console6":
        return drac6_downloadConsoleJNLP(hostname, date_s,
                                         login_output, jnlp_output)
    else:
        # drac_getLoginURL() already exits for unknown consoles, so this is defensive.
        raise Exception("Unrecognized console type: %s" % console)
def drac6_downloadConsoleJNLP(hostname, date_s, login_output, jnlp_output):
    """Download the iDRAC6 viewer JNLP, handling per-site URL variations.

    Extracts the forwardUrl token from the login response with sed/tr,
    then picks one of three viewer.jnlp URL shapes depending on the site
    and whether a token was found. Returns run_curl's success boolean.
    """
    # Pull the session token out of the login page's <forwardUrl> element.
    cmd = (r"sed -e "+
           r"'s/.*forwardUrl>index.html\(.*\)<\/forwardUrl.*/\1/g'"+
           r" " + login_output + r" | tr '?' ' '")
    if DEBUG:
        print cmd
        token = "faketoken"
    else:
        token = os.popen(cmd, 'r').read().strip()
    ## NOTE: handle the many variations on a theme.
    if "ath01" in hostname or "syd01" in hostname:
        url = "viewer.jnlp(%s@0@%s)" % (hostname, date_s)
    elif len(token) > 10:
        url = "viewer.jnlp(%s@0@title@%s@%s)" % (hostname, date_s, token)
    else:
        url = "viewer.jnlp(%s@0@title@%s)" % (hostname, date_s)
    ret = run_curl(hostname, url, jnlp_output)
    if VERBOSE: system("cat "+ jnlp_output)
    return ret
def drac5_downloadConsoleJNLP(hostname, date_s, jnlp_output):
    """Download the DRAC5 vKVM JNLP file.

    First requests a KVM session id, fetches <sessionid>.jnlp, and falls
    back to the vkvmjnlp?id=<timestamp> form when the first URL 404s.
    Returns run_curl's success boolean for the final attempt.
    """
    print "Getting Virtual Console SessionID.."
    session_url="cgi-bin/webcgi/vkvm?state=1"
    session_ok = run_curl(hostname, session_url, "/tmp/tmp.out")
    if not session_ok: return session_ok
    # Scrape the vKvmSessionId value out of the returned HTML.
    cmd = ("cat /tmp/tmp.out | grep vKvmSessionId |"+
           " tr '<>' ' ' | awk '{print $5}' ")
    if DEBUG:
        print cmd
        kvmSessionId = "fakeSessionID"
    else:
        kvmSessionId = os.popen(cmd).read().strip()
    jnlp_url="vkvm/%s.jnlp" % kvmSessionId
    jnlp_ok = run_curl(hostname, jnlp_url, jnlp_output)
    # NOTE: <sessionid>.jnlp is not always valid, so try the second variation
    cmd = "grep 'was not found on this server' "+jnlp_output+" >/dev/null"
    not_found = system(cmd)
    if not_found == 0:
        print jnlp_ok, "Second attempt..."
        jnlp_url="cgi-bin/webcgi/vkvmjnlp?id=%s" % date_s
        jnlp_ok = run_curl(hostname, jnlp_url, jnlp_output)
    if VERBOSE: system("cat "+jnlp_output)
    return jnlp_ok
def get_pcu_fields(host_spec, options, return_ip=False):
    """Resolve PCU credentials for `host_spec`.

    With options.promptpassword, asks interactively and returns a single
    (hostname, user, passwd, "DRAC") tuple. Otherwise shells out to
    plcquery.py and returns one tuple per matching PCU record:
    (hostname, user, passwd, model) or, when return_ip is True,
    (hostname, user, passwd, model, ip).
    """
    pcuname = hspec_to_pcu(host_spec)
    ret = []
    if options.promptpassword:
        passwd = getpass.getpass("DRAC passwd: ")
        ret = [(pcuname, options.user, passwd, "DRAC")]
    else:
        cmd=(PREFIX+"/plcquery.py --action=get --type pcu --filter hostname=%s "+
             "--fields hostname,username,password,model,ip") % pcuname
        if DEBUG: print cmd
        lines= os.popen(cmd, 'r').readlines()
        for line in lines:
            # Each line: hostname username password model ip (whitespace separated).
            h_u_pw_model= line.strip().split()
            hostname = h_u_pw_model[0]
            user = h_u_pw_model[1]
            passwd = h_u_pw_model[2]
            model = h_u_pw_model[3]
            ip = h_u_pw_model[4]
            if return_ip:
                ret.append((hostname, user, passwd, model, ip))
            else:
                ret.append((hostname, user, passwd, model))
    return ret
def main():
    """CLI entry point: dispatch the parsed sub-command for each matching PCU."""
    global DEBUG
    global VERBOSE
    (command, host_spec, newpasswd, options, args) = parse_options()
    DEBUG=options.debug
    VERBOSE=options.verbose
    # All remote PCU interaction is driven through expect scripts.
    confirm_cmd_exists("expect")
    ## NOTE: Make sure the session is setup correctly.
    ## Use os.system() b/c the custom system() function
    ## doesn't flush stdout correctly. :-/
    if not options.promptpassword:
        print "Verifying PLC Session...\n"
        cmd=PREFIX+"/plcquery.py --action=checksession"
        if DEBUG:
            print cmd
        else:
            os.system(cmd)
    if command == "shell":
        # Interactive racadm shell on every PCU matched by host_spec.
        pcu_fields = get_pcu_fields(host_spec, options)
        print "Login can be slow. When you receive a prompt, try typing"
        print " 'help' or 'racadm help' for a list of available commands."
        print " 'exit' will exit the shell and 'drac.py' script.\n"
        for hostname,user,passwd,model in pcu_fields:
            system("expect %s/exp/SHELL.exp %s %s '%s'" %
                   (PREFIX, hostname, user, passwd))
    elif command in ["console6", "console5"]:
        # Launch the DRAC virtual console via Java Web Start.
        # Exactly one PCU record is required.
        pcu_fields = get_pcu_fields(host_spec, options)
        if len(pcu_fields) != 1:
            print "host spec '%s' did not return a solitary record" % host_spec
            sys.exit(1)
        (hostname,user,passwd,model) = pcu_fields[0]
        if model != "DRAC":
            msg = "Automatic console loading is not supported "
            msg+= "for this model PCU: %s." % model
            print msg
            sys.exit(1)
        print "Virtual Console depends on correct setup of JavaWebStart..."
        jnlp_output = "/tmp/out.jnlp"
        download_ok = drac_downloadConsoleJNLP(command, user, passwd,
                                               hostname, jnlp_output)
        if not download_ok:
            print "Failed to download JNLP file from %s" % hostname
            sys.exit(1)
        print "Loading JavaWebStart."
        system("javaws "+jnlp_output)
    elif command == "getsysinfo":
        # Dump system info for every matched PCU of a supported model.
        pcu_fields = get_pcu_fields(host_spec, options)
        if len(pcu_fields) == 0:
            print "host spec '%s' did not return any records" % host_spec
            sys.exit(1)
        for hostname,user,passwd,model in pcu_fields:
            if model not in ["DRAC", "IMM", "HPiLO"]:
                print "%s is an unsupported PCU model" % model
                continue
            system("expect %s/exp/GETSYSINFO.exp %s %s '%s'" %
                   (PREFIX, hostname, user, passwd))
    elif command == "reboot":
        # Power-cycle the host behind each matched PCU.
        pcu_fields = get_pcu_fields(host_spec, options)
        if len(pcu_fields) == 0:
            print "host spec '%s' did not return any records" % host_spec
            sys.exit(1)
        for hostname,user,passwd,model in pcu_fields:
            if model in ["DRAC", "IMM", "HPiLO"]:
                system("expect %s/exp/REBOOT.exp %s %s '%s' %s %s" %
                       (PREFIX, hostname, user, passwd, model, options.debug))
            elif model == "OpenIPMI":
                # IPMI-capable PCUs are cycled directly with ipmitool.
                cmd = "ipmitool -I lanplus -H %s -U %s -P '%s' power cycle"
                cmd = cmd % (hostname, user, passwd)
                system(cmd)
            else:
                print "%s is an unsupported PCU model" % model
                continue
            # Emit an announcement template for the operators list.
            ts = time.strftime("%b %d %H:%M UTC", time.gmtime())
            msg = REBOOT_MESSAGE % {'ts' : ts, 'hostname' : host_spec }
            # TODO: add option to --send this message to ops@ list
            print msg
    elif command == "rebootdrac":
        # After a shell login, some pcus can be "reset". i.e.
        # TODO: IMM can be soft reset using 'resetsp'
        # TODO: DRAC can be soft reset using 'racreset soft'
        # TODO: HPiLO can be soft reset using 'reset /map1'
        pass
    elif command == "resetpassword":
        ## NOTE: be extra verbose for password resets, in case something goes
        ## wrong, to see where.
        if options.promptpassword:
            print "Password resets are not supported without updating PLC db."
            print "Do not specify password prompt, and try again."
            sys.exit(1)
        pcu_fields = get_pcu_fields(host_spec, options)
        if len(pcu_fields) != 1:
            print "host spec '%s' did not return a single record" % host_spec
            sys.exit(1)
        (hostname,user,passwd,model) = pcu_fields[0]
        if model != "DRAC":
            print "Unsupported PCU model '%s' for password reset." % model
            sys.exit(1)
        # Step 1: change the password on the PCU itself.
        cmd = ("expect %s/exp/RESET_PASSWORD.exp %s %s '%s' '%s'" %
               (PREFIX, hostname, user, passwd, newpasswd))
        # Always print, even if DEBUG is not on
        if not DEBUG: print cmd
        ret = system(cmd)
        if ret != 0:
            print "An error occurred resetting the password. Stopping"
            sys.exit(1)
        # Step 2: record the new password in the PLC database so the two
        # stay in sync.
        print "Updating password in PLC database."
        cmd = (PREFIX+"/plcquery.py --action=update --type pcu "+
               "--filter 'hostname=%s' "+
               "--fields 'password=%s'") % (hostname, newpasswd)
        # Always print, even if DEBUG is not on
        if not DEBUG: print cmd
        ret = system(cmd)
        if ret != 0:
            print "Password update may have failed."
            print ("Before proceeding double check that the password "+
                   "update was successful.")
            print "e.g. drac.py %s" % host_spec
            sys.exit(1)
    elif command == "list":
        # Print credentials and addressing info for every matched PCU.
        if options.promptpassword:
            print "Password prompt is not supported for 'list'"
            sys.exit(1)
        pcu_fields = get_pcu_fields(host_spec, options, True)
        if len(pcu_fields) == 0:
            print "host spec '%s' did not return any records" % host_spec
            sys.exit(1)
        for hostname,user,passwd,model,ip in pcu_fields:
            # NOTE(review): % binds tighter than +, so this prints
            # ("host: %s" % hostname[0:5]) + hostname[6:], dropping the
            # 6th character AFTER formatting -- confirm whether
            # (hostname[0:5]+hostname[6:]) was the intent.
            print "host: %s" % hostname[0:5]+hostname[6:]
            print "pcu hostname: https://%s" % hostname
            print "pcu IP: %s" % ip
            print "pcu username: %s" % user
            print "pcu password: %s" % passwd
            print "pcu model: %s" % model
# Run the CLI only when executed as a script (not on import).
if __name__ == "__main__":
    main()
| {
"content_hash": "9e00399999aadf89ee69fad1c06a4da7",
"timestamp": "",
"source": "github",
"line_count": 485,
"max_line_length": 81,
"avg_line_length": 35.30103092783505,
"alnum_prop": 0.5614158051515683,
"repo_name": "nkinkade/operator",
"id": "eca9d2ac147160b57a2dfe20ac1256614a3b7968",
"size": "17144",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "tools/drac.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "149528"
},
{
"name": "Shell",
"bytes": "673"
}
],
"symlink_target": ""
} |
# Base image: balena Debian Bullseye "run" variant for the Variscite
# DART (i.MX8M, ARM v8 / aarch64).
FROM balenalib/imx8m-var-dart-debian:bullseye-run

# Node.js and Yarn versions baked into this image.
ENV NODE_VERSION 14.15.4
ENV YARN_VERSION 1.22.4

# Install Node.js (sha256-verified tarball) and Yarn (GPG-verified tarball)
# into /usr/local and /opt/yarn respectively, then clean up.
# NOTE(review): the sks-keyservers.net pool is defunct and nodejs.org is
# fetched over plain http (mitigated by the sha256 check) -- worth
# confirming these still work if builds start failing.
RUN buildDeps='curl libatomic1' \
	&& set -x \
	&& for key in \
	6A010C5166006599AA17F08146C2130DFD2497F5 \
	; do \
		gpg --batch --keyserver pgp.mit.edu --recv-keys "$key" || \
		gpg --batch --keyserver keyserver.pgp.com --recv-keys "$key" || \
		gpg --batch --keyserver ha.pool.sks-keyservers.net --recv-keys "$key" ; \
	done \
	&& apt-get update && apt-get install -y $buildDeps --no-install-recommends \
	&& rm -rf /var/lib/apt/lists/* \
	&& curl -SLO "http://nodejs.org/dist/v$NODE_VERSION/node-v$NODE_VERSION-linux-arm64.tar.gz" \
	&& echo "b681bda8eaa1ed2ac85e0ed2c2041a0408963c2198a24da183dc3ab60d93d975  node-v$NODE_VERSION-linux-arm64.tar.gz" | sha256sum -c - \
	&& tar -xzf "node-v$NODE_VERSION-linux-arm64.tar.gz" -C /usr/local --strip-components=1 \
	&& rm "node-v$NODE_VERSION-linux-arm64.tar.gz" \
	&& curl -fSLO --compressed "https://yarnpkg.com/downloads/$YARN_VERSION/yarn-v$YARN_VERSION.tar.gz" \
	&& curl -fSLO --compressed "https://yarnpkg.com/downloads/$YARN_VERSION/yarn-v$YARN_VERSION.tar.gz.asc" \
	&& gpg --batch --verify yarn-v$YARN_VERSION.tar.gz.asc yarn-v$YARN_VERSION.tar.gz \
	&& mkdir -p /opt/yarn \
	&& tar -xzf yarn-v$YARN_VERSION.tar.gz -C /opt/yarn --strip-components=1 \
	&& ln -s /opt/yarn/bin/yarn /usr/local/bin/yarn \
	&& ln -s /opt/yarn/bin/yarn /usr/local/bin/yarnpkg \
	&& rm yarn-v$YARN_VERSION.tar.gz.asc yarn-v$YARN_VERSION.tar.gz \
	&& npm config set unsafe-perm true -g --unsafe-perm \
	&& rm -rf /tmp/*

# Placeholder CMD: downstream images are expected to set their own.
CMD ["echo","'No CMD command was set in Dockerfile! Details about CMD command could be found in Dockerfile Guide section in our Docs. Here's the link: https://balena.io/docs"]

# Run balena's standard Node stack smoke test against the image.
RUN curl -SLO "https://raw.githubusercontent.com/balena-io-library/base-images/8accad6af708fca7271c5c65f18a86782e19f877/scripts/assets/tests/[email protected]" \
	&& echo "Running test-stack@node" \
	&& chmod +x [email protected] \
	&& bash [email protected] \
	&& rm -rf [email protected]

# Record image details for the `balena-info` command.
RUN [ ! -d /.balena/messages ] && mkdir -p /.balena/messages; echo 'Here are a few details about this Docker image (For more information please visit https://www.balena.io/docs/reference/base-images/base-images/): \nArchitecture: ARM v8 \nOS: Debian Bullseye \nVariant: run variant \nDefault variable(s): UDEV=off \nThe following software stack is preinstalled: \nNode.js v14.15.4, Yarn v1.22.4 \nExtra features: \n- Easy way to install packages with `install_packages <package-name>` command \n- Run anywhere with cross-build feature (for ARM only) \n- Keep the container idling with `balena-idle` command \n- Show base image details with `balena-info` command' > /.balena/messages/image-info

# Shim /bin/sh so the first shell invocation prints the image info banner,
# then restores the real shell.
RUN echo '#!/bin/sh.real\nbalena-info\nrm -f /bin/sh\ncp /bin/sh.real /bin/sh\n/bin/sh "$@"' > /bin/sh-shim \
	&& chmod +x /bin/sh-shim \
	&& cp /bin/sh /bin/sh.real \
	&& mv /bin/sh-shim /bin/sh
"content_hash": "3ebc0bf5a1033ee1f637e8fa88d816e5",
"timestamp": "",
"source": "github",
"line_count": 45,
"max_line_length": 694,
"avg_line_length": 65,
"alnum_prop": 0.7042735042735043,
"repo_name": "nghiant2710/base-images",
"id": "d095de823281422212e1b4411bdf7f4a6dbbe3f9",
"size": "2946",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "balena-base-images/node/imx8m-var-dart/debian/bullseye/14.15.4/run/Dockerfile",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "144558581"
},
{
"name": "JavaScript",
"bytes": "16316"
},
{
"name": "Shell",
"bytes": "368690"
}
],
"symlink_target": ""
} |
import theano.tensor as T
class Operation():
def __init__(self, input, op_name):
self.input = input
self.operate = self.get_operation(op_name)
self.output = self.operate(input, axis=1)
def get_operation(self, op_name):
if op_name == 'sum':
return T.sum
elif op_name == 'mean':
return T.mean
elif op_name == 'max':
return T.max
else:
L.error('Invalid operation name given: ' + op_name)
| {
"content_hash": "b5994888496fcd4eca7f25680292fd44",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 54,
"avg_line_length": 23.166666666666668,
"alnum_prop": 0.6474820143884892,
"repo_name": "nusnlp/corelm",
"id": "9612f394a9628642182d1ce81d0a0f9d6bb3ca73",
"size": "417",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "dlm/models/components/operation.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "119460"
}
],
"symlink_target": ""
} |
"""ttsa.py: Traveling Tournament Problem Using Simulated Annealing"""
__author__ = "Colin Burgin"
__copyright__ = "Copyright 2017, Virginia Tech"
__credits__ = [""]
__license__ = "MIT"
__version__ = "1.0"
__maintainer__ = "Colin Burgin"
__email__ = "[email protected]"
__status__ = "in progress"
# Standard Python Libraries
import random
import sys, copy
import math
class TTSA():
    """Traveling Tournament Simulated Annealing solver.

    BUG FIXES applied throughout: all identity comparisons on literals and
    computed values (`is 0`, `is "home"`, `is not i+1`, ...) were replaced
    with value comparisons (==, !=); `is` on ints/strings only happens to
    work for interned values and is implementation-defined.
    """

    def __init__(self, number_teams, seed, tau, beta, omega, delta, theta, maxc, maxp, maxr, gamma):
        """Build a random schedule, anneal it, and print the best result."""
        # Seed PRNG; seed == 0 requests OS entropy.
        if seed == 0:
            random.seed()
        else:
            random.seed(seed)

        # Calculate schedule vars (double round robin: 2n-2 rounds).
        self.number_teams = number_teams
        self.weeks = (2 * self.number_teams) - 2
        self.best_feasible_S = []
        self.best_infeasible_S = []

        # SA Parameters
        self.tau_not = tau
        self.beta = beta
        self.omega_not = omega
        self.omega = omega
        self.delta = delta
        self.theta = theta
        self.maxC = maxc
        self.maxP = maxp
        self.maxR = maxr
        self.gamma = gamma

        # Set all the default vars for SA
        self.S = self.build_schedule(self.number_teams)

        # Read in the cost matrix
        self.cost_matrix = []
        self.cost_matrix = self.get_cost_matrix(self.number_teams)

        # Perform the simulated annealing to solve the schedule
        self.simulated_annealing()

        # Print out the stats / result
        print("\nThe best feasible schedule:")
        self.print_schedule(self.best_feasible_S)
        print("\nCost: " + str(self.cost_ttsa(self.best_feasible_S)))
        print("Seed:", seed, "\tTau_0:", self.tau_not, "\tBeta:", self.beta, "\tOmega_0:", self.omega_not, "\tDelta:", self.delta, "\tTheta:", self.theta, "\tMaxC:", self.maxC, "\tMaxP:", self.maxP, "\tMaxR:", self.maxR, "\tGamma:", self.gamma, "\n")

    # The Simulated Annealing Algorithm TTSA from the TTP paper figure 2
    def simulated_annealing(self):
        """Anneal self.S, tracking best feasible/infeasible schedules."""
        # Set default vars
        best_feasible = sys.maxsize
        nbf = sys.maxsize
        best_infeasible = sys.maxsize
        nbi = sys.maxsize
        best_tau = self.tau_not
        tau = self.tau_not
        reheat = 0
        counter = 0

        # Loop until no more reheats
        while reheat <= self.maxR:
            phase = 0
            while phase <= self.maxP:
                counter = 0
                while counter <= self.maxC:
                    # Candidate: a random neighborhood move on a copy of S.
                    S_prime = copy.deepcopy(self.S)
                    S_prime = self.random_move(S_prime)
                    cost_s = self.cost_ttsa(self.S)
                    cost_s_p = self.cost_ttsa(S_prime)
                    nbv_s_p = self.nbv(S_prime)
                    # Accept improvements outright, otherwise with a
                    # Boltzmann probability at temperature tau.
                    if( (cost_s_p < cost_s) or
                        (nbv_s_p == 0) and (cost_s_p < best_feasible) or
                        (nbv_s_p > 0) and (cost_s_p < best_infeasible) ):
                        accept = True
                    else:
                        if math.exp(-abs(cost_s - cost_s_p) / tau) > random.random():
                            accept = True
                        else:
                            accept = False

                    # Update best found feasible and infeasible schedules if necessary
                    if cost_s_p < best_feasible and nbv_s_p == 0:
                        self.best_feasible_S = copy.deepcopy(S_prime)
                    if cost_s_p < best_infeasible and nbv_s_p > 0:
                        self.best_infeasible_S = copy.deepcopy(S_prime)

                    # Set new values if it is accepted
                    if accept is True:
                        self.S = copy.deepcopy(S_prime)
                        # Calculate new values for nbf or nbi
                        if self.nbv(self.S) == 0:
                            nbf = min(self.cost_ttsa(self.S), best_feasible)
                        else:
                            nbi = min(self.cost_ttsa(self.S), best_infeasible)
                            self.best_infeasible_S = copy.deepcopy(S_prime)

                        # Restart the process if a better feasible or infeasible solution is found
                        if (nbf < best_feasible) or (nbi < best_infeasible):
                            reheat = 0
                            counter = 0
                            phase = 0
                            best_tau = tau
                            best_feasible = nbf
                            best_infeasible = nbi
                            # Rescale the penalty weight omega toward
                            # feasible (divide) or infeasible (multiply).
                            if self.nbv(self.S) == 0:
                                self.omega = self.omega / self.theta
                            else:
                                self.omega = self.omega * self.delta
                        else:
                            counter += 1
                # End counter Loop
                phase += 1
                tau = tau * self.beta
            # End phase Loop
            reheat += 1
            tau = 2 * best_tau
        # End reheat Loop

    def random_move(self, S):
        """Apply one uniformly-chosen neighborhood move to S and return it."""
        choice = random.randint(0,4)

        # Select and perform the operation
        if choice == 0:
            return self.swap_homes(S)
        elif choice == 1:
            return self.swap_rounds(S)
        elif choice == 2:
            return self.swap_teams(S)
        elif choice == 3:
            return self.partial_swap_rounds(S)
        else:
            return self.partial_swap_teams(S)

    def nbv(self, S):
        """Number of constraint violations (norepeat + atmost) in S."""
        violations = 0

        # norepeat: a team may not play the same opponent in consecutive rounds.
        for team in range(len(S)):
            for game in range(1, len(S[team])):
                if S[team][game-1][0] == S[team][game][0]:
                    violations += 1

        # atmost: no more than three consecutive home (or away) games.
        for team in range(len(S)):
            for game in range(3, len(S[team])):
                if S[team][game-3][1] == "home" and S[team][game-2][1] == "home" and S[team][game-1][1] == "home" and S[team][game][1] == "home":
                    violations += 1
                if S[team][game-3][1] == "away" and S[team][game-2][1] == "away" and S[team][game-1][1] == "away" and S[team][game][1] == "away":
                    violations += 1
        return violations

    def get_cost_matrix(self, number_teams):
        """Load the inter-city distance matrix from data/data<n>.txt."""
        file_name = "data/data" + str(number_teams) + ".txt"
        l = []
        with open(file_name, 'r') as f:
            for line in f:
                line = line.strip()
                if len(line) > 0:
                    l.append(line.split())
        return l

    def cost_ttsa(self, S):
        """Objective: plain travel cost, penalized when S is infeasible."""
        if self.nbv(S) == 0:
            return self.cost(S)
        else:
            return math.sqrt(self.cost(S)**2 + (self.omega * self.fun(self.nbv(S))**2))

    def fun(self, v):
        """Sub-linear penalty scaling function f(v) from the TTP paper."""
        return 1 + math.sqrt(v) * math.log(v / 2)

    def cost(self, S):
        """Total travel distance of schedule S (no feasibility penalty)."""
        total_cost = 0
        cost_m = self.cost_matrix

        # BUG FIX: iterate with enumerate() instead of list.index(), which
        # returns the first match and mis-attributes duplicate rows/games.
        for i, team in enumerate(S):
            # Temporary sentinel: every team ends the season back at home.
            team.append((None, "home"))
            for j, game in enumerate(team):
                start_loc = None
                dest_loc = None
                # Where this leg starts: home city, or the previous away venue.
                if j == 0:
                    start_loc = i
                else:
                    if team[j-1][1] == "home":
                        start_loc = i
                    else:
                        start_loc = team[j-1][0] - 1
                # Where this leg ends: home city, or this round's away venue.
                if j == len(team) - 1:
                    dest_loc = i
                else:
                    if team[j][1] == "home":
                        dest_loc = i
                    else:
                        dest_loc = team[j][0] - 1
                # Cost
                total_cost += int(cost_m[start_loc][dest_loc])
            # Pop off the placeholder location
            team.pop()
        return total_cost

    def build_schedule(self, number_teams):
        """Build a random (not necessarily feasible) double round robin."""
        # Create an empty schedule
        S = [[None for i in range(self.weeks)] for j in range(number_teams)]
        # Call the recursive build function
        return self.r_build_schedule(S, 0, 0)

    def r_build_schedule(self, S, team, week):
        """Backtracking fill of slot (team, week); returns S or None."""
        # If the schedule is full then return becuase it is complete
        if self.schedule_full(S):
            return S

        # Calculate the next location
        next_week = week + 1
        next_team = team
        if next_week == self.weeks:
            next_week = 0
            next_team += 1

        # If there is already a game scheduled then move forward
        if S[team][week] is not None:
            return self.r_build_schedule(S, next_team, next_week)

        # Find all of the possible games that can be scheduled (defensive:
        # get_game always returns a list, so the None check never fires).
        possibilities = self.get_game(S, team, week)
        random.shuffle(possibilities)
        if possibilities is None:
            return None

        # Try all the possible games until one works
        for p in possibilities:
            try_S = [[c for c in r] for r in S]
            # Set the game as well as the opponent
            try_S[team][week] = p
            self.set_opponent(try_S, team, week)
            # Move forward with this attempt
            result_S = self.r_build_schedule(try_S, next_team, next_week)
            if result_S is not None:
                return result_S

        # Catch all
        return None

    def schedule_full(self, S):
        """True when every slot of S holds a game. O(teams*weeks)."""
        for week in S:
            for game in week:
                if game is None:
                    return False
        return True

    def set_opponent(self, S, i, j):
        """Mirror game S[i][j] into the opponent's row; returns S."""
        match = S[i][j]
        if match[1] == "home":
            S[match[0]-1][j] = (i+1, "away")
        else:
            S[match[0]-1][j] = (i+1, "home")
        return S

    def get_game(self, S, i, j):
        """All games that can legally fill slot (team i, week j)."""
        # Create home/away candidates against every team.
        home = lambda x: (x, "home")
        away = lambda x: (x, "away")
        available = [f(x) for x in range(1, self.number_teams+1) for f in (home, away)]
        # Remove self from list
        available = [k for k in available if k[0] != i+1]
        # Remove games that this team already has on its schedule
        available = [l for l in available if l not in S[i]]
        # Remove opponents that are in concurrent games
        col = [o[0] for o in [row[j] for row in S] if o is not None]
        available = [m for m in available if m[0] not in col]
        return available

    def swap_homes(self, S):
        """Swap home/away roles of one random team's paired games.

        BUG FIX: the original always operated on the last team
        (len(S) - 1) although the move is documented as a random choice.
        """
        team = random.randrange(len(S))
        swap_loc = S[team].index(random.choice(S[team]))
        swap_loc_mirror = S[team].index(self.home_away(S[team][swap_loc]))
        # Swap the first game and its opponent
        S[team][swap_loc] = self.home_away(S[team][swap_loc])
        S = self.set_opponent(S, team, swap_loc)
        # Swap the matching game and its opponent
        S[team][swap_loc_mirror] = self.home_away(S[team][swap_loc_mirror])
        S = self.set_opponent(S, team, swap_loc_mirror)
        return S

    def home_away(self, game):
        """Return the same game with its home/away flag flipped."""
        if game[1] == 'home':
            return (game[0], 'away')
        else:
            return (game[0], 'home')

    def swap_rounds(self, S):
        """Swap two randomly chosen rounds for every team."""
        # Choose two different rounds to swap
        choices = random.sample(list(range(len(S[0]))), 2)
        # Iterate through the teams swapping each rounds
        for team in range(len(S)):
            game_one = S[team][choices[0]]
            game_two = S[team][choices[1]]
            S[team][choices[0]] = game_two
            S[team][choices[1]] = game_one
        return S

    def swap_teams(self, S):
        """Swap the full schedules of two random teams (except their mutual games)."""
        # Choose two different teams to swap
        choices = random.sample(list(range(len(S))), 2)
        # Swap the teams completely
        team_one = S[choices[0]]
        team_two = S[choices[1]]
        S[choices[0]] = team_two
        S[choices[1]] = team_one
        # Resolve the same team conflicts
        for game in range(len(S[choices[0]])):
            # If the team is playing itself fix it and resolve opponent
            if S[choices[0]][game][0] - 1 == choices[0]:
                S[choices[0]][game] = self.home_away(S[choices[1]][game])
                S = self.set_opponent(S, choices[0], game)
        # Resolve the opponents
        for team in choices:
            for game in range(len(S[team])):
                S = self.set_opponent(S, team, game)
        return S

    def partial_swap_rounds(self, S):
        """Swap two rounds for one team, chain-ejecting until consistent."""
        # Choose a random team and two random rounds to swap
        s_team = random.sample(list(range(len(S))), 1)[0]
        s_rounds = random.sample(list(range(len(S[0]))), 2)
        # Create a starting list
        p_swap = [s_team]
        # Chain ejection until everything is in the list
        while 1:
            # loop through the list adding new teams if necessary
            for item in p_swap:
                if S[item][s_rounds[0]][0]-1 not in p_swap:
                    p_swap.append(S[item][s_rounds[0]][0]-1)
                if S[item][s_rounds[1]][0]-1 not in p_swap:
                    p_swap.append(S[item][s_rounds[1]][0]-1)
            # Check to see if the list is fully inclusive
            if (S[p_swap[-1]][s_rounds[0]][0]-1 in p_swap) and (S[p_swap[-1]][s_rounds[1]][0]-1 in p_swap) and (S[p_swap[-2]][s_rounds[0]][0]-1 in p_swap) and (S[p_swap[-2]][s_rounds[1]][0]-1 in p_swap):
                break
        # Loop through the list for one of the rounds and swap all the games in the list
        for item in p_swap:
            S = self.swap_game_round(S, item, s_rounds[0], s_rounds[1])
        return S

    def swap_game_round(self, S, t, rl, rk):
        """Swap team t's games in rounds rl and rk; returns S."""
        game_one = S[t][rl]
        game_two = S[t][rk]
        S[t][rl] = game_two
        S[t][rk] = game_one
        return S

    def partial_swap_teams(self, S):
        """Swap one round's games between two teams, chain-ejecting to stay valid."""
        # Choose a random round and two random teams to swap
        s_round = random.sample(list(range(len(S[0]))), 1)[0]
        s_teams = random.sample(list(range(len(S))), 2)
        # Handle case where the games cannot be swapped because it is invalid (cant play yourself)
        if not (set(s_teams) - set([S[s_teams[0]][s_round][0]-1, S[s_teams[1]][s_round][0]-1])):
            return S
        # Create a starting list
        p_swap = [S[s_teams[0]][s_round], S[s_teams[1]][s_round]]
        # Chain ejection until everything is in the list
        while 1:
            # Loop through the list adding new teams if necessary
            for item in p_swap:
                if self.get_concurrent(S, s_teams[0], s_teams[1], item) not in p_swap:
                    p_swap.append(self.get_concurrent(S, s_teams[0], s_teams[1], item))
                if self.get_concurrent(S, s_teams[1], s_teams[0], item) not in p_swap:
                    p_swap.append(self.get_concurrent(S, s_teams[1], s_teams[0], item))
            if( (self.get_concurrent(S, s_teams[0], s_teams[1], p_swap[-1]) in p_swap) and (self.get_concurrent(S, s_teams[1], s_teams[0], p_swap[-1]) in p_swap) and
                (self.get_concurrent(S, s_teams[0], s_teams[1], p_swap[-2]) in p_swap) and (self.get_concurrent(S, s_teams[1], s_teams[0], p_swap[-2]) in p_swap) ):
                break
        # Get the indices of the games found
        p_indices = []
        for item in p_swap:
            p_indices.append(S[s_teams[0]].index(item))
        # Loop through the list for one of the teams and swap all of the games and resolve opponents
        for idx in p_indices:
            S = self.swap_game_team(S, idx, s_teams[0], s_teams[1])
        return S

    def swap_game_team(self, S, r, T1, T2):
        """Swap round r between teams T1/T2 and fix both opponents."""
        game_one = S[T1][r]
        game_two = S[T2][r]
        S[T1][r] = game_two
        S[T2][r] = game_one
        S = self.set_opponent(S, T1, r)
        S = self.set_opponent(S, T2, r)
        return S

    def get_concurrent(self, S, T1, T2, game):
        """Return T2's game in the round where T1 plays `game`."""
        for i, j in enumerate(S[T1]):
            if j == game:
                return S[T2][i]

    def print_schedule(self, S):
        """Pretty-print S, one team per row, tab-separated."""
        for row in S:
            print(*row, sep="\t")
| {
"content_hash": "6e2a25277ff2f8b124f96e3d47fc0860",
"timestamp": "",
"source": "github",
"line_count": 478,
"max_line_length": 250,
"avg_line_length": 38.77405857740586,
"alnum_prop": 0.5336678536743282,
"repo_name": "cburgin/TPP-SA",
"id": "065f6bf3cf0bd224638c34864f6b296e9c693d4f",
"size": "18558",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ttsa.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "21432"
}
],
"symlink_target": ""
} |
package main
//go:generate go-bindata -pkg "logbuddy" -o ../logbuddy_bindata.go ../static/...
import "github.com/jnprautomate/logbuddy"
func main() {
	// Address the interactive-test web UI binds to.
	const listenAddr = "localhost:8080"

	server := &logbuddy.WebServer{Address: listenAddr}
	server.Listen()
	defer server.Close()
}
| {
"content_hash": "832944b0d0b99b1984b33659f0b11108",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 80,
"avg_line_length": 22.166666666666668,
"alnum_prop": 0.7030075187969925,
"repo_name": "JNPRAutomate/LogBuddy",
"id": "a2d1be6fe938017d340f915ea31ba90bc97300af",
"size": "266",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "_interactive_tests/webui.go",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "729"
},
{
"name": "Go",
"bytes": "1249829"
},
{
"name": "JavaScript",
"bytes": "3227"
}
],
"symlink_target": ""
} |
<html>
<!--
https://bugzilla.mozilla.org/show_bug.cgi?id=441782
-->
<head>
  <title>Test for Bug 441782</title>
  <script type="text/javascript" src="/tests/SimpleTest/SimpleTest.js"></script>
  <script type="text/javascript" src="/tests/SimpleTest/WindowSnapshot.js"></script>
  <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
</head>
<body>
<a target="_blank" href="https://bugzilla.mozilla.org/show_bug.cgi?id=441782">Mozilla Bug 441782</a>
<pre id="test">
<script type="text/javascript">

/** Test for Bug 441782 **/

// Pass descriptors consumed by bidi_numeral_test.js below.
var passes = [
  // bug 441782 tests (Arabic)
  {prefix: "bug441782", file: 1, bidiNumeralValue: 2, op: "=="},
];

</script>
<!-- NOTE(review): bidi_numeral_test.js presumably reads `passes` and drives
     the snapshot comparisons -- confirm against that shared driver. -->
<script type="text/javascript" src="bidi_numeral_test.js"></script>
</pre>
</body>
</html>
| {
"content_hash": "503f5ebcfc4081dd8a80e04e8925d31d",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 100,
"avg_line_length": 27.678571428571427,
"alnum_prop": 0.6696774193548387,
"repo_name": "sergecodd/FireFox-OS",
"id": "595477da27377b06b11bb349cdb949bf1ed410f6",
"size": "775",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "B2G/gecko/layout/base/tests/test_bug441782-1c.html",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Ada",
"bytes": "443"
},
{
"name": "ApacheConf",
"bytes": "85"
},
{
"name": "Assembly",
"bytes": "5123438"
},
{
"name": "Awk",
"bytes": "46481"
},
{
"name": "Batchfile",
"bytes": "56250"
},
{
"name": "C",
"bytes": "101720951"
},
{
"name": "C#",
"bytes": "38531"
},
{
"name": "C++",
"bytes": "148896543"
},
{
"name": "CMake",
"bytes": "23541"
},
{
"name": "CSS",
"bytes": "2758664"
},
{
"name": "DIGITAL Command Language",
"bytes": "56757"
},
{
"name": "Emacs Lisp",
"bytes": "12694"
},
{
"name": "Erlang",
"bytes": "889"
},
{
"name": "FLUX",
"bytes": "34449"
},
{
"name": "GLSL",
"bytes": "26344"
},
{
"name": "Gnuplot",
"bytes": "710"
},
{
"name": "Groff",
"bytes": "447012"
},
{
"name": "HTML",
"bytes": "43343468"
},
{
"name": "IDL",
"bytes": "1455122"
},
{
"name": "Java",
"bytes": "43261012"
},
{
"name": "JavaScript",
"bytes": "46646658"
},
{
"name": "Lex",
"bytes": "38358"
},
{
"name": "Logos",
"bytes": "21054"
},
{
"name": "Makefile",
"bytes": "2733844"
},
{
"name": "Matlab",
"bytes": "67316"
},
{
"name": "Max",
"bytes": "3698"
},
{
"name": "NSIS",
"bytes": "421625"
},
{
"name": "Objective-C",
"bytes": "877657"
},
{
"name": "Objective-C++",
"bytes": "737713"
},
{
"name": "PHP",
"bytes": "17415"
},
{
"name": "Pascal",
"bytes": "6780"
},
{
"name": "Perl",
"bytes": "1153180"
},
{
"name": "Perl6",
"bytes": "1255"
},
{
"name": "PostScript",
"bytes": "1139"
},
{
"name": "PowerShell",
"bytes": "8252"
},
{
"name": "Protocol Buffer",
"bytes": "26553"
},
{
"name": "Python",
"bytes": "8453201"
},
{
"name": "Ragel in Ruby Host",
"bytes": "3481"
},
{
"name": "Ruby",
"bytes": "5116"
},
{
"name": "Scilab",
"bytes": "7"
},
{
"name": "Shell",
"bytes": "3383832"
},
{
"name": "SourcePawn",
"bytes": "23661"
},
{
"name": "TeX",
"bytes": "879606"
},
{
"name": "WebIDL",
"bytes": "1902"
},
{
"name": "XSLT",
"bytes": "13134"
},
{
"name": "Yacc",
"bytes": "112744"
}
],
"symlink_target": ""
} |
// True for intra-line whitespace only (space, horizontal tab).
static inline bool isWhiteSpace(int c)
{
	switch (c)
	{
	case ' ':
	case '\t':
		return true;
	default:
		return false;
	}
}
///////////////////////////////////////////////////////////////////////////////
// True for any token separator: space, tab, or line break (LF/CR).
static inline bool isWhiteSpaceAndNewline(int c)
{
	switch (c)
	{
	case ' ':
	case '\t':
	case '\n':
	case '\r':
		return true;
	default:
		return false;
	}
}
///////////////////////////////////////////////////////////////////////////////
// True for ASCII decimal digits '0'..'9'.
static inline bool isNumeric(int c)
{
	return c >= '0' && c <= '9';
}
///////////////////////////////////////////////////////////////////////////////
class SampleFileBuffer
{
SampleFramework::File* mFP;
char mBuffer[MAX_FILE_BUFFER_SIZE];
int mCurrentBufferSize;
int mCurrentCounter;
int mEOF;
public:
SampleFileBuffer(SampleFramework::File* fp) :
mFP(fp),
mCurrentCounter(0),
mCurrentBufferSize(0),
mEOF(0)
{}
///////////////////////////////////////////////////////////////////////////
inline void rewind(int offset = 1)
{
mCurrentCounter -= offset;
}
///////////////////////////////////////////////////////////////////////////
void readBuffer()
{
mCurrentBufferSize = (int)fread(mBuffer, 1, MAX_FILE_BUFFER_SIZE, mFP);
mEOF = feof(mFP);
mCurrentCounter = 0;
}
///////////////////////////////////////////////////////////////////////////
char getCharacter()
{
if (mCurrentCounter >= mCurrentBufferSize)
{
if (mEOF) return EOF;
readBuffer();
if (mCurrentBufferSize == 0)
return EOF;
}
return mBuffer[mCurrentCounter++];
}
///////////////////////////////////////////////////////////////////////////
bool skipWhiteSpace(bool stopAtEndOfLine)
{
char c = 0;
do
{
c = getCharacter();
bool skip = (stopAtEndOfLine) ? isWhiteSpace(c) : isWhiteSpaceAndNewline(c);
if (skip == false)
{
rewind();
return true;
}
} while (c != EOF);
return false; // end of file
}
///////////////////////////////////////////////////////////////////////////////
bool getNextToken(char* token, bool stopAtEndOfLine)
{
if (skipWhiteSpace(stopAtEndOfLine) == false)
return false;
char* str = token;
char c = 0;
do
{
c = getCharacter();
if (c == EOF)
{
*str = 0;
}
else if (isWhiteSpaceAndNewline(c) == true)
{
*str = 0;
rewind();
return (strlen(token) > 0);
}
else
*str++ = (char) c;
} while (c != EOF);
return false;
}
///////////////////////////////////////////////////////////////////////////////
bool getNextTokenButMarker(char* token)
{
if (skipWhiteSpace(false) == false)
return 0;
char* str = token;
char c = 0;
do
{
c = getCharacter();
if (c == ':')
{
rewind();
*str = 0;
return false;
}
if (c == EOF)
{
*str = 0;
}
else if (isWhiteSpaceAndNewline(c) == true)
{
*str = 0;
rewind();
return (strlen(token) > 0);
}
else
*str++ = (char) c;
} while (c != EOF);
return false;
}
///////////////////////////////////////////////////////////////////////////////
bool getNextTokenButNumeric(char* token)
{
if (skipWhiteSpace(false) == false)
return 0;
char* str = token;
char c = 0;
do
{
c = getCharacter();
if (isNumeric(c))
{
rewind();
*str = 0;
return false;
}
if (c == EOF)
{
*str = 0;
}
else if (isWhiteSpaceAndNewline(c) == true)
{
*str = 0;
rewind();
return (strlen(token) > 0);
}
else
*str++ = (char) c;
} while (c != EOF);
return false;
}
///////////////////////////////////////////////////////////////////////////////
void skipUntilNextLine()
{
char c = 0;
do
{
c = getCharacter();
} while ((c != '\n') && (c != EOF));
}
///////////////////////////////////////////////////////////////////////////////
void skipUntilNextBlock()
{
char dummy[MAX_TOKEN_LENGTH];
while (getNextTokenButMarker(dummy) == true)
;
}
///////////////////////////////////////////////////////////////////////////////
bool getNextFloat(float& val, bool stopAtEndOfLine = true)
{
char dummy[MAX_TOKEN_LENGTH];
if (getNextToken(dummy, stopAtEndOfLine) == false)
return false;
val = float(atof(dummy));
return true;
}
///////////////////////////////////////////////////////////////////////////////
bool getNextInt(int& val, bool stopAtEndOfLine = true)
{
char dummy[MAX_TOKEN_LENGTH];
if (getNextToken(dummy, stopAtEndOfLine) == false)
return false;
val = int(atoi(dummy));
return true;
}
///////////////////////////////////////////////////////////////////////////////
bool getNextString(char* val, bool stopAtEndOfLine = true)
{
char dummy[MAX_TOKEN_LENGTH];
if (getNextToken(dummy, stopAtEndOfLine) == false)
return false;
strcpy(val, dummy);
return true;
}
///////////////////////////////////////////////////////////////////////////////
// Reads three consecutive float tokens into val.x, val.y, val.z.
// Components parsed before a failure keep their new values.
bool getNextVec3(PxVec3& val, bool stopAtEndOfLine = true)
{
	return getNextFloat(val.x, stopAtEndOfLine)
	    && getNextFloat(val.y, stopAtEndOfLine)
	    && getNextFloat(val.z, stopAtEndOfLine);
}
};
///////////////////////////////////////////////////////////////////////////////
// Parses the ':units' block of an ASF file: mass, length unit, and whether
// angles are expressed in degrees. Stops at the next ':' block marker.
// Unrecognized keys are simply skipped over by the token loop.
static bool readHeader(SampleFileBuffer& buffer, Acclaim::ASFData& data)
{
	using namespace Acclaim;
	char key[MAX_TOKEN_LENGTH];
	char value[MAX_TOKEN_LENGTH];
	while (buffer.getNextTokenButMarker(key))
	{
		if (strcmp(key, "mass") == 0)
		{
			if (!buffer.getNextFloat(data.mHeader.mMass))
				return false;
		}
		else if (strcmp(key, "length") == 0)
		{
			if (!buffer.getNextFloat(data.mHeader.mLengthUnit))
				return false;
		}
		else if (strcmp(key, "angle") == 0)
		{
			if (!buffer.getNextToken(value, true))
				return false;
			// Anything other than "deg" is treated as radians.
			data.mHeader.mAngleInDegree = (strcmp(value, "deg") == 0);
		}
	}
	return true;
}
///////////////////////////////////////////////////////////////////////////////
// Parses the ':root' block: captures position and orientation; the 'order'
// and 'axis' specifications — and any unrecognized key — are skipped by
// discarding the remainder of their line. Stops at the next ':' marker.
static bool readRoot(SampleFileBuffer& buffer, Acclaim::ASFData& data)
{
	using namespace Acclaim;
	char key[MAX_TOKEN_LENGTH];
	while (buffer.getNextTokenButMarker(key))
	{
		if (strcmp(key, "position") == 0)
		{
			if (!buffer.getNextVec3(data.mRoot.mPosition))
				return false;
		}
		else if (strcmp(key, "orientation") == 0)
		{
			if (!buffer.getNextVec3(data.mRoot.mOrientation))
				return false;
		}
		else
		{
			// 'order', 'axis', and unknown keys: ignore the rest of the line.
			buffer.skipUntilNextLine();
		}
	}
	return true;
}
///////////////////////////////////////////////////////////////////////////////
// Parses one 'begin' ... 'end' bone record from the :bonedata section of an
// ASF file into 'bone'. Returns false when no further bone record follows
// (getNextTokenButMarker() hits the next ':' marker or EOF), when the record
// does not open with 'begin', or when a field value fails to parse.
static bool readBone(SampleFileBuffer& buffer, Acclaim::Bone& bone)
{
	using namespace Acclaim;
	// Count of DOF flags seen so far; 'limits' later consumes one
	// two-token range per declared DOF.
	int nbDOF = 0;
	char token[MAX_TOKEN_LENGTH], dummy[MAX_TOKEN_LENGTH];
	if (buffer.getNextTokenButMarker(token) == false)
		return false;
	if (strcmp(token, "begin") != 0)
		return false;
	// Read key/value fields until the closing 'end' keyword.
	while (buffer.getNextToken(token, false) == true)
	{
		if (strcmp(token, "id") == 0)
		{
			if (buffer.getNextInt(bone.mID) == false) return false;
		}
		else if (strcmp(token, "name") == 0)
		{
			if (buffer.getNextString(bone.mName) == false) return false;
		}
		else if (strcmp(token, "direction") == 0)
		{
			if (buffer.getNextVec3(bone.mDirection) == false) return false;
		}
		else if (strcmp(token, "length") == 0)
		{
			if (buffer.getNextFloat(bone.mLength) == false) return false;
		}
		else if (strcmp(token, "axis") == 0)
		{
			if (buffer.getNextVec3(bone.mAxis) == false) return false;
			// Consume the trailing axis-order token on the same line
			// (result deliberately ignored).
			buffer.getNextToken(dummy, true);
		}
		else if (strcmp(token, "dof") == 0)
		{
			// DOF keywords run until end of line (stopAtEndOfLine == true).
			while ((buffer.getNextToken(dummy, true) == true))
			{
				if (strcmp(dummy, "rx") == 0)
				{
					bone.mDOF |= BoneDOFFlag::eRX;
					nbDOF++;
				}
				else if (strcmp(dummy, "ry") == 0)
				{
					bone.mDOF |= BoneDOFFlag::eRY;
					nbDOF++;
				}
				else if (strcmp(dummy, "rz") == 0)
				{
					bone.mDOF |= BoneDOFFlag::eRZ;
					nbDOF++;
				}
				else if (strcmp(dummy, "l") == 0)
				{
					bone.mDOF |= BoneDOFFlag::eLENGTH;
					nbDOF++;
				}
			}
			continue;
		}
		else if (strcmp(token, "limits") == 0)
		{
			int cnt = 0;
			while ( cnt++ < nbDOF)
			{
				// we ignore limit data for now
				// NOTE(review): assumes each limit appears as exactly two
				// tokens per DOF (e.g. "(min" and "max)") — confirm against
				// the ASF files actually loaded.
				if (buffer.getNextToken(dummy, false) == false) return false;
				if (buffer.getNextToken(dummy, false) == false) return false;
			}
		}
		else if (strcmp(token, "end") == 0)
			break;
		else
			buffer.skipUntilNextLine();
	}
	return true;
}
///////////////////////////////////////////////////////////////////////////////
// Parses the ':bonedata' section: reads bone records into a fixed-size
// temporary array, then allocates data.mBones to the exact count and copies
// them over. Always returns true (a failed readBone() simply ends the list).
static bool readBoneData(SampleFileBuffer& buffer, Acclaim::ASFData& data)
{
	using namespace Acclaim;
	Bone tempBones[MAX_BONE_NUMBER];
	PxU32 nbBones = 0;
	// Stop BEFORE writing tempBones[MAX_BONE_NUMBER]: the original checked
	// the bound only after readBone() had already written out of range (and
	// only via PX_ASSERT, which is compiled out in release builds).
	while (nbBones < MAX_BONE_NUMBER && readBone(buffer, tempBones[nbBones]))
		nbBones++;
	PX_ASSERT(nbBones <= MAX_BONE_NUMBER);
	// Allocate the right size and copy the bone data.
	data.mBones = (Bone*)malloc(sizeof(Bone) * nbBones);
	data.mNbBones = nbBones;
	for (PxU32 i = 0; i < nbBones; i++)
	{
		data.mBones[i] = tempBones[i];
	}
	return true;
}
///////////////////////////////////////////////////////////////////////////////
// Looks up a bone by name with a linear scan over data.mBones; returns NULL
// when no bone matches. (A hash map could replace this if lookup ever shows
// up as a hotspot.)
static Acclaim::Bone* getBoneFromName(Acclaim::ASFData& data, const char* name)
{
	Acclaim::Bone* bones = data.mBones;
	for (PxU32 i = 0; i < data.mNbBones; i++)
	{
		if (strcmp(bones[i].mName, name) == 0)
			return bones + i;
	}
	return 0;
}
///////////////////////////////////////////////////////////////////////////////
// Parses the ':hierarchy' block: each line names a parent bone followed by
// its children; every child's mParent is set accordingly. 'begin' lines are
// ignored and 'end' terminates the block. Returns false when a child name is
// not present in 'data'.
static bool readHierarchy(SampleFileBuffer& buffer, Acclaim::ASFData& data)
{
	using namespace Acclaim;
	char keyword[MAX_TOKEN_LENGTH];
	char childName[MAX_TOKEN_LENGTH];
	for (;;)
	{
		// Stops at the next ':' marker or EOF.
		if (buffer.getNextTokenButMarker(keyword) == false)
			return true;
		if (strcmp(keyword, "end") == 0)
			break;
		if (strcmp(keyword, "begin") != 0)
		{
			// 'keyword' names the parent; the remaining tokens on this
			// line (stopAtEndOfLine == true) are its children.
			Bone* parent = getBoneFromName(data, keyword);
			while (buffer.getNextToken(childName, true) == true)
			{
				Bone* child = getBoneFromName(data, childName);
				if (!child)
					return false;
				child->mParent = parent;
			}
		}
		buffer.skipUntilNextLine();
	}
	return true;
}
///////////////////////////////////////////////////////////////////////////////
// Reads one frame's per-bone channel values from an AMC buffer into
// 'frameData'. The loop stops at the next numeric token (the following
// frame number), which getNextTokenButNumeric() pushes back into the
// stream for the caller. Returns false when a bone name on the line is
// not found in 'asfData'.
static bool readFrameData(SampleFileBuffer& buffer, Acclaim::ASFData& asfData, Acclaim::FrameData& frameData)
{
	using namespace Acclaim;
	char token[MAX_TOKEN_LENGTH];
	while (buffer.getNextTokenButNumeric(token) == true)
	{
		if (strcmp(token, "root") == 0)
		{
			// Root carries six values: position then orientation.
			buffer.getNextVec3(frameData.mRootPosition);
			buffer.getNextVec3(frameData.mRootOrientation);
		}
		else
		{
			Bone* bone = getBoneFromName(asfData, token);
			if (bone == 0)
				return false;
			// Bone IDs are 1-based in the ASF data; mBoneFrameData is 0-based.
			int id = bone->mID - 1;
			float val = 0;
			// One value is consumed per declared rotational DOF, in rx, ry,
			// rz order.
			// NOTE(review): a bone declaring the eLENGTH ('l') DOF would have
			// its length value left unconsumed here, desynchronizing the
			// stream — confirm no input files use that DOF.
			if (bone->mDOF & BoneDOFFlag::eRX)
			{
				buffer.getNextFloat(val);
				frameData.mBoneFrameData[id].x = val;
			}
			if (bone->mDOF & BoneDOFFlag::eRY)
			{
				buffer.getNextFloat(val);
				frameData.mBoneFrameData[id].y = val;
			}
			if (bone->mDOF & BoneDOFFlag::eRZ)
			{
				buffer.getNextFloat(val);
				frameData.mBoneFrameData[id].z = val;
			}
		}
	}
	return true;
}
///////////////////////////////////////////////////////////////////////////////
// Loads an Acclaim ASF skeleton file into 'data'. Dispatches on the ':'
// block markers (:units, :root, :bonedata, :hierarchy, ...), skipping
// comments and unrecognized blocks. Returns false when the file cannot be
// opened or a block fails to parse. The file handle is always closed —
// the original leaked it on every early parse-failure return.
bool Acclaim::readASFData(const char* filename, Acclaim::ASFData& data)
{
	using namespace Acclaim;
	char token[MAX_TOKEN_LENGTH];
	SampleFramework::File* fp = NULL;
	physx::shdfnd::fopen_s(&fp, filename, "r");
	if (!fp)
		return false;
	SampleFileBuffer buffer(fp);
	bool success = true;
	while (success && buffer.getNextToken(token, false) == true)
	{
		if (token[0] == '#') // comment
		{
			buffer.skipUntilNextLine();
		}
		else if (token[0] == ':') // blocks
		{
			const char* str = token + 1; // remainder of the string
			if (strcmp(str, "version") == 0) // ignore version number
				buffer.skipUntilNextLine();
			else if (strcmp(str, "name") == 0) // probably 'VICON'
				buffer.skipUntilNextLine();
			else if (strcmp(str, "units") == 0)
				success = readHeader(buffer, data);
			else if (strcmp(str, "documentation") == 0)
				buffer.skipUntilNextBlock();
			else if (strcmp(str, "root") == 0)
				success = readRoot(buffer, data);
			else if (strcmp(str, "bonedata") == 0)
				success = readBoneData(buffer, data);
			else if (strcmp(str, "hierarchy") == 0)
				success = readHierarchy(buffer, data);
			else
			{
				// ERROR! - unrecognized block name (ignored)
			}
		}
		else
		{
			// ERROR! - stray token outside any block (ignored)
		}
	}
	fclose(fp); // always reached, even on parse failure
	return success;
}
///////////////////////////////////////////////////////////////////////////////
// Loads an Acclaim AMC motion file into 'amcData', using the skeleton in
// 'asfData' to interpret per-bone channels. Frames are gathered into a
// growable temporary array, then copied into an exact-size malloc'd block.
// Returns false only when the file cannot be opened.
bool Acclaim::readAMCData(const char* filename, Acclaim::ASFData& asfData, Acclaim::AMCData& amcData)
{
	using namespace Acclaim;
	char token[MAX_TOKEN_LENGTH];
	SampleArray<FrameData> tempFrameData;
	tempFrameData.reserve(300);
	SampleFramework::File* fp = NULL;
	physx::shdfnd::fopen_s(&fp, filename, "r");
	if (!fp)
		return false;
	SampleFileBuffer buffer(fp);
	while (buffer.getNextToken(token, false) == true)
	{
		if (token[0] == '#') // comment
		{
			buffer.skipUntilNextLine();
		}
		else if (token[0] == ':') // header keywords (:FULLY-SPECIFIED, :DEGREES) carry no payload
		{
			continue;
		}
		else if (isNumeric(token[0]) == true)
		{
			// Frame-number token: its value is implicit in frame order, so it
			// is not stored (the original parsed it into an unused local).
			FrameData frameData;
			if (readFrameData(buffer, asfData, frameData) == true)
				tempFrameData.pushBack(frameData);
		}
	}
	amcData.mNbFrames = tempFrameData.size();
	amcData.mFrameData = (FrameData*)malloc(sizeof(FrameData) * amcData.mNbFrames);
	if (amcData.mNbFrames > 0) // guard memcpy from a potentially null source when no frames were read
		memcpy(amcData.mFrameData, tempFrameData.begin(), sizeof(FrameData) * amcData.mNbFrames);
	fclose(fp);
	return true;
}
| {
"content_hash": "42545c92e7073d56f8d7825b5985f4ce",
"timestamp": "",
"source": "github",
"line_count": 642,
"max_line_length": 109,
"avg_line_length": 21.833333333333332,
"alnum_prop": 0.5302846543482913,
"repo_name": "jjuiddong/PhysXPractice",
"id": "62e979454fbf5cedaf67c21e27f322f0d5cf48b7",
"size": "16094",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "SampleBase/AcclaimLoader.cpp",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "108249"
},
{
"name": "C++",
"bytes": "2277826"
},
{
"name": "Cuda",
"bytes": "228174"
},
{
"name": "Objective-C",
"bytes": "298"
},
{
"name": "Shell",
"bytes": "3012"
}
],
"symlink_target": ""
} |
require "spec_helper"

# Specs for IoMerchant::Order: unique-id generation, the factory-assigned
# order code, and the state predicates.
describe IoMerchant::Order do
  before {
    @user = User.create(:name => 'a', :kind => 'a', :email => 'a')
    @order = FactoryGirl.create(:order, :buyer => @user)
  }

  it 'order' do
    # Was a bare debug print (`p IoMerchant::Order.generate_unique_id`);
    # turned into an actual expectation so the call is verified.
    expect(IoMerchant::Order.generate_unique_id).not_to be_nil
    expect(@order.code).to eq('xxx')
  end

  it 'created should be true' do
    expect(@order.created?).to eq(true)
  end

  it 'pending should be false' do
    expect(@order.pending?).to eq(false)
  end
end
"content_hash": "07a8846a4cdc63f99e817fb58c5d74c7",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 66,
"avg_line_length": 21,
"alnum_prop": 0.623015873015873,
"repo_name": "iobuild/io_merchant",
"id": "188d3da87878483a0e118eff7f1d3b2b4cf1d623",
"size": "504",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "spec/lib/order_spec.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "2427"
},
{
"name": "JavaScript",
"bytes": "6855"
},
{
"name": "Ruby",
"bytes": "74521"
}
],
"symlink_target": ""
} |
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=1">
<meta name="author" content="Open Knowledge">
<meta name="description" content="The Global Open Data Index assesses the state of open government data around the world.
">
<meta name="keywords" content="Open Government, Open Data, Government Transparency, Open Knowledge
">
<meta property="og:type" content="website"/>
<meta property="og:title" content="Open Data Index - Open Knowledge"/>
<meta property="og:site_name" content="Open Data Index"/>
<meta property="og:description"
content="The Global Open Data Index assesses the state of open government data around the world."/>
<meta property="og:image" content="/static/images/favicon.ico"/>
<title>Cayman Islands / Land Ownership (2014) | Global Open Data Index by Open Knowledge</title>
<base href="/">
<!--[if lt IE 9]>
<script src="/static/vendor/html5shiv.min.js"></script>
<![endif]-->
<link rel="stylesheet" href="/static/css/site.css">
<link rel="icon" href="/static/images/favicon.ico">
<script>
var siteUrl = '';
</script>
</head>
<body class="na">
<div class="fixed-ok-panel">
<div id="ok-panel" class="closed">
<iframe src="http://assets.okfn.org/themes/okfn/okf-panel.html" scrolling="no"></iframe>
</div>
<a class="ok-ribbon"><img src="http://okfnlabs.org/ok-panel/assets/images/ok-ribbon.png" alt="Open Knowledge"></a>
</div>
<header id="header">
<nav class="navbar navbar-default" role="navigation">
<div class="container">
<div>
<div class="navbar-header">
<button type="button" class="navbar-toggle collapsed" data-toggle="collapse"
data-target="#navbar-collapse">
<span class="sr-only">Toggle navigation</span>
<span class="icon-bar"></span>
<span class="icon-bar"></span>
<span class="icon-bar"></span>
</button>
<div class="logo">
<a href="/">
<img src="/static/images/logo2.png">
<span>Global<br/>Open Data Index</span>
</a>
</div>
</div>
<div class="collapse navbar-collapse" id="navbar-collapse">
<ul class="nav navbar-nav" style="margin-right: 132px;">
<li>
<a href="/place/" title="About the Open Data Index project">
Places
</a>
</li>
<li>
<a href="/dataset/" title="About the Open Data Index project">
Datasets
</a>
</li>
<li>
<a href="/download/" title="Download Open Data Index data">
Download
</a>
</li>
<li>
<a href="/insights/" title="Insights">
Insights
</a>
</li>
<li>
<a href="/methodology/"
title="The methodology behind the Open Data Index">
Methodology
</a>
</li>
<li>
<a href="/about/" title="About the Open Data Index project">
About
</a>
</li>
<li>
<a href="/press/"
title="Press information for the Open Data Index">
Press
</a>
</li>
</ul>
</div>
</div>
</div>
</nav>
</header>
<div class="container">
<div class="content">
<div class="row">
<div class="col-md-12">
<ol class="breadcrumb">
<li>
<a href="/">Home</a>
</li>
<li class="active">Cayman Islands / Land Ownership (2014)</li>
</ol>
<header class="page-header">
<h1>Cayman Islands / Land Ownership (2014)</h1>
</header>
<h3>Sorry</h3>
<p>
There is no data available for Cayman Islands / Land Ownership (2014) in the Index.
</p>
</div>
</div>
</div>
</div>
<footer id="footer">
<div class="container">
<div class="row">
<div class="footer-main col-md-8">
<div class="footer-attribution">
<p>
<a href="http://opendefinition.org/ossd/" title="Open Online Software Service">
<img src="http://assets.okfn.org/images/ok_buttons/os_80x15_orange_grey.png" alt=""
border=""/>
</a>
<a href="http://opendefinition.org/okd/" title="Open Online Software Service">
<img src="http://assets.okfn.org/images/ok_buttons/od_80x15_blue.png" alt="" border=""/>
</a>
<a href="http://opendefinition.org/okd/" title="Open Content">
<img src="http://assets.okfn.org/images/ok_buttons/oc_80x15_blue.png" alt="" border=""/>
</a>
–
                        <a href="http://creativecommons.org/licenses/by/3.0/"
                           title="Content Licensed under a CC Attribution">Content License (CC Attribution)</a>
<a href="http://opendatacommons.org/licenses/pddl/1.0"
title="Data License (Public Domain)">Data License (Public
Domain)</a>
</p>
</div>
<div class="footer-meta">
<p>
This service is run by <a href="https://okfn.org/" title="Open Knowledge">Open Knowledge</a>
</p> <a class="naked" href="http://okfn.org/" title="Open Knowledge"><img
src="http://assets.okfn.org/p/okfn/img/okfn-logo-landscape-black-s.png" alt="" height="28"></a>
</div>
</div>
<div class="footer-links col-md-2">
<li><a href="http://okfn.org/" title="Open Knowledge">Open Knowledge</a></li>
<li><a href="http://okfn.org/opendata/" title="What is Open Data?">What is
Open Data?</a></li>
<li><a href="http://census.okfn.org/" title="Run your own Index">Run your
own Index</a></li>
<li><a href="https://github.com/okfn/opendataindex" title="The source code for Open Data Index">Source Code</a></li>
</div>
<div class="footer-links col-md-2">
<li><a href="/" title="Open Data Index home">Home</a></li>
<li><a href="/download/" title="Download data">Download</a></li>
<li><a href="/methodology/"
title="The methodology behind the Open Data Index">Methodology</a></li>
<li><a href="/faq/" title=" Open Data Index FAQ">FAQ</a></li>
<li><a href="/about/" title="About the Open Data Index">About</a></li>
<li><a href="/about/" title="Contact us">Contact</a></li>
<li><a href="/press/" title="Press">Press</a></li>
</div>
</div>
</div>
</footer>
<script data-main="/static/scripts/site" src="/static/scripts/require.js"></script>
</body>
</html> | {
"content_hash": "1531031e381295056d271d7703e6a52f",
"timestamp": "",
"source": "github",
"line_count": 180,
"max_line_length": 132,
"avg_line_length": 45.34444444444444,
"alnum_prop": 0.4583435432492036,
"repo_name": "okfn/opendataindex-2015",
"id": "a069a373acf46c13947a5ecc6e8d3c45bffa66db",
"size": "8162",
"binary": false,
"copies": "1",
"ref": "refs/heads/gh-pages",
"path": "place/cayman-islands/land/2014/index.html",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "277465"
},
{
"name": "HTML",
"bytes": "169425658"
},
{
"name": "JavaScript",
"bytes": "37060"
}
],
"symlink_target": ""
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.