commit (stringlengths 40 to 40) | old_file (stringlengths 4 to 237) | new_file (stringlengths 4 to 237) | old_contents (stringlengths 1 to 4.24k) | new_contents (stringlengths 5 to 4.84k) | subject (stringlengths 15 to 778) | message (stringlengths 16 to 6.86k) | lang (stringlengths 1 to 30) | license (stringclasses 13 values) | repos (stringlengths 5 to 116k) | config (stringlengths 1 to 30) | content (stringlengths 105 to 8.72k) |
---|---|---|---|---|---|---|---|---|---|---|---|
fa8cfbc631dfab0067b8c15bf6374579af071e7a | tests/test_main.py | tests/test_main.py | import sys
import unittest
import tempfile
import pathlib
import os
import os.path
from unittest.mock import patch
import monitor
class TestMonitor(unittest.TestCase):
def test_MonitorConfigInterval(self):
with self.assertRaises(SystemExit):
testargs = ["monitor.py", "-f", "tests/mocks/ini/monitor-nointerval.ini"]
with patch.object(sys, "argv", testargs):
monitor.main()
with self.assertRaises(SystemExit):
testargs = ["monitor.py", "-f", "tests/mocks/ini/monitor-badinterval.ini"]
with patch.object(sys, "argv", testargs):
monitor.main()
def test_file_hup(self):
temp_file_info = tempfile.mkstemp()
os.close(temp_file_info[0])
temp_file_name = temp_file_info[1]
monitor.check_hup_file(temp_file_name)
pathlib.Path(temp_file_name).touch()
self.assertEqual(
monitor.check_hup_file(temp_file_name),
True,
"check_hup_file did not trigger",
)
self.assertEqual(
monitor.check_hup_file(temp_file_name),
False,
"check_hup_file should not have triggered",
)
os.unlink(temp_file_name)
| import sys
import unittest
import tempfile
import pathlib
import os
import os.path
import time
from unittest.mock import patch
import monitor
class TestMonitor(unittest.TestCase):
def test_MonitorConfigInterval(self):
with self.assertRaises(SystemExit):
testargs = ["monitor.py", "-f", "tests/mocks/ini/monitor-nointerval.ini"]
with patch.object(sys, "argv", testargs):
monitor.main()
with self.assertRaises(SystemExit):
testargs = ["monitor.py", "-f", "tests/mocks/ini/monitor-badinterval.ini"]
with patch.object(sys, "argv", testargs):
monitor.main()
def test_file_hup(self):
temp_file_info = tempfile.mkstemp()
os.close(temp_file_info[0])
temp_file_name = temp_file_info[1]
monitor.check_hup_file(temp_file_name)
time.sleep(2)
pathlib.Path(temp_file_name).touch()
self.assertEqual(
monitor.check_hup_file(temp_file_name),
True,
"check_hup_file did not trigger",
)
self.assertEqual(
monitor.check_hup_file(temp_file_name),
False,
"check_hup_file should not have triggered",
)
os.unlink(temp_file_name)
| Add sleep during tests to prevent race | Add sleep during tests to prevent race
| Python | bsd-3-clause | jamesoff/simplemonitor,jamesoff/simplemonitor,jamesoff/simplemonitor,jamesoff/simplemonitor,jamesoff/simplemonitor | python | ## Code Before:
import sys
import unittest
import tempfile
import pathlib
import os
import os.path
from unittest.mock import patch
import monitor
class TestMonitor(unittest.TestCase):
def test_MonitorConfigInterval(self):
with self.assertRaises(SystemExit):
testargs = ["monitor.py", "-f", "tests/mocks/ini/monitor-nointerval.ini"]
with patch.object(sys, "argv", testargs):
monitor.main()
with self.assertRaises(SystemExit):
testargs = ["monitor.py", "-f", "tests/mocks/ini/monitor-badinterval.ini"]
with patch.object(sys, "argv", testargs):
monitor.main()
def test_file_hup(self):
temp_file_info = tempfile.mkstemp()
os.close(temp_file_info[0])
temp_file_name = temp_file_info[1]
monitor.check_hup_file(temp_file_name)
pathlib.Path(temp_file_name).touch()
self.assertEqual(
monitor.check_hup_file(temp_file_name),
True,
"check_hup_file did not trigger",
)
self.assertEqual(
monitor.check_hup_file(temp_file_name),
False,
"check_hup_file should not have triggered",
)
os.unlink(temp_file_name)
## Instruction:
Add sleep during tests to prevent race
## Code After:
import sys
import unittest
import tempfile
import pathlib
import os
import os.path
import time
from unittest.mock import patch
import monitor
class TestMonitor(unittest.TestCase):
def test_MonitorConfigInterval(self):
with self.assertRaises(SystemExit):
testargs = ["monitor.py", "-f", "tests/mocks/ini/monitor-nointerval.ini"]
with patch.object(sys, "argv", testargs):
monitor.main()
with self.assertRaises(SystemExit):
testargs = ["monitor.py", "-f", "tests/mocks/ini/monitor-badinterval.ini"]
with patch.object(sys, "argv", testargs):
monitor.main()
def test_file_hup(self):
temp_file_info = tempfile.mkstemp()
os.close(temp_file_info[0])
temp_file_name = temp_file_info[1]
monitor.check_hup_file(temp_file_name)
time.sleep(2)
pathlib.Path(temp_file_name).touch()
self.assertEqual(
monitor.check_hup_file(temp_file_name),
True,
"check_hup_file did not trigger",
)
self.assertEqual(
monitor.check_hup_file(temp_file_name),
False,
"check_hup_file should not have triggered",
)
os.unlink(temp_file_name)
|
2047601ed3e6901c51269eddd94780cfdd011ce8 | src/Doctrine/Functions/AbstractFunction.php | src/Doctrine/Functions/AbstractFunction.php | <?php
declare(strict_types=1);
namespace Brick\Geo\Doctrine\Functions;
use Doctrine\ORM\Query\AST\Functions\FunctionNode;
use Doctrine\ORM\Query\Lexer;
use Doctrine\ORM\Query\Parser;
use Doctrine\ORM\Query\SqlWalker;
/**
* Base class for Doctrine functions.
*/
abstract class AbstractFunction extends FunctionNode
{
/**
* @var \Doctrine\ORM\Query\AST\Node[]
*/
private $args = [];
abstract protected function getSqlFunctionName() : string;
abstract protected function getParameterCount() : int;
public function getSql(SqlWalker $sqlWalker)
{
$sql = $this->getSqlFunctionName() . '(';
foreach ($this->args as $key => $arg) {
if ($key !== 0) {
$sql .= ', ';
}
$sql .= $arg->dispatch($sqlWalker);
}
$sql .= ')';
return $sql;
}
public function parse(Parser $parser)
{
$this->args = [];
$parser->match(Lexer::T_IDENTIFIER);
$parser->match(Lexer::T_OPEN_PARENTHESIS);
$parameterCount = $this->getParameterCount();
for ($i = 0; $i < $parameterCount; $i++) {
if ($i !== 0) {
$parser->match(Lexer::T_COMMA);
}
/** @psalm-suppress MixedPropertyTypeCoercion */
$this->args[] = $parser->ArithmeticPrimary();
}
$parser->match(Lexer::T_CLOSE_PARENTHESIS);
}
}
| <?php
declare(strict_types=1);
namespace Brick\Geo\Doctrine\Functions;
use Doctrine\ORM\Query\AST\Functions\FunctionNode;
use Doctrine\ORM\Query\Lexer;
use Doctrine\ORM\Query\Parser;
use Doctrine\ORM\Query\SqlWalker;
/**
* Base class for Doctrine functions.
*/
abstract class AbstractFunction extends FunctionNode
{
/**
* @var \Doctrine\ORM\Query\AST\Node[]
*/
private $args = [];
abstract protected function getSqlFunctionName() : string;
abstract protected function getParameterCount() : int;
public function getSql(SqlWalker $sqlWalker) : string
{
$sql = $this->getSqlFunctionName() . '(';
foreach ($this->args as $key => $arg) {
if ($key !== 0) {
$sql .= ', ';
}
$sql .= $arg->dispatch($sqlWalker);
}
$sql .= ')';
return $sql;
}
public function parse(Parser $parser) : void
{
$this->args = [];
$parser->match(Lexer::T_IDENTIFIER);
$parser->match(Lexer::T_OPEN_PARENTHESIS);
$parameterCount = $this->getParameterCount();
for ($i = 0; $i < $parameterCount; $i++) {
if ($i !== 0) {
$parser->match(Lexer::T_COMMA);
}
/** @psalm-suppress MixedPropertyTypeCoercion */
$this->args[] = $parser->ArithmeticPrimary();
}
$parser->match(Lexer::T_CLOSE_PARENTHESIS);
}
}
| Add return types to Doctrine functions | Add return types to Doctrine functions
| PHP | mit | brick/geo | php | ## Code Before:
<?php
declare(strict_types=1);
namespace Brick\Geo\Doctrine\Functions;
use Doctrine\ORM\Query\AST\Functions\FunctionNode;
use Doctrine\ORM\Query\Lexer;
use Doctrine\ORM\Query\Parser;
use Doctrine\ORM\Query\SqlWalker;
/**
* Base class for Doctrine functions.
*/
abstract class AbstractFunction extends FunctionNode
{
/**
* @var \Doctrine\ORM\Query\AST\Node[]
*/
private $args = [];
abstract protected function getSqlFunctionName() : string;
abstract protected function getParameterCount() : int;
public function getSql(SqlWalker $sqlWalker)
{
$sql = $this->getSqlFunctionName() . '(';
foreach ($this->args as $key => $arg) {
if ($key !== 0) {
$sql .= ', ';
}
$sql .= $arg->dispatch($sqlWalker);
}
$sql .= ')';
return $sql;
}
public function parse(Parser $parser)
{
$this->args = [];
$parser->match(Lexer::T_IDENTIFIER);
$parser->match(Lexer::T_OPEN_PARENTHESIS);
$parameterCount = $this->getParameterCount();
for ($i = 0; $i < $parameterCount; $i++) {
if ($i !== 0) {
$parser->match(Lexer::T_COMMA);
}
/** @psalm-suppress MixedPropertyTypeCoercion */
$this->args[] = $parser->ArithmeticPrimary();
}
$parser->match(Lexer::T_CLOSE_PARENTHESIS);
}
}
## Instruction:
Add return types to Doctrine functions
## Code After:
<?php
declare(strict_types=1);
namespace Brick\Geo\Doctrine\Functions;
use Doctrine\ORM\Query\AST\Functions\FunctionNode;
use Doctrine\ORM\Query\Lexer;
use Doctrine\ORM\Query\Parser;
use Doctrine\ORM\Query\SqlWalker;
/**
* Base class for Doctrine functions.
*/
abstract class AbstractFunction extends FunctionNode
{
/**
* @var \Doctrine\ORM\Query\AST\Node[]
*/
private $args = [];
abstract protected function getSqlFunctionName() : string;
abstract protected function getParameterCount() : int;
public function getSql(SqlWalker $sqlWalker) : string
{
$sql = $this->getSqlFunctionName() . '(';
foreach ($this->args as $key => $arg) {
if ($key !== 0) {
$sql .= ', ';
}
$sql .= $arg->dispatch($sqlWalker);
}
$sql .= ')';
return $sql;
}
public function parse(Parser $parser) : void
{
$this->args = [];
$parser->match(Lexer::T_IDENTIFIER);
$parser->match(Lexer::T_OPEN_PARENTHESIS);
$parameterCount = $this->getParameterCount();
for ($i = 0; $i < $parameterCount; $i++) {
if ($i !== 0) {
$parser->match(Lexer::T_COMMA);
}
/** @psalm-suppress MixedPropertyTypeCoercion */
$this->args[] = $parser->ArithmeticPrimary();
}
$parser->match(Lexer::T_CLOSE_PARENTHESIS);
}
}
|
6e929d45c8ece44054e7731746b8bb8143d396ec | app/templates/dep_tree.html | app/templates/dep_tree.html | <div class="small-12 columns">
<div class="small-4 columns note">
{{ plugin.tokensWithoutHeadCount() }} of {{ state.totalTokens }} unused
</div>
<div class="small-6 columns"></div>
<div class="small-2 columns right end">
<span
class="button radius tiny right"
ng-show="plugin.diffPresent"
ng-click="plugin.toggleDiff()">
Toggle Diff
</span>
</div>
</div>
<div
lang-specific
dependency-tree
tokens="state.tokens"
styles="plugin.diffStyles()"
to-bottom
class="full-width">
</div>
| <div class="small-12 columns">
<div
class="small-4 columns note"
tooltip="Click to highlight unused tokens"
tooltip-popup-delay="1000">
{{ plugin.tokensWithoutHeadCount() }} of {{ state.totalTokens }} unused
</div>
<div class="small-6 columns"></div>
<div class="small-2 columns right end">
<span
class="button radius tiny right"
ng-show="plugin.diffPresent"
ng-click="plugin.toggleDiff()">
Toggle Diff
</span>
</div>
</div>
<div
lang-specific
dependency-tree
tokens="state.tokens"
styles="plugin.diffStyles()"
to-bottom
class="full-width">
</div>
| Add a tooltip in depTree template | Add a tooltip in depTree template
| HTML | mit | Masoumeh/arethusa,alpheios-project/arethusa,PonteIneptique/arethusa,alpheios-project/arethusa,fbaumgardt/arethusa,latin-language-toolkit/arethusa,fbaumgardt/arethusa,latin-language-toolkit/arethusa,Masoumeh/arethusa,alpheios-project/arethusa,PonteIneptique/arethusa,fbaumgardt/arethusa | html | ## Code Before:
<div class="small-12 columns">
<div class="small-4 columns note">
{{ plugin.tokensWithoutHeadCount() }} of {{ state.totalTokens }} unused
</div>
<div class="small-6 columns"></div>
<div class="small-2 columns right end">
<span
class="button radius tiny right"
ng-show="plugin.diffPresent"
ng-click="plugin.toggleDiff()">
Toggle Diff
</span>
</div>
</div>
<div
lang-specific
dependency-tree
tokens="state.tokens"
styles="plugin.diffStyles()"
to-bottom
class="full-width">
</div>
## Instruction:
Add a tooltip in depTree template
## Code After:
<div class="small-12 columns">
<div
class="small-4 columns note"
tooltip="Click to highlight unused tokens"
tooltip-popup-delay="1000">
{{ plugin.tokensWithoutHeadCount() }} of {{ state.totalTokens }} unused
</div>
<div class="small-6 columns"></div>
<div class="small-2 columns right end">
<span
class="button radius tiny right"
ng-show="plugin.diffPresent"
ng-click="plugin.toggleDiff()">
Toggle Diff
</span>
</div>
</div>
<div
lang-specific
dependency-tree
tokens="state.tokens"
styles="plugin.diffStyles()"
to-bottom
class="full-width">
</div>
|
2a78c3d61496d16584a00942073965ef79bcb8de | src/MageTest/Manager/Fixtures/Order.yml | src/MageTest/Manager/Fixtures/Order.yml | sales/quote (customer/address catalog/product):
shipping_method: flatrate_flatrate
payment_method: checkmo | sales/quote (catalog/product customer/address):
shipping_method: flatrate_flatrate
payment_method: checkmo | Add product to quote before addresses, fixes order/quote item price and and order/quote grand total | Add product to quote before addresses, fixes order/quote item price and and order/quote grand total
| YAML | mit | MageTest/Manager,MageTest/Manager,dwickstrom/Manager,dwickstrom/Manager | yaml | ## Code Before:
sales/quote (customer/address catalog/product):
shipping_method: flatrate_flatrate
payment_method: checkmo
## Instruction:
Add product to quote before addresses, fixes order/quote item price and and order/quote grand total
## Code After:
sales/quote (catalog/product customer/address):
shipping_method: flatrate_flatrate
payment_method: checkmo |
af8220dd76f402f256b1bc59ae4a92483db52618 | src/_border-widths.css | src/_border-widths.css | /*
BORDER WIDTHS
Base:
bw = border-width
Modifiers:
0 = 0 width border
1 = 1st step in border-width scale
2 = 2nd step in border-width scale
3 = 3rd step in border-width scale
4 = 4th step in border-width scale
5 = 5th step in border-width scale
Media Query Extensions:
-ns = not-small
-m = medium
-l = large
*/
.bw0 { border-width: 0; }
.bw1 { border-width: .125rem; }
.bw2 { border-width: .25rem; }
.bw3 { border-width: .5rem; }
.bw4 { border-width: .75rem; }
.bw5 { border-width: 1rem; }
| /*
BORDER WIDTHS
Base:
bw = border-width
Modifiers:
0 = 0 width border
1 = 1st step in border-width scale
2 = 2nd step in border-width scale
3 = 3rd step in border-width scale
4 = 4th step in border-width scale
5 = 5th step in border-width scale
Media Query Extensions:
-ns = not-small
-m = medium
-l = large
*/
.bw0 { border-width: 0; }
.bw1 { border-width: .125rem; }
.bw2 { border-width: .25rem; }
.bw3 { border-width: .5rem; }
.bw4 { border-width: .75rem; }
.bw5 { border-width: 1rem; }
@media (--breakpoint-not-small) {
.bw0-ns { border-width: 0; }
.bw1-ns { border-width: .125rem; }
.bw2-ns { border-width: .25rem; }
.bw3-ns { border-width: .5rem; }
.bw4-ns { border-width: .75rem; }
.bw5-ns { border-width: 1rem; }
}
@media (--breakpoint-medium) {
.bw0-m { border-width: 0; }
.bw1-m { border-width: .125rem; }
.bw2-m { border-width: .25rem; }
.bw3-m { border-width: .5rem; }
.bw4-m { border-width: .75rem; }
.bw5-m { border-width: 1rem; }
}
@media (--breakpoint-large) {
.bw0-l { border-width: 0; }
.bw1-l { border-width: .125rem; }
.bw2-l { border-width: .25rem; }
.bw3-l { border-width: .5rem; }
.bw4-l { border-width: .75rem; }
.bw5-l { border-width: 1rem; }
}
| Add ability to target breakpoints for border-width. | Add ability to target breakpoints for border-width.
| CSS | mit | getfrank/tachyons,topherauyeung/portfolio,topherauyeung/portfolio,fenderdigital/css-utilities,topherauyeung/portfolio,pietgeursen/pietgeursen.github.io,matyikriszta/moonlit-landing-page,fenderdigital/css-utilities,tachyons-css/tachyons,cwonrails/tachyons | css | ## Code Before:
/*
BORDER WIDTHS
Base:
bw = border-width
Modifiers:
0 = 0 width border
1 = 1st step in border-width scale
2 = 2nd step in border-width scale
3 = 3rd step in border-width scale
4 = 4th step in border-width scale
5 = 5th step in border-width scale
Media Query Extensions:
-ns = not-small
-m = medium
-l = large
*/
.bw0 { border-width: 0; }
.bw1 { border-width: .125rem; }
.bw2 { border-width: .25rem; }
.bw3 { border-width: .5rem; }
.bw4 { border-width: .75rem; }
.bw5 { border-width: 1rem; }
## Instruction:
Add ability to target breakpoints for border-width.
## Code After:
/*
BORDER WIDTHS
Base:
bw = border-width
Modifiers:
0 = 0 width border
1 = 1st step in border-width scale
2 = 2nd step in border-width scale
3 = 3rd step in border-width scale
4 = 4th step in border-width scale
5 = 5th step in border-width scale
Media Query Extensions:
-ns = not-small
-m = medium
-l = large
*/
.bw0 { border-width: 0; }
.bw1 { border-width: .125rem; }
.bw2 { border-width: .25rem; }
.bw3 { border-width: .5rem; }
.bw4 { border-width: .75rem; }
.bw5 { border-width: 1rem; }
@media (--breakpoint-not-small) {
.bw0-ns { border-width: 0; }
.bw1-ns { border-width: .125rem; }
.bw2-ns { border-width: .25rem; }
.bw3-ns { border-width: .5rem; }
.bw4-ns { border-width: .75rem; }
.bw5-ns { border-width: 1rem; }
}
@media (--breakpoint-medium) {
.bw0-m { border-width: 0; }
.bw1-m { border-width: .125rem; }
.bw2-m { border-width: .25rem; }
.bw3-m { border-width: .5rem; }
.bw4-m { border-width: .75rem; }
.bw5-m { border-width: 1rem; }
}
@media (--breakpoint-large) {
.bw0-l { border-width: 0; }
.bw1-l { border-width: .125rem; }
.bw2-l { border-width: .25rem; }
.bw3-l { border-width: .5rem; }
.bw4-l { border-width: .75rem; }
.bw5-l { border-width: 1rem; }
}
|
da4b39a6fe7006686a864577e3d8e8c4cbf1f236 | .github/workflows/deploy.yml | .github/workflows/deploy.yml | name: Deploy built-framework for Carthage when tagged
on:
push:
tags:
- '*'
jobs:
deploy:
name: Deploy built-framework to GitHub releases
runs-on: macOS-latest
strategy:
matrix:
framework: [WebP]
steps:
- name: Checkout repo
uses: actions/checkout@v1
- name: Archive built framework with Carthage
if: true
run: |
set -o pipefail
brew update
brew outdated carthage || brew upgrade carthage
git submodule update -i
carthage build --no-skip-current
carthage archive ${{ matrix.framework }}
- name: Upload built framework to GitHub Release
uses: svenstaro/upload-release-action@v1-release
with:
file: '${{ matrix.framework }}.framework.zip'
asset_name: '${{ matrix.framework }}.framework.zip'
tag: ${{ github.ref }}
overwrite: true
repo_token: ${{ secrets.GITHUB_TOKEN }}
| name: Deploy prebuilt-framework for Carthage when tagged
on:
push:
tags:
- '*'
jobs:
deploy:
name: Deploy built-framework to GitHub releases
runs-on: macOS-latest
strategy:
matrix:
framework: [WebP]
steps:
- name: Checkout repo
uses: actions/checkout@v1
- name: Archive prebuilt framework with Carthage
run: |
set -o pipefail
brew update
brew outdated carthage || brew upgrade carthage
git submodule update -i
carthage build --no-skip-current
carthage archive ${{ matrix.framework }}
- name: Upload built framework to GitHub Release
uses: svenstaro/upload-release-action@v1-release
with:
file: '${{ matrix.framework }}.framework.zip'
asset_name: '${{ matrix.framework }}.framework.zip'
tag: ${{ github.ref }}
overwrite: true
repo_token: ${{ secrets.GITHUB_TOKEN }}
| Fix name and remove unnecessary condition | Fix name and remove unnecessary condition
| YAML | mit | ainame/Swift-WebP,ainame/Swift-WebP,ainame/Swift-WebP | yaml | ## Code Before:
name: Deploy built-framework for Carthage when tagged
on:
push:
tags:
- '*'
jobs:
deploy:
name: Deploy built-framework to GitHub releases
runs-on: macOS-latest
strategy:
matrix:
framework: [WebP]
steps:
- name: Checkout repo
uses: actions/checkout@v1
- name: Archive built framework with Carthage
if: true
run: |
set -o pipefail
brew update
brew outdated carthage || brew upgrade carthage
git submodule update -i
carthage build --no-skip-current
carthage archive ${{ matrix.framework }}
- name: Upload built framework to GitHub Release
uses: svenstaro/upload-release-action@v1-release
with:
file: '${{ matrix.framework }}.framework.zip'
asset_name: '${{ matrix.framework }}.framework.zip'
tag: ${{ github.ref }}
overwrite: true
repo_token: ${{ secrets.GITHUB_TOKEN }}
## Instruction:
Fix name and remove unnecessary condition
## Code After:
name: Deploy prebuilt-framework for Carthage when tagged
on:
push:
tags:
- '*'
jobs:
deploy:
name: Deploy built-framework to GitHub releases
runs-on: macOS-latest
strategy:
matrix:
framework: [WebP]
steps:
- name: Checkout repo
uses: actions/checkout@v1
- name: Archive prebuilt framework with Carthage
run: |
set -o pipefail
brew update
brew outdated carthage || brew upgrade carthage
git submodule update -i
carthage build --no-skip-current
carthage archive ${{ matrix.framework }}
- name: Upload built framework to GitHub Release
uses: svenstaro/upload-release-action@v1-release
with:
file: '${{ matrix.framework }}.framework.zip'
asset_name: '${{ matrix.framework }}.framework.zip'
tag: ${{ github.ref }}
overwrite: true
repo_token: ${{ secrets.GITHUB_TOKEN }}
|
b6c1d9869fcf5865753defd912c450efb6e2467c | comprehend.gemspec | comprehend.gemspec | Gem::Specification.new do |s|
s.name = 'comprehend'
s.version = '0.0.0'
s.date = '2013-01-29'
s.summary = "Compact and flatten an array in one iteration"
s.description = '[1,2,3].comprehend{ |i| i.to_s if i<3 } == ["1","2"]'
s.authors = ["Aaron Weiner"]
s.email = '[email protected]'
s.files = Dir.glob('lib/**/*.rb') +
Dir.glob('ext/**/*.{c,h,rb}')
s.extensions = ['ext/comprehend/extconf.rb']
s.homepage = 'http://github.com/mdsol/comprehend'
s.required_ruby_version = '>= 1.9.3'
end
| Gem::Specification.new do |s|
s.name = 'comprehend'
s.version = '0.0.0'
s.date = '2013-01-29'
s.summary = "Transform and compact an array in one iteration"
s.description = '[1,2,3].comprehend{ |i| i.to_s if i<3 } == ["1","2"]'
s.authors = ["Aaron Weiner"]
s.email = '[email protected]'
s.files = Dir.glob('lib/**/*.rb') +
Dir.glob('ext/**/*.{c,h,rb}')
s.extensions = ['ext/comprehend/extconf.rb']
s.homepage = 'http://github.com/mdsol-share/comprehend'
s.required_ruby_version = '>= 1.9.3'
s.add_development_dependency 'rspec'
end
| Correct homepage, summary, development dependencies | Correct homepage, summary, development dependencies
| Ruby | mit | mdsol-share/comprehend,mdsol-share/comprehend | ruby | ## Code Before:
Gem::Specification.new do |s|
s.name = 'comprehend'
s.version = '0.0.0'
s.date = '2013-01-29'
s.summary = "Compact and flatten an array in one iteration"
s.description = '[1,2,3].comprehend{ |i| i.to_s if i<3 } == ["1","2"]'
s.authors = ["Aaron Weiner"]
s.email = '[email protected]'
s.files = Dir.glob('lib/**/*.rb') +
Dir.glob('ext/**/*.{c,h,rb}')
s.extensions = ['ext/comprehend/extconf.rb']
s.homepage = 'http://github.com/mdsol/comprehend'
s.required_ruby_version = '>= 1.9.3'
end
## Instruction:
Correct homepage, summary, development dependencies
## Code After:
Gem::Specification.new do |s|
s.name = 'comprehend'
s.version = '0.0.0'
s.date = '2013-01-29'
s.summary = "Transform and compact an array in one iteration"
s.description = '[1,2,3].comprehend{ |i| i.to_s if i<3 } == ["1","2"]'
s.authors = ["Aaron Weiner"]
s.email = '[email protected]'
s.files = Dir.glob('lib/**/*.rb') +
Dir.glob('ext/**/*.{c,h,rb}')
s.extensions = ['ext/comprehend/extconf.rb']
s.homepage = 'http://github.com/mdsol-share/comprehend'
s.required_ruby_version = '>= 1.9.3'
s.add_development_dependency 'rspec'
end
|
7ec8ab9fa4cea43c7da6646db06636ad024b5ce7 | docker-compose.yml | docker-compose.yml | version: '3.1'
services:
http:
build:
context: .docker/http
args:
- COMPOSE_PROJECT_NAME
depends_on:
- app
env_file: .env
networks:
public:
ports:
- "80:80"
volumes:
- .:/var/www/html
app:
build: .
depends_on:
- db
- redis
env_file: .env
networks:
public:
private:
volumes:
- .:/var/www/html
db:
env_file: .env
image: mariadb:10.1.19
networks:
private:
volumes:
- db:/var/lib/mysql
redis:
command: ["redis-server", "--appendonly", "yes"]
env_file: .env
image: redis:3.2.9-alpine
networks:
private:
volumes:
- redis:/data
npm:
build:
context: .docker/npm
env_file: .env
networks:
private:
volumes:
- .:/usr/src/app
networks:
public:
driver: overlay
private:
driver: overlay
volumes:
db:
redis:
| version: '3.1'
services:
http:
build:
context: .docker/http
args:
- COMPOSE_PROJECT_NAME
depends_on:
- app
env_file: .env
networks:
public:
ports:
- "80:80"
volumes:
- .:/var/www/html
app:
build: .
depends_on:
- db
- redis
env_file: .env
networks:
public:
private:
volumes:
- .:/var/www/html
db:
env_file: .env
image: mariadb:10.1.19
networks:
private:
ports:
- "3306:3306"
volumes:
- db:/var/lib/mysql
redis:
command: ["redis-server", "--appendonly", "yes"]
env_file: .env
image: redis:3.2.9-alpine
networks:
private:
volumes:
- redis:/data
npm:
build:
context: .docker/npm
env_file: .env
networks:
private:
volumes:
- .:/usr/src/app
networks:
public:
driver: overlay
private:
driver: overlay
volumes:
db:
redis:
| Make MariaDB available to host OS from docker | Make MariaDB available to host OS from docker
| YAML | mit | dnunez24/craftcms-project-heroku,dnunez24/craftcms-project-heroku,dnunez24/craftcms-project-heroku | yaml | ## Code Before:
version: '3.1'
services:
http:
build:
context: .docker/http
args:
- COMPOSE_PROJECT_NAME
depends_on:
- app
env_file: .env
networks:
public:
ports:
- "80:80"
volumes:
- .:/var/www/html
app:
build: .
depends_on:
- db
- redis
env_file: .env
networks:
public:
private:
volumes:
- .:/var/www/html
db:
env_file: .env
image: mariadb:10.1.19
networks:
private:
volumes:
- db:/var/lib/mysql
redis:
command: ["redis-server", "--appendonly", "yes"]
env_file: .env
image: redis:3.2.9-alpine
networks:
private:
volumes:
- redis:/data
npm:
build:
context: .docker/npm
env_file: .env
networks:
private:
volumes:
- .:/usr/src/app
networks:
public:
driver: overlay
private:
driver: overlay
volumes:
db:
redis:
## Instruction:
Make MariaDB available to host OS from docker
## Code After:
version: '3.1'
services:
http:
build:
context: .docker/http
args:
- COMPOSE_PROJECT_NAME
depends_on:
- app
env_file: .env
networks:
public:
ports:
- "80:80"
volumes:
- .:/var/www/html
app:
build: .
depends_on:
- db
- redis
env_file: .env
networks:
public:
private:
volumes:
- .:/var/www/html
db:
env_file: .env
image: mariadb:10.1.19
networks:
private:
ports:
- "3306:3306"
volumes:
- db:/var/lib/mysql
redis:
command: ["redis-server", "--appendonly", "yes"]
env_file: .env
image: redis:3.2.9-alpine
networks:
private:
volumes:
- redis:/data
npm:
build:
context: .docker/npm
env_file: .env
networks:
private:
volumes:
- .:/usr/src/app
networks:
public:
driver: overlay
private:
driver: overlay
volumes:
db:
redis:
|
df92c184537199dd79b3a281198260b5a6c7b75d | README.md | README.md | ---
## **Setup**
***Warning:*** This bot is made mainly for my specific server(s). No attempt is, or will ever
be made to make installation or setup easier. This bot uses dev builds of discord.py and even Python itself.
***Prerequisites:***
- [Python 3.6](https://www.python.org/download/pre-releases/)
- Python 3.6 is REQUIRED as this bot makes heavy use of the new string formatting.
- [discord.py](https://github.com/Rapptz/discord.py)
- Latest development version
- [Discord API app](https://discordapp.com/developers/applications/me)
- You'll need to create the bot account for the app if that wasn't obvious.
***Setup:***
- Download or clone the repo
- Edit `config/config.json` with the appropriate info and settings
- Launch the bot via `python bot.py` or `python3 bot.py` depending on your system
- If the bot can't create folders or files it'll complain because it needs to do that
- If you're on windows run the command `chcp 65001` before starting the bot else you'll get errors for days
| ---
# THIS IS THE DEV VERSION
# IT WON'T WORK HALF THE TIME
# DON'T USE IT
# PLEASE
# STOP ASKING ME ABOUT IT
# NO
# GO AWAY
## **Setup**
***Warning:*** This bot is made mainly for my specific server(s). No attempt is, or will ever
be made to make installation or setup easier. This bot uses dev builds of discord.py and even Python itself.
***Prerequisites:***
- [Python 3.6](https://www.python.org/download/pre-releases/)
- Python 3.6 is REQUIRED as this bot makes heavy use of the new string formatting.
- [discord.py](https://github.com/Rapptz/discord.py)
- Latest development version
- [Discord API app](https://discordapp.com/developers/applications/me)
- You'll need to create the bot account for the app if that wasn't obvious.
***Setup:***
- Download or clone the repo
- Edit `config/config.json` with the appropriate info and settings
- Launch the bot via `python bot.py` or `python3 bot.py` depending on your system
- If the bot can't create folders or files it'll complain because it needs to do that
- If you're on windows run the command `chcp 65001` before starting the bot else you'll get errors for days
| Make the dev version clear | Make the dev version clear
| Markdown | mit | Ispira/Ispyra | markdown | ## Code Before:
---
## **Setup**
***Warning:*** This bot is made mainly for my specific server(s). No attempt is, or will ever
be made to make installation or setup easier. This bot uses dev builds of discord.py and even Python itself.
***Prerequisites:***
- [Python 3.6](https://www.python.org/download/pre-releases/)
- Python 3.6 is REQUIRED as this bot makes heavy use of the new string formatting.
- [discord.py](https://github.com/Rapptz/discord.py)
- Latest development version
- [Discord API app](https://discordapp.com/developers/applications/me)
- You'll need to create the bot account for the app if that wasn't obvious.
***Setup:***
- Download or clone the repo
- Edit `config/config.json` with the appropriate info and settings
- Launch the bot via `python bot.py` or `python3 bot.py` depending on your system
- If the bot can't create folders or files it'll complain because it needs to do that
- If you're on windows run the command `chcp 65001` before starting the bot else you'll get errors for days
## Instruction:
Make the dev version clear
## Code After:
---
# THIS IS THE DEV VERSION
# IT WON'T WORK HALF THE TIME
# DON'T USE IT
# PLEASE
# STOP ASKING ME ABOUT IT
# NO
# GO AWAY
## **Setup**
***Warning:*** This bot is made mainly for my specific server(s). No attempt is, or will ever
be made to make installation or setup easier. This bot uses dev builds of discord.py and even Python itself.
***Prerequisites:***
- [Python 3.6](https://www.python.org/download/pre-releases/)
- Python 3.6 is REQUIRED as this bot makes heavy use of the new string formatting.
- [discord.py](https://github.com/Rapptz/discord.py)
- Latest development version
- [Discord API app](https://discordapp.com/developers/applications/me)
- You'll need to create the bot account for the app if that wasn't obvious.
***Setup:***
- Download or clone the repo
- Edit `config/config.json` with the appropriate info and settings
- Launch the bot via `python bot.py` or `python3 bot.py` depending on your system
- If the bot can't create folders or files it'll complain because it needs to do that
- If you're on windows run the command `chcp 65001` before starting the bot else you'll get errors for days
|
38f55c8473ad69bc6e52e89f5f5b803f77558632 | provider/src/main/java/com/github/terma/gigaspacewebconsole/provider/executor/DatabaseExecutor.java | provider/src/main/java/com/github/terma/gigaspacewebconsole/provider/executor/DatabaseExecutor.java | /*
Copyright 2015-2017 Artem Stasiuk
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.github.terma.gigaspacewebconsole.provider.executor;
import com.github.terma.gigaspacewebconsole.provider.ConverterHelper;
import java.util.ArrayList;
import java.util.Collections;
public class DatabaseExecutor {
private static final ConnectionFactory connectionFactory = new DatabaseConnectionFactory();
private static final ConverterHelper converterHelper = new ConverterHelper(new ArrayList<String>());
public static final Executor INSTANCE = new Executor(
connectionFactory,
new ZeroExecutorPreprocessor(),
Collections.<ExecutorPlugin>singletonList(new SqlOnJsonPlugin(connectionFactory, converterHelper)),
converterHelper);
}
| /*
Copyright 2015-2017 Artem Stasiuk
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.github.terma.gigaspacewebconsole.provider.executor;
import com.github.terma.gigaspacewebconsole.core.config.ConfigLocator;
import com.github.terma.gigaspacewebconsole.provider.ConverterHelper;
import java.util.Collections;
public class DatabaseExecutor {
private static final ConnectionFactory connectionFactory = new DatabaseConnectionFactory();
private static final ConverterHelper converterHelper = new ConverterHelper(ConfigLocator.CONFIG.user.converters);
public static final Executor INSTANCE = new Executor(
connectionFactory,
new ZeroExecutorPreprocessor(),
Collections.<ExecutorPlugin>singletonList(new SqlOnJsonPlugin(connectionFactory, converterHelper)),
converterHelper);
}
| Add support converter configuration for database | Add support converter configuration for database
| Java | apache-2.0 | terma/gigaspace-web-console,terma/gigaspace-web-console,terma/gigaspace-web-console,terma/gigaspace-web-console | java | ## Code Before:
/*
Copyright 2015-2017 Artem Stasiuk
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.github.terma.gigaspacewebconsole.provider.executor;
import com.github.terma.gigaspacewebconsole.provider.ConverterHelper;
import java.util.ArrayList;
import java.util.Collections;
public class DatabaseExecutor {
private static final ConnectionFactory connectionFactory = new DatabaseConnectionFactory();
private static final ConverterHelper converterHelper = new ConverterHelper(new ArrayList<String>());
public static final Executor INSTANCE = new Executor(
connectionFactory,
new ZeroExecutorPreprocessor(),
Collections.<ExecutorPlugin>singletonList(new SqlOnJsonPlugin(connectionFactory, converterHelper)),
converterHelper);
}
## Instruction:
Add support converter configuration for database
## Code After:
/*
Copyright 2015-2017 Artem Stasiuk
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.github.terma.gigaspacewebconsole.provider.executor;
import com.github.terma.gigaspacewebconsole.core.config.ConfigLocator;
import com.github.terma.gigaspacewebconsole.provider.ConverterHelper;
import java.util.Collections;
public class DatabaseExecutor {
private static final ConnectionFactory connectionFactory = new DatabaseConnectionFactory();
private static final ConverterHelper converterHelper = new ConverterHelper(ConfigLocator.CONFIG.user.converters);
public static final Executor INSTANCE = new Executor(
connectionFactory,
new ZeroExecutorPreprocessor(),
Collections.<ExecutorPlugin>singletonList(new SqlOnJsonPlugin(connectionFactory, converterHelper)),
converterHelper);
}
|
b31821e30638afbb9a6b2f2615bb70198acf6e78 | .travis.yml | .travis.yml | sudo: false
language: php
notifications:
email:
on_success: never
on_failure: change
branches:
only:
- master
cache:
directories:
- vendor
- $HOME/.composer/cache
matrix:
include:
- php: 7.1
env: WP_VERSION=latest
- php: 7.0
env: WP_VERSION=latest
- php: 5.6
env: WP_VERSION=latest
- php: 7.0
env: WP_VERSION=trunk
before_script:
- export PATH="$HOME/.composer/vendor/bin:$PATH"
- |
if [[ ! -z "$WP_VERSION" ]] ; then
bash bin/install-wp-tests.sh wordpress_test root '' localhost $WP_VERSION
if [[ ${TRAVIS_PHP_VERSION:0:2} == "5." ]]; then
composer global require "phpunit/phpunit=4.8.*"
else
composer global require "phpunit/phpunit=5.7.*"
fi
fi
script:
- |
if [[ ! -z "$WP_VERSION" ]] ; then
phpunit
WP_MULTISITE=1 phpunit
fi
| sudo: false
language: php
notifications:
email: false
cache:
directories:
- vendor
- $HOME/.composer/cache
matrix:
include:
- php: 7.1
env: WP_VERSION=latest
- php: 7.0
env: WP_VERSION=latest
- php: 5.6
env: WP_VERSION=latest
- php: 7.0
env: WP_VERSION=trunk
before_script:
- export PATH="$HOME/.composer/vendor/bin:$PATH"
- |
if [[ ! -z "$WP_VERSION" ]] ; then
bash bin/install-wp-tests.sh wordpress_test root '' localhost $WP_VERSION
if [[ ${TRAVIS_PHP_VERSION:0:2} == "5." ]]; then
composer global require "phpunit/phpunit=4.8.*"
else
composer global require "phpunit/phpunit=5.7.*"
fi
fi
script:
- |
if [[ ! -z "$WP_VERSION" ]] ; then
phpunit
WP_MULTISITE=1 phpunit
fi
| Disable email notifications and remove branch restrictions on building with Travis CI | Disable email notifications and remove branch restrictions on building with Travis CI
| YAML | mit | liquidweb/sticky-tax,liquidweb/sticky-tax,liquidweb/sticky-tax | yaml | ## Code Before:
sudo: false
language: php
notifications:
email:
on_success: never
on_failure: change
branches:
only:
- master
cache:
directories:
- vendor
- $HOME/.composer/cache
matrix:
include:
- php: 7.1
env: WP_VERSION=latest
- php: 7.0
env: WP_VERSION=latest
- php: 5.6
env: WP_VERSION=latest
- php: 7.0
env: WP_VERSION=trunk
before_script:
- export PATH="$HOME/.composer/vendor/bin:$PATH"
- |
if [[ ! -z "$WP_VERSION" ]] ; then
bash bin/install-wp-tests.sh wordpress_test root '' localhost $WP_VERSION
if [[ ${TRAVIS_PHP_VERSION:0:2} == "5." ]]; then
composer global require "phpunit/phpunit=4.8.*"
else
composer global require "phpunit/phpunit=5.7.*"
fi
fi
script:
- |
if [[ ! -z "$WP_VERSION" ]] ; then
phpunit
WP_MULTISITE=1 phpunit
fi
## Instruction:
Disable email notifications and remove branch restrictions on building with Travis CI
## Code After:
sudo: false
language: php
notifications:
email: false
cache:
directories:
- vendor
- $HOME/.composer/cache
matrix:
include:
- php: 7.1
env: WP_VERSION=latest
- php: 7.0
env: WP_VERSION=latest
- php: 5.6
env: WP_VERSION=latest
- php: 7.0
env: WP_VERSION=trunk
before_script:
- export PATH="$HOME/.composer/vendor/bin:$PATH"
- |
if [[ ! -z "$WP_VERSION" ]] ; then
bash bin/install-wp-tests.sh wordpress_test root '' localhost $WP_VERSION
if [[ ${TRAVIS_PHP_VERSION:0:2} == "5." ]]; then
composer global require "phpunit/phpunit=4.8.*"
else
composer global require "phpunit/phpunit=5.7.*"
fi
fi
script:
- |
if [[ ! -z "$WP_VERSION" ]] ; then
phpunit
WP_MULTISITE=1 phpunit
fi
|
fe1ea6eaa85a3ea8f26ecebf72fec9209babfbc9 | app/controllers/api/base_controller.rb | app/controllers/api/base_controller.rb | module Api
class BaseController < ActionController::Base
Mumukit::Login.configure_controller! self
protect_from_forgery with: :null_session
include OnBaseOrganizationOnly
include WithApiErrors
include WithAuthorization
before_action :verify_api_client!
private
def verify_api_client!
ApiClient.verify_token! Mumukit::Auth::Token.extract_from_header(request.env['HTTP_AUTHORIZATION'])
end
end
end
| module Api
class BaseController < ActionController::Base
Mumukit::Login.configure_controller! self
protect_from_forgery with: :null_session
include WithApiErrors
include WithAuthorization
include Mumuki::Laboratory::Controllers::CurrentOrganization
before_action :set_current_organization!
include OnBaseOrganizationOnly
before_action :verify_api_client!
private
def verify_api_client!
ApiClient.verify_token! Mumukit::Auth::Token.extract_from_header(request.env['HTTP_AUTHORIZATION'])
end
end
end
| Select organization on base controller as well | Select organization on base controller as well
| Ruby | agpl-3.0 | mumuki/mumuki-laboratory,mumuki/mumuki-laboratory,mumuki/mumuki-laboratory,mumuki/mumuki-laboratory | ruby | ## Code Before:
module Api
class BaseController < ActionController::Base
Mumukit::Login.configure_controller! self
protect_from_forgery with: :null_session
include OnBaseOrganizationOnly
include WithApiErrors
include WithAuthorization
before_action :verify_api_client!
private
def verify_api_client!
ApiClient.verify_token! Mumukit::Auth::Token.extract_from_header(request.env['HTTP_AUTHORIZATION'])
end
end
end
## Instruction:
Select organization on base controller as well
## Code After:
module Api
class BaseController < ActionController::Base
Mumukit::Login.configure_controller! self
protect_from_forgery with: :null_session
include WithApiErrors
include WithAuthorization
include Mumuki::Laboratory::Controllers::CurrentOrganization
before_action :set_current_organization!
include OnBaseOrganizationOnly
before_action :verify_api_client!
private
def verify_api_client!
ApiClient.verify_token! Mumukit::Auth::Token.extract_from_header(request.env['HTTP_AUTHORIZATION'])
end
end
end
|
3c9602b1eae034dbc4fb617f22ebd776f944c05c | Rainy.UI/bower.json | Rainy.UI/bower.json | {
"name": "Rainy-UI",
"version": "0.0.1",
"devDependencies": {
"jquery": "~2.0.3",
"angular-strap": "~0.7.5",
"angular": "~1.0.7",
"bootstrap.zip": "http://getbootstrap.com/2.3.2/assets/bootstrap.zip",
"bootswatch-united": "http://netdna.bootstrapcdn.com/bootswatch/2.3.2/united/bootstrap.min.css",
"underscore": "~1.5.1",
"bootstrap-switch": "*"
},
"dependencies": {
"noty": "~2.1.0",
"wysihtml5": "~0.3.0",
"bootstrap-wysihtml5": "*"
}
}
| {
"name": "Rainy-UI",
"version": "0.0.1",
"devDependencies": {
"jquery": "~2.0.3",
"angular-strap": "~0.7.5",
"angular": "~1.0.7",
"bootstrap.zip": "http://getbootstrap.com/2.3.2/assets/bootstrap.zip",
"bootswatch-united": "http://netdna.bootstrapcdn.com/bootswatch/2.3.2/united/bootstrap.min.css",
"underscore": "~1.5.1",
"bootstrap-switch": "*"
},
"dependencies": {
"noty": "~2.1.0",
"wysihtml5": "~0.3.0",
"bootstrap-wysihtml5": "*"
},
"resolutions": {
"bootstrap": ">=2.0.4 <3.0"
}
}
| Fix version conflict in bowser.json | Fix version conflict in bowser.json
| JSON | agpl-3.0 | Dynalon/Rainy,Dynalon/Rainy,Dynalon/Rainy,Dynalon/Rainy | json | ## Code Before:
{
"name": "Rainy-UI",
"version": "0.0.1",
"devDependencies": {
"jquery": "~2.0.3",
"angular-strap": "~0.7.5",
"angular": "~1.0.7",
"bootstrap.zip": "http://getbootstrap.com/2.3.2/assets/bootstrap.zip",
"bootswatch-united": "http://netdna.bootstrapcdn.com/bootswatch/2.3.2/united/bootstrap.min.css",
"underscore": "~1.5.1",
"bootstrap-switch": "*"
},
"dependencies": {
"noty": "~2.1.0",
"wysihtml5": "~0.3.0",
"bootstrap-wysihtml5": "*"
}
}
## Instruction:
Fix version conflict in bowser.json
## Code After:
{
"name": "Rainy-UI",
"version": "0.0.1",
"devDependencies": {
"jquery": "~2.0.3",
"angular-strap": "~0.7.5",
"angular": "~1.0.7",
"bootstrap.zip": "http://getbootstrap.com/2.3.2/assets/bootstrap.zip",
"bootswatch-united": "http://netdna.bootstrapcdn.com/bootswatch/2.3.2/united/bootstrap.min.css",
"underscore": "~1.5.1",
"bootstrap-switch": "*"
},
"dependencies": {
"noty": "~2.1.0",
"wysihtml5": "~0.3.0",
"bootstrap-wysihtml5": "*"
},
"resolutions": {
"bootstrap": ">=2.0.4 <3.0"
}
}
|
3911e6df8dff68ebe8031cbcb6ba544deba68461 | spec/higher_level_api/integration/basic_cancel_spec.rb | spec/higher_level_api/integration/basic_cancel_spec.rb | require "spec_helper"
describe 'A consumer' do
let(:connection) { HotBunnies.connect }
after :each do
connection.close
end
it 'receives messages until cancelled' do
x = connection.create_channel.default_exchange
q = connection.create_channel.queue("", :auto_delete => true)
messages = []
consumer_exited = false
consumer = nil
consumer_thread = Thread.new do
consumer = q.subscribe do |headers, message|
messages << message
sleep 0.1
end
consumer_exited = true
end
publisher_thread = Thread.new do
20.times do
x.publish('hello world', :routing_key => q.name)
end
end
sleep 0.2
consumer.cancel
consumer_thread.join
publisher_thread.join
messages.should_not be_empty
consumer_exited.should be_true
end
end
| require "spec_helper"
describe 'A consumer' do
let(:connection) { HotBunnies.connect }
after :each do
connection.close
end
context "that does not block the caller" do
it 'receives messages until cancelled' do
x = connection.create_channel.default_exchange
q = connection.create_channel.queue("", :auto_delete => true)
messages = []
consumer_exited = false
consumer = nil
consumer_thread = Thread.new do
consumer = q.subscribe do |headers, message|
messages << message
sleep 0.1
end
consumer_exited = true
end
publisher_thread = Thread.new do
20.times do
x.publish('hello world', :routing_key => q.name)
end
end
sleep 0.2
consumer.cancel
consumer_thread.join
publisher_thread.join
messages.should_not be_empty
consumer_exited.should be_true
end
end
context "that DOES block the caller" do
it 'receives messages until cancelled' do
x = connection.create_channel.default_exchange
q = connection.create_channel.queue("", :auto_delete => true)
messages = []
consumer_exited = false
consumer = nil
consumer_thread = Thread.new do
co = q.build_consumer(:block => true) do |headers, message|
messages << message
sleep 0.1
end
consumer = co
q.subscribe_with(co, :block => true)
consumer_exited = true
end
publisher_thread = Thread.new do
20.times do
x.publish('hello world', :routing_key => q.name)
end
end
sleep 0.2
consumer.cancel
consumer_thread.join
publisher_thread.join
messages.should_not be_empty
consumer_exited.should be_true
end
end
end
| Add a test that cancels a blocking consumer | Add a test that cancels a blocking consumer
| Ruby | mit | ruby-amqp/march_hare,ruby-amqp/march_hare,noahhaon/march_hare,noahhaon/march_hare | ruby | ## Code Before:
require "spec_helper"
describe 'A consumer' do
let(:connection) { HotBunnies.connect }
after :each do
connection.close
end
it 'receives messages until cancelled' do
x = connection.create_channel.default_exchange
q = connection.create_channel.queue("", :auto_delete => true)
messages = []
consumer_exited = false
consumer = nil
consumer_thread = Thread.new do
consumer = q.subscribe do |headers, message|
messages << message
sleep 0.1
end
consumer_exited = true
end
publisher_thread = Thread.new do
20.times do
x.publish('hello world', :routing_key => q.name)
end
end
sleep 0.2
consumer.cancel
consumer_thread.join
publisher_thread.join
messages.should_not be_empty
consumer_exited.should be_true
end
end
## Instruction:
Add a test that cancels a blocking consumer
## Code After:
require "spec_helper"
describe 'A consumer' do
let(:connection) { HotBunnies.connect }
after :each do
connection.close
end
context "that does not block the caller" do
it 'receives messages until cancelled' do
x = connection.create_channel.default_exchange
q = connection.create_channel.queue("", :auto_delete => true)
messages = []
consumer_exited = false
consumer = nil
consumer_thread = Thread.new do
consumer = q.subscribe do |headers, message|
messages << message
sleep 0.1
end
consumer_exited = true
end
publisher_thread = Thread.new do
20.times do
x.publish('hello world', :routing_key => q.name)
end
end
sleep 0.2
consumer.cancel
consumer_thread.join
publisher_thread.join
messages.should_not be_empty
consumer_exited.should be_true
end
end
context "that DOES block the caller" do
it 'receives messages until cancelled' do
x = connection.create_channel.default_exchange
q = connection.create_channel.queue("", :auto_delete => true)
messages = []
consumer_exited = false
consumer = nil
consumer_thread = Thread.new do
co = q.build_consumer(:block => true) do |headers, message|
messages << message
sleep 0.1
end
consumer = co
q.subscribe_with(co, :block => true)
consumer_exited = true
end
publisher_thread = Thread.new do
20.times do
x.publish('hello world', :routing_key => q.name)
end
end
sleep 0.2
consumer.cancel
consumer_thread.join
publisher_thread.join
messages.should_not be_empty
consumer_exited.should be_true
end
end
end
|
966bf159c612692d059602153177d68676f29398 | src/script/lex_script_tool.rs | src/script/lex_script_tool.rs | use serde_json::*;
use super::super::lex::*;
///
/// Creates a lexing tool for the scripting language
///
pub fn create_lex_script_tool() -> StringLexingTool {
// Parse the lexer
let script_json = from_str::<Vec<LexToolSymbol>>(include_str!("syntax_lexer.json")).unwrap();
// The name isn't used here, but define it anyway
let lex_defn = LexToolInput {
new_tool_name: String::from("lex-script"),
symbols: script_json
};
// Create the lexing tool with this definition
StringLexingTool::from_lex_tool_input(&lex_defn)
}
| use serde_json::*;
use super::super::lex::*;
///
/// Creates a lexing tool for the scripting language
///
pub fn create_lex_script_tool() -> StringLexingTool {
// Parse the lexer
let script_json = from_str::<Vec<LexToolSymbol>>(include_str!("syntax_lexer.json")).unwrap();
// The name isn't used here, but define it anyway
let lex_defn = LexToolInput {
new_tool_name: String::from("lex-script"),
symbols: script_json
};
// Create the lexing tool with this definition
StringLexingTool::from_lex_tool_input(&lex_defn)
}
#[cfg(test)]
mod test {
use std::error::Error;
use super::*;
#[test]
fn can_parse_syntax_json() {
let script_json = from_str::<Value>(include_str!("syntax_lexer.json"));
if script_json.is_err() {
println!("{:?}", script_json);
println!("{:?}", script_json.unwrap_err().description());
assert!(false);
}
}
#[test]
fn json_can_be_deserialized() {
let script_json = from_str::<Vec<LexToolSymbol>>(include_str!("syntax_lexer.json"));
if script_json.is_err() {
println!("{:?}", script_json);
}
script_json.unwrap();
}
#[test]
fn can_create_tool() {
let _tool = create_lex_script_tool();
}
} | Add some tests to make sure we can parse the syntax JSON file | Add some tests to make sure we can parse the syntax JSON file
| Rust | apache-2.0 | Logicalshift/gossyp,Logicalshift/gossyp | rust | ## Code Before:
use serde_json::*;
use super::super::lex::*;
///
/// Creates a lexing tool for the scripting language
///
pub fn create_lex_script_tool() -> StringLexingTool {
// Parse the lexer
let script_json = from_str::<Vec<LexToolSymbol>>(include_str!("syntax_lexer.json")).unwrap();
// The name isn't used here, but define it anyway
let lex_defn = LexToolInput {
new_tool_name: String::from("lex-script"),
symbols: script_json
};
// Create the lexing tool with this definition
StringLexingTool::from_lex_tool_input(&lex_defn)
}
## Instruction:
Add some tests to make sure we can parse the syntax JSON file
## Code After:
use serde_json::*;
use super::super::lex::*;
///
/// Creates a lexing tool for the scripting language
///
pub fn create_lex_script_tool() -> StringLexingTool {
// Parse the lexer
let script_json = from_str::<Vec<LexToolSymbol>>(include_str!("syntax_lexer.json")).unwrap();
// The name isn't used here, but define it anyway
let lex_defn = LexToolInput {
new_tool_name: String::from("lex-script"),
symbols: script_json
};
// Create the lexing tool with this definition
StringLexingTool::from_lex_tool_input(&lex_defn)
}
#[cfg(test)]
mod test {
use std::error::Error;
use super::*;
#[test]
fn can_parse_syntax_json() {
let script_json = from_str::<Value>(include_str!("syntax_lexer.json"));
if script_json.is_err() {
println!("{:?}", script_json);
println!("{:?}", script_json.unwrap_err().description());
assert!(false);
}
}
#[test]
fn json_can_be_deserialized() {
let script_json = from_str::<Vec<LexToolSymbol>>(include_str!("syntax_lexer.json"));
if script_json.is_err() {
println!("{:?}", script_json);
}
script_json.unwrap();
}
#[test]
fn can_create_tool() {
let _tool = create_lex_script_tool();
}
} |
f4b6492ffd159416dc44a17966b7e340a4bc190d | helm/nats-operator/config/client-auth.json | helm/nats-operator/config/client-auth.json | {
"users": [
{{- if and (.Values.auth.username) (not .Values.auth.users) }}
{
"username": "{{ .Values.auth.username }}",
"password": "{{ .Values.auth.password }}"
}
{{- end }}
{{- if .Values.auth.users }}
{{ $length := len .Values.auth.users }}
{{- range $index, $user := .Values.auth.users }}
{
"username": "{{ $user.username }}",
"password": "{{ $user.password }}"
{{- if $user.permissions }},
"permissions": {{ toJson $user.permissions | replace "\\u003e" ">"}}
{{- end}}
}{{- if lt (add1 $index) $length }},{{ end }}
{{- end}}
{{- end }}
]{{- if .Values.auth.defaultPermissions }},
"default_permissions": {{ toJson .Values.auth.defaultPermissions | replace "\\u003e" ">" }}
{{- end}}
}
| {
"users": [
{{- if and (.Values.cluster.auth.username) (not .Values.cluster.auth.users) }}
{
"username": "{{ .Values.cluster.auth.username }}",
"password": "{{ .Values.cluster.auth.password }}"
}
{{- end }}
{{- if .Values.cluster.auth.users }}
{{ $length := len .Values.cluster.auth.users }}
{{- range $index, $user := .Values.cluster.auth.users }}
{
"username": "{{ $user.username }}",
"password": "{{ $user.password }}"
{{- if $user.permissions }},
"permissions": {{ toJson $user.permissions | replace "\\u003e" ">"}}
{{- end}}
}{{- if lt (add1 $index) $length }},{{ end }}
{{- end}}
{{- end }}
]{{- if .Values.cluster.auth.defaultPermissions }},
"default_permissions": {{ toJson .Values.cluster.auth.defaultPermissions | replace "\\u003e" ">" }}
{{- end}}
}
| Fix incorrect cluster auth values in config.json | Fix incorrect cluster auth values in config.json
| JSON | apache-2.0 | pires/nats-operator,nats-io/nats-operator,nats-io/nats-operator | json | ## Code Before:
{
"users": [
{{- if and (.Values.auth.username) (not .Values.auth.users) }}
{
"username": "{{ .Values.auth.username }}",
"password": "{{ .Values.auth.password }}"
}
{{- end }}
{{- if .Values.auth.users }}
{{ $length := len .Values.auth.users }}
{{- range $index, $user := .Values.auth.users }}
{
"username": "{{ $user.username }}",
"password": "{{ $user.password }}"
{{- if $user.permissions }},
"permissions": {{ toJson $user.permissions | replace "\\u003e" ">"}}
{{- end}}
}{{- if lt (add1 $index) $length }},{{ end }}
{{- end}}
{{- end }}
]{{- if .Values.auth.defaultPermissions }},
"default_permissions": {{ toJson .Values.auth.defaultPermissions | replace "\\u003e" ">" }}
{{- end}}
}
## Instruction:
Fix incorrect cluster auth values in config.json
## Code After:
{
"users": [
{{- if and (.Values.cluster.auth.username) (not .Values.cluster.auth.users) }}
{
"username": "{{ .Values.cluster.auth.username }}",
"password": "{{ .Values.cluster.auth.password }}"
}
{{- end }}
{{- if .Values.cluster.auth.users }}
{{ $length := len .Values.cluster.auth.users }}
{{- range $index, $user := .Values.cluster.auth.users }}
{
"username": "{{ $user.username }}",
"password": "{{ $user.password }}"
{{- if $user.permissions }},
"permissions": {{ toJson $user.permissions | replace "\\u003e" ">"}}
{{- end}}
}{{- if lt (add1 $index) $length }},{{ end }}
{{- end}}
{{- end }}
]{{- if .Values.cluster.auth.defaultPermissions }},
"default_permissions": {{ toJson .Values.cluster.auth.defaultPermissions | replace "\\u003e" ">" }}
{{- end}}
}
|
7eae7bf22e3ddbfeae58710445e125caf1c56228 | src/main/MediaCheck.java | src/main/MediaCheck.java | public class MediaCheck {
}
| import java.awt.Image;
import java.net.URL;
public class MediaCheck {
public static void media(URL url) {
Image image = null;
}
}
| Create a method which contains URL as argument and create an Image variable | Create a method which contains URL as argument and create an Image variable
| Java | apache-2.0 | milouk/Web_Crawler | java | ## Code Before:
public class MediaCheck {
}
## Instruction:
Create a method which contains URL as argument and create an Image variable
## Code After:
import java.awt.Image;
import java.net.URL;
public class MediaCheck {
public static void media(URL url) {
Image image = null;
}
}
|
adcda7d5cbd7d89f67e9c396fb73d118d42c9fc5 | crates/libcruby-sys/Cargo.toml | crates/libcruby-sys/Cargo.toml | [package]
name = "libcruby-sys"
version = "0.5.0-alpha-1"
authors = ["Godhuda <[email protected]>"]
build = "build.rs"
include = [
"**/*.rs",
"Cargo.toml",
"ruby_info.rb",
# Keep in sync with version
"helix-runtime-0-5-0-alpha-1.i386.lib",
"helix-runtime-0-5-0-alpha-1.x86_64.lib"
]
[dependencies]
libc = "*"
| [package]
name = "libcruby-sys"
version = "0.5.0-alpha-1"
authors = ["Godhuda <[email protected]>"]
description = "Ruby bindings"
license = "MIT/Apache-2.0"
build = "build.rs"
include = [
"**/*.rs",
"Cargo.toml",
"ruby_info.rb",
# Keep in sync with version
"helix-runtime-0-5-0-alpha-1.i386.lib",
"helix-runtime-0-5-0-alpha-1.x86_64.lib"
]
[dependencies]
libc = "*"
| Add description and license for libcruby-sys | Add description and license for libcruby-sys
| TOML | isc | tildeio/turboruby,tildeio/turboruby,tildeio/helix,tildeio/helix,tildeio/turboruby,tildeio/helix,tildeio/turboruby,tildeio/helix | toml | ## Code Before:
[package]
name = "libcruby-sys"
version = "0.5.0-alpha-1"
authors = ["Godhuda <[email protected]>"]
build = "build.rs"
include = [
"**/*.rs",
"Cargo.toml",
"ruby_info.rb",
# Keep in sync with version
"helix-runtime-0-5-0-alpha-1.i386.lib",
"helix-runtime-0-5-0-alpha-1.x86_64.lib"
]
[dependencies]
libc = "*"
## Instruction:
Add description and license for libcruby-sys
## Code After:
[package]
name = "libcruby-sys"
version = "0.5.0-alpha-1"
authors = ["Godhuda <[email protected]>"]
description = "Ruby bindings"
license = "MIT/Apache-2.0"
build = "build.rs"
include = [
"**/*.rs",
"Cargo.toml",
"ruby_info.rb",
# Keep in sync with version
"helix-runtime-0-5-0-alpha-1.i386.lib",
"helix-runtime-0-5-0-alpha-1.x86_64.lib"
]
[dependencies]
libc = "*"
|
1b6ec4e760cfc7903dc222e7a3bc52001e398354 | examples/sprite/README.md | examples/sprite/README.md |

## Description
This example displays how to construct a basic audio player using howler.js as the engine powering it. The only outside framework being used is [SiriWave.js](https://github.com/CaffeinaLab/SiriWaveJS) to provide some visual sugar.
This example visualizes how audio sprites work and demonstrates how they can be used within howler.js. Click on a sprite on the waveform to play it back and watch the progress (multiple clicks will overlap sounds).
* Audio sprite support with single audio file.
* Displays playback progress of sprites in real-time.
* Allows playback of multiple sprites on top of each other.
* Full-screen and responsive for desktop/mobile.
## How-To
1. Clone the git repo or download the source:
* **Clone:** `git clone https://github.com/goldfire/howler.js.git`
* **Download:** [https://github.com/goldfire/howler.js/archive/master.zip](https://github.com/goldfire/howler.js/archive/master.zip)
2. Open `index.html` in your favorite browser. |

## Description
This example visualizes how audio sprites work and demonstrates how they can be used within howler.js. Click on a sprite on the waveform to play it back and watch the progress (multiple clicks will overlap sounds).
* Audio sprite support with single audio file.
* Displays playback progress of sprites in real-time.
* Allows playback of multiple sprites on top of each other.
* Full-screen and responsive for desktop/mobile.
## How-To
1. Clone the git repo or download the source:
* **Clone:** `git clone https://github.com/goldfire/howler.js.git`
* **Download:** [https://github.com/goldfire/howler.js/archive/master.zip](https://github.com/goldfire/howler.js/archive/master.zip)
2. Open `index.html` in your favorite browser. | Fix description on audio sprite demo | Fix description on audio sprite demo
| Markdown | mit | goldfire/howler.js,goldfire/howler.js | markdown | ## Code Before:

## Description
This example displays how to construct a basic audio player using howler.js as the engine powering it. The only outside framework being used is [SiriWave.js](https://github.com/CaffeinaLab/SiriWaveJS) to provide some visual sugar.
This example visualizes how audio sprites work and demonstrates how they can be used within howler.js. Click on a sprite on the waveform to play it back and watch the progress (multiple clicks will overlap sounds).
* Audio sprite support with single audio file.
* Displays playback progress of sprites in real-time.
* Allows playback of multiple sprites on top of each other.
* Full-screen and responsive for desktop/mobile.
## How-To
1. Clone the git repo or download the source:
* **Clone:** `git clone https://github.com/goldfire/howler.js.git`
* **Download:** [https://github.com/goldfire/howler.js/archive/master.zip](https://github.com/goldfire/howler.js/archive/master.zip)
2. Open `index.html` in your favorite browser.
## Instruction:
Fix description on audio sprite demo
## Code After:

## Description
This example visualizes how audio sprites work and demonstrates how they can be used within howler.js. Click on a sprite on the waveform to play it back and watch the progress (multiple clicks will overlap sounds).
* Audio sprite support with single audio file.
* Displays playback progress of sprites in real-time.
* Allows playback of multiple sprites on top of each other.
* Full-screen and responsive for desktop/mobile.
## How-To
1. Clone the git repo or download the source:
* **Clone:** `git clone https://github.com/goldfire/howler.js.git`
* **Download:** [https://github.com/goldfire/howler.js/archive/master.zip](https://github.com/goldfire/howler.js/archive/master.zip)
2. Open `index.html` in your favorite browser. |
3478e524eea0f963a3e30122c07bb691e1a62269 | README.md | README.md | An experimental Node-based test262 harness. Once this harness has stabilized, I plan to push to include it by default in official test262.
## Quick Start
1. `git clone https://github.com/tc39/test262.git --depth 1`
2. `npm install -g test262-harness`
3. `test262-harness test262\test\**\*.js --hostType node --hostPath <path to node.exe>`
Run `test262-harness --help` for details on the various configuration options.
## Options
| Name | Action |
|------------|---------------|
| --hostType | Type of host to run tests in. See [eshost's supported hosts](https://github.com/bterlson/eshost#supported-hosts) for available options.
| --hostPath | Path to the host executable.
| --hostArgs | Any additional arguments to pass to the host when invoking it (eg. --harmony, --es6all, etc).
| -t, --threads | Run this many tests in parallel. Note that the browser runners don't work great with t > 1.
| -r, --reporter | Selects test case result format. Currently either `json` or `simple`. Default `simple`.
|--test262Dir | Optional. Root test262 directory and is used to locate the includes directory.
|--includesDir | Includes directory. By default inferred from test262Dir or else detected by walking upward from the first test found.
| An experimental Node-based test262 harness. Once this harness has stabilized, I plan to push to include it by default in official test262.
Requires Node 6 or above.
## Quick Start
1. `git clone https://github.com/tc39/test262.git --depth 1`
2. `npm install -g test262-harness`
3. `test262-harness test262\test\**\*.js --hostType node --hostPath <path to node.exe>`
Run `test262-harness --help` for details on the various configuration options.
## Options
| Name | Action |
|------------|---------------|
| --hostType | Type of host to run tests in. See [eshost's supported hosts](https://github.com/bterlson/eshost#supported-hosts) for available options.
| --hostPath | Path to the host executable.
| --hostArgs | Any additional arguments to pass to the host when invoking it (eg. --harmony, --es6all, etc).
| -t, --threads | Run this many tests in parallel. Note that the browser runners don't work great with t > 1.
| -r, --reporter | Selects test case result format. Currently either `json` or `simple`. Default `simple`.
|--test262Dir | Optional. Root test262 directory and is used to locate the includes directory.
|--includesDir | Includes directory. By default inferred from test262Dir or else detected by walking upward from the first test found.
| Add readme note about Node 6 requirement | Add readme note about Node 6 requirement | Markdown | bsd-3-clause | bterlson/test262-harness | markdown | ## Code Before:
An experimental Node-based test262 harness. Once this harness has stabilized, I plan to push to include it by default in official test262.
## Quick Start
1. `git clone https://github.com/tc39/test262.git --depth 1`
2. `npm install -g test262-harness`
3. `test262-harness test262\test\**\*.js --hostType node --hostPath <path to node.exe>`
Run `test262-harness --help` for details on the various configuration options.
## Options
| Name | Action |
|------------|---------------|
| --hostType | Type of host to run tests in. See [eshost's supported hosts](https://github.com/bterlson/eshost#supported-hosts) for available options.
| --hostPath | Path to the host executable.
| --hostArgs | Any additional arguments to pass to the host when invoking it (eg. --harmony, --es6all, etc).
| -t, --threads | Run this many tests in parallel. Note that the browser runners don't work great with t > 1.
| -r, --reporter | Selects test case result format. Currently either `json` or `simple`. Default `simple`.
|--test262Dir | Optional. Root test262 directory and is used to locate the includes directory.
|--includesDir | Includes directory. By default inferred from test262Dir or else detected by walking upward from the first test found.
## Instruction:
Add readme note about Node 6 requirement
## Code After:
An experimental Node-based test262 harness. Once this harness has stabilized, I plan to push to include it by default in official test262.
Requires Node 6 or above.
## Quick Start
1. `git clone https://github.com/tc39/test262.git --depth 1`
2. `npm install -g test262-harness`
3. `test262-harness test262\test\**\*.js --hostType node --hostPath <path to node.exe>`
Run `test262-harness --help` for details on the various configuration options.
## Options
| Name | Action |
|------------|---------------|
| --hostType | Type of host to run tests in. See [eshost's supported hosts](https://github.com/bterlson/eshost#supported-hosts) for available options.
| --hostPath | Path to the host executable.
| --hostArgs | Any additional arguments to pass to the host when invoking it (eg. --harmony, --es6all, etc).
| -t, --threads | Run this many tests in parallel. Note that the browser runners don't work great with t > 1.
| -r, --reporter | Selects test case result format. Currently either `json` or `simple`. Default `simple`.
|--test262Dir | Optional. Root test262 directory and is used to locate the includes directory.
|--includesDir | Includes directory. By default inferred from test262Dir or else detected by walking upward from the first test found.
|
c9a11a6f307a5a994806aa82be3a609496c30930 | packages/hm/hmatrix-nlopt.yaml | packages/hm/hmatrix-nlopt.yaml | homepage: https://github.com/peddie/hmatrix-nlopt
changelog-type: markdown
hash: 99f8435087901aa0565e220db29081c0c7f613a18cb21f14f5bd416ba81f3646
test-bench-deps:
base: ! '>=4 && <5'
doctest: ! '>=0.11'
maintainer: [email protected]
synopsis: Interface HMatrix with the NLOPT minimizer
changelog: ! '# Revision history for hmatrix-nlopt
## 0.1.0.0 -- 2017-03-27
* First version. Examples work.
'
basic-deps:
base: ! '>=4.9 && <4.10'
nlopt-haskell: ! '>=0.1.0.0'
hmatrix: ! '>=0.17'
vector: ! '>=0.10'
all-versions:
- '0.1.0.0'
author: Matthew Peddie
latest: '0.1.0.0'
description-type: haddock
description: ! 'A high-level interface to
<http://ab-initio.mit.edu/wiki/index.php/NLopt the NLOPT solvers>
using @hmatrix@ vectors and matrices.'
license-name: BSD3
| homepage: https://github.com/peddie/hmatrix-nlopt
changelog-type: markdown
hash: febca7b14eff2870ae1aa428497f1a050a6362ca40c7cd7d9217b05753d3893a
test-bench-deps:
base: ! '>=4 && <5'
doctest: ! '>=0.11'
maintainer: [email protected]
synopsis: Interface HMatrix with the NLOPT minimizer
changelog: ! '# Revision history for hmatrix-nlopt
## 0.1.0.0 -- 2017-03-27
* First version. Examples work.
'
basic-deps:
base: ! '>=4.9 && <4.11'
nlopt-haskell: ! '>=0.1.1.0'
hmatrix: ! '>=0.17'
vector: ! '>=0.10'
all-versions:
- '0.1.0.0'
- '0.1.1.0'
author: Matthew Peddie
latest: '0.1.1.0'
description-type: haddock
description: ! 'A high-level interface to
<http://ab-initio.mit.edu/wiki/index.php/NLopt the NLOPT solvers>
using @hmatrix@ vectors and matrices.'
license-name: BSD3
| Update from Hackage at 2018-01-21T06:57:46Z | Update from Hackage at 2018-01-21T06:57:46Z
| YAML | mit | commercialhaskell/all-cabal-metadata | yaml | ## Code Before:
homepage: https://github.com/peddie/hmatrix-nlopt
changelog-type: markdown
hash: 99f8435087901aa0565e220db29081c0c7f613a18cb21f14f5bd416ba81f3646
test-bench-deps:
base: ! '>=4 && <5'
doctest: ! '>=0.11'
maintainer: [email protected]
synopsis: Interface HMatrix with the NLOPT minimizer
changelog: ! '# Revision history for hmatrix-nlopt
## 0.1.0.0 -- 2017-03-27
* First version. Examples work.
'
basic-deps:
base: ! '>=4.9 && <4.10'
nlopt-haskell: ! '>=0.1.0.0'
hmatrix: ! '>=0.17'
vector: ! '>=0.10'
all-versions:
- '0.1.0.0'
author: Matthew Peddie
latest: '0.1.0.0'
description-type: haddock
description: ! 'A high-level interface to
<http://ab-initio.mit.edu/wiki/index.php/NLopt the NLOPT solvers>
using @hmatrix@ vectors and matrices.'
license-name: BSD3
## Instruction:
Update from Hackage at 2018-01-21T06:57:46Z
## Code After:
homepage: https://github.com/peddie/hmatrix-nlopt
changelog-type: markdown
hash: febca7b14eff2870ae1aa428497f1a050a6362ca40c7cd7d9217b05753d3893a
test-bench-deps:
base: ! '>=4 && <5'
doctest: ! '>=0.11'
maintainer: [email protected]
synopsis: Interface HMatrix with the NLOPT minimizer
changelog: ! '# Revision history for hmatrix-nlopt
## 0.1.0.0 -- 2017-03-27
* First version. Examples work.
'
basic-deps:
base: ! '>=4.9 && <4.11'
nlopt-haskell: ! '>=0.1.1.0'
hmatrix: ! '>=0.17'
vector: ! '>=0.10'
all-versions:
- '0.1.0.0'
- '0.1.1.0'
author: Matthew Peddie
latest: '0.1.1.0'
description-type: haddock
description: ! 'A high-level interface to
<http://ab-initio.mit.edu/wiki/index.php/NLopt the NLOPT solvers>
using @hmatrix@ vectors and matrices.'
license-name: BSD3
|
cf10724e09c0a5cb58b516ae5d36ea3bf29bbe38 | gulpfile.js | gulpfile.js | var gulp = require('gulp')
var exec = require('child_process').exec
gulp.task('make', function() {
exec('make', function(err, stdout, stderr) {
console.log(stdout);
console.log(stderr);
});
})
gulp.task('watch', function () {
gulp.watch(['docs/*'], ['make'])
})
| var gulp = require('gulp')
exec = require('child_process').exec
livereload = require('gulp-livereload');
function update() {
exec('make', function(err, stdout, stderr) {
console.log(stdout);
console.log(stderr);
livereload.changed();
});
}
gulp.task('watch', function () {
livereload.listen();
gulp.watch(['docs/*']).on('change', update);
})
| Add livereload to gulp setup | Add livereload to gulp setup
| JavaScript | mit | brentshields/authoring | javascript | ## Code Before:
var gulp = require('gulp')
var exec = require('child_process').exec
gulp.task('make', function() {
exec('make', function(err, stdout, stderr) {
console.log(stdout);
console.log(stderr);
});
})
gulp.task('watch', function () {
gulp.watch(['docs/*'], ['make'])
})
## Instruction:
Add livereload to gulp setup
## Code After:
var gulp = require('gulp')
exec = require('child_process').exec
livereload = require('gulp-livereload');
function update() {
exec('make', function(err, stdout, stderr) {
console.log(stdout);
console.log(stderr);
livereload.changed();
});
}
gulp.task('watch', function () {
livereload.listen();
gulp.watch(['docs/*']).on('change', update);
})
|
0d401bd3c76b31326a20944776132fa87ffd28d8 | requirements.txt | requirements.txt | beautifulsoup4==4.4.1
CacheControl==0.11.6
click==6.6
emoji==0.3.9
google-api-python-client==1.5.1
httplib2==0.9.2
imdbpie==4.0.2
lockfile==0.12.2
oauth2client==2.2.0
pyasn1==0.1.9
pyasn1-modules==0.0.8
pylast==1.6.0
pyTelegramBotAPI==2.1.5
requests==2.10.0
rsa==3.4.2
simplejson==3.8.2
six==1.10.0
tinydb==3.2.1
uptime==3.0.1
uritemplate==0.6
| beautifulsoup4==4.4.1
CacheControl==0.11.6
click==6.6
emoji==0.3.9
google-api-python-client==1.5.1
httplib2==0.9.2
imdbpie==4.0.2
lockfile==0.12.2
oauth2client==2.2.0
pyasn1==0.1.9
pyasn1-modules==0.0.8
pylast==1.6.0
pyTelegramBotAPI==2.1.5
requests==2.10.0
rsa==3.4.2
simplejson==3.8.2
six==1.10.0
tinydb==3.2.1
ujson==1.35
uptime==3.0.1
uritemplate==0.6
| Use ujson to speed up tinydb | Use ujson to speed up tinydb
| Text | agpl-3.0 | TheReverend403/Pyper,TheReverend403/Pyper | text | ## Code Before:
beautifulsoup4==4.4.1
CacheControl==0.11.6
click==6.6
emoji==0.3.9
google-api-python-client==1.5.1
httplib2==0.9.2
imdbpie==4.0.2
lockfile==0.12.2
oauth2client==2.2.0
pyasn1==0.1.9
pyasn1-modules==0.0.8
pylast==1.6.0
pyTelegramBotAPI==2.1.5
requests==2.10.0
rsa==3.4.2
simplejson==3.8.2
six==1.10.0
tinydb==3.2.1
uptime==3.0.1
uritemplate==0.6
## Instruction:
Use ujson to speed up tinydb
## Code After:
beautifulsoup4==4.4.1
CacheControl==0.11.6
click==6.6
emoji==0.3.9
google-api-python-client==1.5.1
httplib2==0.9.2
imdbpie==4.0.2
lockfile==0.12.2
oauth2client==2.2.0
pyasn1==0.1.9
pyasn1-modules==0.0.8
pylast==1.6.0
pyTelegramBotAPI==2.1.5
requests==2.10.0
rsa==3.4.2
simplejson==3.8.2
six==1.10.0
tinydb==3.2.1
ujson==1.35
uptime==3.0.1
uritemplate==0.6
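The record above adds `ujson` on the assumption that a faster JSON serializer speeds up TinyDB's storage layer; whether the pinned TinyDB 3.x release picks `ujson` up automatically is version-dependent and not shown in the diff. A minimal, self-contained sketch comparing the two serializers on a TinyDB-shaped payload (the payload shape and loop counts are illustrative, not from the source repo):

```python
import json
import time

import ujson  # same dumps/loads API as the stdlib for plain dicts and lists

# Payload shaped like a small TinyDB database: table -> doc_id -> document.
payload = {"_default": {str(i): {"name": "user%d" % i, "score": i} for i in range(10000)}}

def bench(dumps, loads, label):
    # Round-trip the payload a few times and report wall-clock time.
    start = time.perf_counter()
    for _ in range(20):
        loads(dumps(payload))
    print("%s: %.3fs" % (label, time.perf_counter() - start))

bench(json.dumps, json.loads, "stdlib json")
bench(ujson.dumps, ujson.loads, "ujson")
```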
|
893f008addf91e72b5f27ac428a660d1fef5e09f | src/main/com/roly/nfc/crypto/view/nfc/KeyPickerDialogFragment.java | src/main/com/roly/nfc/crypto/view/nfc/KeyPickerDialogFragment.java | package com.roly.nfc.crypto.view.nfc;
import android.app.AlertDialog;
import android.app.Dialog;
import android.content.DialogInterface;
import android.os.Bundle;
import android.support.v4.app.DialogFragment;
import android.view.LayoutInflater;
import android.view.View;
import com.roly.nfc.crypto.R;
public class KeyPickerDialogFragment extends DialogFragment {
public static final int KEY_NOT_RETRIEVED=-1;
public static final int KEY_RETRIEVED=1;
@Override
public Dialog onCreateDialog(Bundle savedInstanceState) {
AlertDialog.Builder builder = new AlertDialog.Builder(getActivity());
LayoutInflater inflater = getActivity().getLayoutInflater();
View view = inflater.inflate(R.layout.tag_handler, null);
builder.setView(view)
.setTitle("New key")
.setNegativeButton(android.R.string.cancel, new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int id) {
getDialog().cancel();
}
});
return builder.create();
}
@Override
public void onStart() {
super.onStart();
getDialog().getWindow().setWindowAnimations(R.style.dialog_animation);
}
}
| package com.roly.nfc.crypto.view.nfc;
import android.app.AlertDialog;
import android.app.Dialog;
import android.content.DialogInterface;
import android.os.Bundle;
import android.support.v4.app.DialogFragment;
import android.view.LayoutInflater;
import android.view.View;
import com.roly.nfc.crypto.R;
public class KeyPickerDialogFragment extends DialogFragment {
public static final int KEY_NOT_RETRIEVED=-1;
public static final int KEY_RETRIEVED=1;
@Override
public Dialog onCreateDialog(Bundle savedInstanceState) {
AlertDialog.Builder builder = new AlertDialog.Builder(getActivity());
LayoutInflater inflater = getActivity().getLayoutInflater();
View view = inflater.inflate(R.layout.tag_handler, null);
builder.setView(view)
.setNegativeButton(android.R.string.cancel, new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int id) {
getDialog().cancel();
}
});
return builder.create();
}
@Override
public void onStart() {
super.onStart();
getDialog().getWindow().setWindowAnimations(R.style.dialog_animation);
}
}
| Remove title in Key Picker dialog | Remove title in Key Picker dialog
| Java | apache-2.0 | OlivierGonthier/CryptoNFC | java | ## Code Before:
package com.roly.nfc.crypto.view.nfc;
import android.app.AlertDialog;
import android.app.Dialog;
import android.content.DialogInterface;
import android.os.Bundle;
import android.support.v4.app.DialogFragment;
import android.view.LayoutInflater;
import android.view.View;
import com.roly.nfc.crypto.R;
public class KeyPickerDialogFragment extends DialogFragment {
public static final int KEY_NOT_RETRIEVED=-1;
public static final int KEY_RETRIEVED=1;
@Override
public Dialog onCreateDialog(Bundle savedInstanceState) {
AlertDialog.Builder builder = new AlertDialog.Builder(getActivity());
LayoutInflater inflater = getActivity().getLayoutInflater();
View view = inflater.inflate(R.layout.tag_handler, null);
builder.setView(view)
.setTitle("New key")
.setNegativeButton(android.R.string.cancel, new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int id) {
getDialog().cancel();
}
});
return builder.create();
}
@Override
public void onStart() {
super.onStart();
getDialog().getWindow().setWindowAnimations(R.style.dialog_animation);
}
}
## Instruction:
Remove title in Key Picker dialog
## Code After:
package com.roly.nfc.crypto.view.nfc;
import android.app.AlertDialog;
import android.app.Dialog;
import android.content.DialogInterface;
import android.os.Bundle;
import android.support.v4.app.DialogFragment;
import android.view.LayoutInflater;
import android.view.View;
import com.roly.nfc.crypto.R;
public class KeyPickerDialogFragment extends DialogFragment {
public static final int KEY_NOT_RETRIEVED=-1;
public static final int KEY_RETRIEVED=1;
@Override
public Dialog onCreateDialog(Bundle savedInstanceState) {
AlertDialog.Builder builder = new AlertDialog.Builder(getActivity());
LayoutInflater inflater = getActivity().getLayoutInflater();
View view = inflater.inflate(R.layout.tag_handler, null);
builder.setView(view)
.setNegativeButton(android.R.string.cancel, new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int id) {
getDialog().cancel();
}
});
return builder.create();
}
@Override
public void onStart() {
super.onStart();
getDialog().getWindow().setWindowAnimations(R.style.dialog_animation);
}
}
|
32ddd25507dc34971b958e926ba0bde704dc6bfa | conf_site/templates/cms/venue_page.html | conf_site/templates/cms/venue_page.html | {% extends "base.html" %}
{% load wagtailcore_tags wagtailimages_tags %}
{% block body %}
<div class="container sec1-inner-page">
{% if page.background_image %}
<div id="sli" class="text-center">
{% image page.background_image original %}
</div>
{% endif %}
<section>
<div class="container" id="sec1">
{% include_block page.venue_info_section %}
</div>
</section>
{% if page.google_maps_url %}
<section>
<div class="col-xs-12 map-canvas text-center" style="background-color:#fff">
<iframe src="{{ page.google_maps_url }}" width="100%" height="600"
frameborder="0" style="border:0" allowfullscreen=""></iframe>
</div>
</section>
{% endif %}
<section>
<div class="container" id="sec1">
{% include_block page.hotel_info_section %}
</div>
</section>
</div>
{% endblock %}
| {% extends "base.html" %}
{% load wagtailcore_tags wagtailimages_tags %}
{% block body %}
<div class="container sec1-inner-page">
{% if page.background_image %}
<div id="sli" class="text-center">
{% image page.background_image original %}
</div>
{% endif %}
<section>
<div class="container">
{% include_block page.venue_info_section %}
</div>
</section>
{% if page.google_maps_url %}
<section>
<div class="col-xs-12 map-canvas text-center" style="background-color:#fff">
<iframe src="{{ page.google_maps_url }}" width="100%" height="600"
frameborder="0" style="border:0" allowfullscreen=""></iframe>
</div>
</section>
{% endif %}
<section>
<div class="container">
{% include_block page.hotel_info_section %}
</div>
</section>
</div>
{% endblock %}
| Fix ability to click on venue page Google map. | Fix ability to click on venue page Google map.
Ensure that users can interact with the embedded Google map by ensuring
that the hotel information section does not appear on top of it. This
was happening because the hotel information section used the "sec1" ID,
which has a higher z-index than default.
Fix this by removing the troublesome ID from both the venue and hotel
information sections.
| HTML | mit | pydata/conf_site,pydata/conf_site,pydata/conf_site | html | ## Code Before:
{% extends "base.html" %}
{% load wagtailcore_tags wagtailimages_tags %}
{% block body %}
<div class="container sec1-inner-page">
{% if page.background_image %}
<div id="sli" class="text-center">
{% image page.background_image original %}
</div>
{% endif %}
<section>
<div class="container" id="sec1">
{% include_block page.venue_info_section %}
</div>
</section>
{% if page.google_maps_url %}
<section>
<div class="col-xs-12 map-canvas text-center" style="background-color:#fff">
<iframe src="{{ page.google_maps_url }}" width="100%" height="600"
frameborder="0" style="border:0" allowfullscreen=""></iframe>
</div>
</section>
{% endif %}
<section>
<div class="container" id="sec1">
{% include_block page.hotel_info_section %}
</div>
</section>
</div>
{% endblock %}
## Instruction:
Fix ability to click on venue page Google map.
Ensure that users can interact with the embedded Google map by ensuring
that the hotel information section does not appear on top of it. This
was happening because the hotel information section used the "sec1" ID,
which has a higher z-index than default.
Fix this by removing the troublesome ID from both the venue and hotel
information sections.
## Code After:
{% extends "base.html" %}
{% load wagtailcore_tags wagtailimages_tags %}
{% block body %}
<div class="container sec1-inner-page">
{% if page.background_image %}
<div id="sli" class="text-center">
{% image page.background_image original %}
</div>
{% endif %}
<section>
<div class="container">
{% include_block page.venue_info_section %}
</div>
</section>
{% if page.google_maps_url %}
<section>
<div class="col-xs-12 map-canvas text-center" style="background-color:#fff">
<iframe src="{{ page.google_maps_url }}" width="100%" height="600"
frameborder="0" style="border:0" allowfullscreen=""></iframe>
</div>
</section>
{% endif %}
<section>
<div class="container">
{% include_block page.hotel_info_section %}
</div>
</section>
</div>
{% endblock %}
|
bc9ae363532f98ae971a5087b82f48927998516e | script.js | script.js | //On load, .contentBox should contain question #1
// When user enters Answer and hits submit, ask if
//answer matches stored Answer
// An array containing questions and answers
//
var questionBank = ["Q1","Q2","Q3","Q4","Q5","Q6","Q7","Q8","Q9","Q10"];
var answerBank = ["A1","A2","A3","A4","A5","A6","A7","A8","A9","A10"];
$(document).ready()
var submitButton = $("#submitInput");
var displayQuestion = $("#card");
var nextButton = $("#nextButton");
// var currentQuestion = questionBank[0]
var currentIndex = -1;
nextButton.on("click", function(){
displayQuestion.html(questionBank[currentIndex + 1]);
currentIndex++;
if(currentIndex === 9) {
currentIndex = -1;
}
});
submitButton.on("click", function(){
var userInput = $("#answerInput").val()
if (userInput === answerBank[0]) {
displayQuestion.html(questionBank())
}
});
//
// questionBank.forEach(function(currentQuestion) {
// console.log(currentQuestion)
// });
// //flipping card
// $("#card").flip({
// axis: 'x',
// trigger: 'hover'
// });
| //On load, .contentBox should contain question #1
// When user enters Answer and hits submit, ask if
//answer matches stored Answer
// An array containing questions and answers
//
var questionAndAnswers ={
questionBank: ["Q1","Q2","Q3","Q4","Q5","Q6","Q7","Q8","Q9","Q10"],
answerBank: ["A1","A2","A3","A4","A5","A6","A7","A8","A9","A10"]
};
$(document).ready()
var submitButton = $("#submitInput");
var displayQuestion = $("#card");
var nextButton = $("#nextButton");
// var currentQuestion = questionBank[0]
var currentIndex = -1;
nextButton.on("click", function(){
displayQuestion.html(questionAndAnswers.questionBank[currentIndex + 1]);
currentIndex++;
if(currentIndex === 9) {
currentIndex = -1;
}
});
submitButton.on("click", function(){
var userInput = $("#answerInput").val()
if (userInput === answerBank[0]) {
displayQuestion.html(questionBank())
}
});
//
// questionBank.forEach(function(currentQuestion) {
// console.log(currentQuestion)
// });
// //flipping card
// $("#card").flip({
// axis: 'x',
// trigger: 'hover'
// });
| Transform Q and A variables to an Object | Transform Q and A variables to an Object
| JavaScript | mit | meagdoh/WDI-Project-1,meagdoh/WDI-Project-1 | javascript | ## Code Before:
//On load, .contentBox should contain question #1
// When user enters Answer and hits submit, ask if
//answer matches stored Answer
// An array containing questions and answers
//
var questionBank = ["Q1","Q2","Q3","Q4","Q5","Q6","Q7","Q8","Q9","Q10"];
var answerBank = ["A1","A2","A3","A4","A5","A6","A7","A8","A9","A10"];
$(document).ready()
var submitButton = $("#submitInput");
var displayQuestion = $("#card");
var nextButton = $("#nextButton");
// var currentQuestion = questionBank[0]
var currentIndex = -1;
nextButton.on("click", function(){
displayQuestion.html(questionBank[currentIndex + 1]);
currentIndex++;
if(currentIndex === 9) {
currentIndex = -1;
}
});
submitButton.on("click", function(){
var userInput = $("#answerInput").val()
if (userInput === answerBank[0]) {
displayQuestion.html(questionBank())
}
});
//
// questionBank.forEach(function(currentQuestion) {
// console.log(currentQuestion)
// });
// //flipping card
// $("#card").flip({
// axis: 'x',
// trigger: 'hover'
// });
## Instruction:
Transform Q and A variables to an Object
## Code After:
//On load, .contentBox should contain question #1
// When user enters Answer and hits submit, ask if
//answer matches stored Answer
// An array containing questions and answers
//
var questionAndAnswers ={
questionBank: ["Q1","Q2","Q3","Q4","Q5","Q6","Q7","Q8","Q9","Q10"],
answerBank: ["A1","A2","A3","A4","A5","A6","A7","A8","A9","A10"]
};
$(document).ready()
var submitButton = $("#submitInput");
var displayQuestion = $("#card");
var nextButton = $("#nextButton");
// var currentQuestion = questionBank[0]
var currentIndex = -1;
nextButton.on("click", function(){
displayQuestion.html(questionAndAnswers.questionBank[currentIndex + 1]);
currentIndex++;
if(currentIndex === 9) {
currentIndex = -1;
}
});
submitButton.on("click", function(){
var userInput = $("#answerInput").val()
if (userInput === answerBank[0]) {
displayQuestion.html(questionBank())
}
});
//
// questionBank.forEach(function(currentQuestion) {
// console.log(currentQuestion)
// });
// //flipping card
// $("#card").flip({
// axis: 'x',
// trigger: 'hover'
// });
|
7d76b44c371d74cb8c7b272fd9bf8021db6c6702 | qa/rpc-tests/test_framework/cashlib/__init__.py | qa/rpc-tests/test_framework/cashlib/__init__.py |
from test_framework.cashlib.cashlib import init, bin2hex, signTxInput, randombytes, pubkey, spendscript, addrbin, txid, SIGHASH_ALL, SIGHASH_NONE, SIGHASH_SINGLE, SIGHASH_FORKID, SIGHASH_ANYONECANPAY
|
from .cashlib import init, bin2hex, signTxInput, randombytes, pubkey, spendscript, addrbin, txid, SIGHASH_ALL, SIGHASH_NONE, SIGHASH_SINGLE, SIGHASH_FORKID, SIGHASH_ANYONECANPAY
| Use relative import for cashlib | Use relative import for cashlib
| Python | mit | BitcoinUnlimited/BitcoinUnlimited,Justaphf/BitcoinUnlimited,BitcoinUnlimited/BitcoinUnlimited,BitcoinUnlimited/BitcoinUnlimited,BitcoinUnlimited/BitcoinUnlimited,Justaphf/BitcoinUnlimited,Justaphf/BitcoinUnlimited,Justaphf/BitcoinUnlimited,BitcoinUnlimited/BitcoinUnlimited,BitcoinUnlimited/BitcoinUnlimited,Justaphf/BitcoinUnlimited,Justaphf/BitcoinUnlimited | python | ## Code Before:
from test_framework.cashlib.cashlib import init, bin2hex, signTxInput, randombytes, pubkey, spendscript, addrbin, txid, SIGHASH_ALL, SIGHASH_NONE, SIGHASH_SINGLE, SIGHASH_FORKID, SIGHASH_ANYONECANPAY
## Instruction:
Use relative import for cashlib
## Code After:
from .cashlib import init, bin2hex, signTxInput, randombytes, pubkey, spendscript, addrbin, txid, SIGHASH_ALL, SIGHASH_NONE, SIGHASH_SINGLE, SIGHASH_FORKID, SIGHASH_ANYONECANPAY
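The change above swaps the absolute `test_framework.cashlib.cashlib` path for the relative `.cashlib` form, so the package's re-export keeps working regardless of which top-level name the `test_framework` tree is imported under. A minimal sketch of the same pattern with made-up module names (`mypkg`, `core`, and the `init` body are illustrative, not from the source tree):

```python
# mypkg/core.py
def init():
    return "initialized"

# mypkg/__init__.py
# An absolute import ties the package to one specific top-level name:
#   from mypkg.core import init
# The relative form resolves against the current package instead:
from .core import init

# main.py
from mypkg import init

print(init())  # -> "initialized"
```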
|
f97c3b6a837ae76cd705c6cf5bdf23863891795c | cmake/CMakeLists.txt | cmake/CMakeLists.txt |
SET(OPENSIM_INSTALL_CMAKE_DIR cmake)
CONFIGURE_FILE(OpenSimConfig.cmake.in
"${CMAKE_CURRENT_BINARY_DIR}/OpenSimConfig.cmake" @ONLY)
# Version file.
include(CMakePackageConfigHelpers)
WRITE_BASIC_CONFIG_VERSION_FILE(
"${CMAKE_CURRENT_BINARY_DIR}/OpenSimConfigVersion.cmake"
VERSION "${OPENSIM_VERSION}"
COMPATIBILITY SameMajorVersion)
INSTALL(
FILES
"${CMAKE_CURRENT_BINARY_DIR}/OpenSimConfig.cmake"
"${CMAKE_CURRENT_BINARY_DIR}/OpenSimConfigVersion.cmake"
DESTINATION
"${OPENSIM_INSTALL_CMAKE_DIR}"
)
install(EXPORT OpenSimTargets DESTINATION "${OPENSIM_INSTALL_CMAKE_DIR}")
|
SET(OPENSIM_INSTALL_CMAKE_DIR cmake)
CONFIGURE_FILE(OpenSimConfig.cmake.in
"${CMAKE_CURRENT_BINARY_DIR}/OpenSimConfig.cmake" @ONLY)
# Version file.
include(WriteBasicConfigVersionFile)
WRITE_BASIC_CONFIG_VERSION_FILE(
"${CMAKE_CURRENT_BINARY_DIR}/OpenSimConfigVersion.cmake"
VERSION "${OPENSIM_VERSION}"
COMPATIBILITY SameMajorVersion)
INSTALL(
FILES
"${CMAKE_CURRENT_BINARY_DIR}/OpenSimConfig.cmake"
"${CMAKE_CURRENT_BINARY_DIR}/OpenSimConfigVersion.cmake"
DESTINATION
"${OPENSIM_INSTALL_CMAKE_DIR}"
)
install(EXPORT OpenSimTargets DESTINATION "${OPENSIM_INSTALL_CMAKE_DIR}")
| Use different macro for ConfigVersion. | [cmake] Use different macro for ConfigVersion. | Text | apache-2.0 | opensim-org/opensim-core,opensim-org/opensim-core,opensim-org/opensim-core,opensim-org/opensim-core,opensim-org/opensim-core,opensim-org/opensim-core,opensim-org/opensim-core | text | ## Code Before:
SET(OPENSIM_INSTALL_CMAKE_DIR cmake)
CONFIGURE_FILE(OpenSimConfig.cmake.in
"${CMAKE_CURRENT_BINARY_DIR}/OpenSimConfig.cmake" @ONLY)
# Version file.
include(CMakePackageConfigHelpers)
WRITE_BASIC_CONFIG_VERSION_FILE(
"${CMAKE_CURRENT_BINARY_DIR}/OpenSimConfigVersion.cmake"
VERSION "${OPENSIM_VERSION}"
COMPATIBILITY SameMajorVersion)
INSTALL(
FILES
"${CMAKE_CURRENT_BINARY_DIR}/OpenSimConfig.cmake"
"${CMAKE_CURRENT_BINARY_DIR}/OpenSimConfigVersion.cmake"
DESTINATION
"${OPENSIM_INSTALL_CMAKE_DIR}"
)
install(EXPORT OpenSimTargets DESTINATION "${OPENSIM_INSTALL_CMAKE_DIR}")
## Instruction:
[cmake] Use different macro for ConfigVersion.
## Code After:
SET(OPENSIM_INSTALL_CMAKE_DIR cmake)
CONFIGURE_FILE(OpenSimConfig.cmake.in
"${CMAKE_CURRENT_BINARY_DIR}/OpenSimConfig.cmake" @ONLY)
# Version file.
include(WriteBasicConfigVersionFile)
WRITE_BASIC_CONFIG_VERSION_FILE(
"${CMAKE_CURRENT_BINARY_DIR}/OpenSimConfigVersion.cmake"
VERSION "${OPENSIM_VERSION}"
COMPATIBILITY SameMajorVersion)
INSTALL(
FILES
"${CMAKE_CURRENT_BINARY_DIR}/OpenSimConfig.cmake"
"${CMAKE_CURRENT_BINARY_DIR}/OpenSimConfigVersion.cmake"
DESTINATION
"${OPENSIM_INSTALL_CMAKE_DIR}"
)
install(EXPORT OpenSimTargets DESTINATION "${OPENSIM_INSTALL_CMAKE_DIR}")
|
3bc0f2befa50b9326b8e78dcca331adad38c21e0 | src/inventory_report/sqls/storage_create_related_lookup_tables.sql | src/inventory_report/sqls/storage_create_related_lookup_tables.sql | CREATE table storage_type_temp (
id NUMERIC, text varchar
);
INSERT INTO storage_type_temp VALUES
(0, 'UNKNOWN'),
(1, 'NFS'),
(2, 'FCP'),
(3, 'ISCSI'),
(4, 'LOCALFS'),
(6, 'POSIXFS'),
(7, 'GLUSTERFS'),
(8, 'GLANCE'),
(9, 'CINDER');
CREATE table storage_domain_type_temp (
id NUMERIC, text varchar
);
INSERT into storage_domain_type_temp VALUES
(0, 'Master'),
(1, 'Data'),
(2, 'ISO'),
(3, 'ImportExport'),
(4, 'Image'),
(5, 'Volume'),
(6, 'Unknown');
| CREATE table storage_type_temp (
id NUMERIC, text varchar
);
----------------------------------------------------------------
-- All values defined in ovirt-engine project:
-- backend/manager/modules/common/src/main/java/org/ovirt/engine/core/common/businessentities/storage/StorageType.java
----------------------------------------------------------------
INSERT INTO storage_type_temp VALUES
(0, 'UNKNOWN'),
(1, 'NFS'),
(2, 'FCP'),
(3, 'ISCSI'),
(4, 'LOCALFS'),
(6, 'POSIXFS'),
(7, 'GLUSTERFS'),
(8, 'GLANCE'),
(9, 'CINDER');
CREATE table storage_domain_type_temp (
id NUMERIC, text varchar
);
----------------------------------------------------------------
-- All values defined in ovirt-engine project:
-- backend/manager/modules/common/src/main/java/org/ovirt/engine/core/common/businessentities/StorageDomainType.java
----------------------------------------------------------------
INSERT into storage_domain_type_temp VALUES
(0, 'Master'),
(1, 'Data'),
(2, 'ISO'),
(3, 'ImportExport'),
(4, 'Image'),
(5, 'Volume'),
(6, 'Unknown');
| Add documentation about tables values | inventory_report: Add documentation about tables values
Code readability improvement.
Change-Id: Ia35ea91f812e9b9a2a564770c054abef2babc7bc
Signed-off-by: Douglas Schilling Landgraf <[email protected]>
| SQL | apache-2.0 | oVirt/ovirt-log-collector,oVirt/ovirt-log-collector,oVirt/ovirt-log-collector | sql | ## Code Before:
CREATE table storage_type_temp (
id NUMERIC, text varchar
);
INSERT INTO storage_type_temp VALUES
(0, 'UNKNOWN'),
(1, 'NFS'),
(2, 'FCP'),
(3, 'ISCSI'),
(4, 'LOCALFS'),
(6, 'POSIXFS'),
(7, 'GLUSTERFS'),
(8, 'GLANCE'),
(9, 'CINDER');
CREATE table storage_domain_type_temp (
id NUMERIC, text varchar
);
INSERT into storage_domain_type_temp VALUES
(0, 'Master'),
(1, 'Data'),
(2, 'ISO'),
(3, 'ImportExport'),
(4, 'Image'),
(5, 'Volume'),
(6, 'Unknown');
## Instruction:
inventory_report: Add documentation about tables values
Code readability improvement.
Change-Id: Ia35ea91f812e9b9a2a564770c054abef2babc7bc
Signed-off-by: Douglas Schilling Landgraf <[email protected]>
## Code After:
CREATE table storage_type_temp (
id NUMERIC, text varchar
);
----------------------------------------------------------------
-- All values defined in ovirt-engine project:
-- backend/manager/modules/common/src/main/java/org/ovirt/engine/core/common/businessentities/storage/StorageType.java
----------------------------------------------------------------
INSERT INTO storage_type_temp VALUES
(0, 'UNKNOWN'),
(1, 'NFS'),
(2, 'FCP'),
(3, 'ISCSI'),
(4, 'LOCALFS'),
(6, 'POSIXFS'),
(7, 'GLUSTERFS'),
(8, 'GLANCE'),
(9, 'CINDER');
CREATE table storage_domain_type_temp (
id NUMERIC, text varchar
);
----------------------------------------------------------------
-- All values defined in ovirt-engine project:
-- backend/manager/modules/common/src/main/java/org/ovirt/engine/core/common/businessentities/StorageDomainType.java
----------------------------------------------------------------
INSERT into storage_domain_type_temp VALUES
(0, 'Master'),
(1, 'Data'),
(2, 'ISO'),
(3, 'ImportExport'),
(4, 'Image'),
(5, 'Volume'),
(6, 'Unknown');
|
fa2e73567610a1bf87bf21ea02e5fea2c914b096 | db/migrate/20161219105620_change_course_user_invitations.rb | db/migrate/20161219105620_change_course_user_invitations.rb | class ChangeCourseUserInvitations < ActiveRecord::Migration
def up
add_column :course_user_invitations, :course_id, :integer
add_column :course_user_invitations, :name, :string
add_column :course_user_invitations, :email, :string, index: { case_sensitive: false }
add_column :course_user_invitations, :confirmed_at, :datetime
add_index :course_user_invitations, [:course_id, :email], unique: true
Course::UserInvitation.includes(course_user: { user: :emails }).find_each do |invitation|
course_user = invitation.course_user
invitation.update_columns(
course_id: course_user.course_id,
name: course_user.name,
email: invitation.user_email.email,
confirmed_at: course_user.approved? ? course_user.user.created_at : nil
)
end
change_column :course_user_invitations, :course_id, :integer, null: false
change_column :course_user_invitations, :name, :string, null: false
change_column :course_user_invitations, :email, :string, null: false
remove_column :course_user_invitations, :course_user_id
remove_column :course_user_invitations, :user_email_id
end
end
| class ChangeCourseUserInvitations < ActiveRecord::Migration
def up
add_column :course_user_invitations, :course_id, :integer
add_column :course_user_invitations, :name, :string
add_column :course_user_invitations, :email, :string, index: { case_sensitive: false }
add_column :course_user_invitations, :confirmed_at, :datetime
add_index :course_user_invitations, [:course_id, :email], unique: true
Course::UserInvitation.find_each do |invitation|
course_user = CourseUser.find(invitation.course_user_id)
email = User::Email.find(invitation.user_email_id)
invitation.update_columns(
course_id: course_user.course_id,
name: course_user.name,
email: email.email,
confirmed_at: course_user.approved? ? course_user.user.created_at : nil
)
end
change_column :course_user_invitations, :course_id, :integer, null: false
change_column :course_user_invitations, :name, :string, null: false
change_column :course_user_invitations, :email, :string, null: false
remove_column :course_user_invitations, :course_user_id
remove_column :course_user_invitations, :user_email_id
end
end
| Fix invitation migration script * The migration failed due to association removed | Fix invitation migration script
* The migration failed due to association removed
| Ruby | mit | Coursemology/coursemology2,cysjonathan/coursemology2,Coursemology/coursemology2,Coursemology/coursemology2,Coursemology/coursemology2,cysjonathan/coursemology2,Coursemology/coursemology2,Coursemology/coursemology2,Coursemology/coursemology2,cysjonathan/coursemology2 | ruby | ## Code Before:
class ChangeCourseUserInvitations < ActiveRecord::Migration
def up
add_column :course_user_invitations, :course_id, :integer
add_column :course_user_invitations, :name, :string
add_column :course_user_invitations, :email, :string, index: { case_sensitive: false }
add_column :course_user_invitations, :confirmed_at, :datetime
add_index :course_user_invitations, [:course_id, :email], unique: true
Course::UserInvitation.includes(course_user: { user: :emails }).find_each do |invitation|
course_user = invitation.course_user
invitation.update_columns(
course_id: course_user.course_id,
name: course_user.name,
email: invitation.user_email.email,
confirmed_at: course_user.approved? ? course_user.user.created_at : nil
)
end
change_column :course_user_invitations, :course_id, :integer, null: false
change_column :course_user_invitations, :name, :string, null: false
change_column :course_user_invitations, :email, :string, null: false
remove_column :course_user_invitations, :course_user_id
remove_column :course_user_invitations, :user_email_id
end
end
## Instruction:
Fix invitation migration script
* The migration failed due to association removed
## Code After:
class ChangeCourseUserInvitations < ActiveRecord::Migration
def up
add_column :course_user_invitations, :course_id, :integer
add_column :course_user_invitations, :name, :string
add_column :course_user_invitations, :email, :string, index: { case_sensitive: false }
add_column :course_user_invitations, :confirmed_at, :datetime
add_index :course_user_invitations, [:course_id, :email], unique: true
Course::UserInvitation.find_each do |invitation|
course_user = CourseUser.find(invitation.course_user_id)
email = User::Email.find(invitation.user_email_id)
invitation.update_columns(
course_id: course_user.course_id,
name: course_user.name,
email: email.email,
confirmed_at: course_user.approved? ? course_user.user.created_at : nil
)
end
change_column :course_user_invitations, :course_id, :integer, null: false
change_column :course_user_invitations, :name, :string, null: false
change_column :course_user_invitations, :email, :string, null: false
remove_column :course_user_invitations, :course_user_id
remove_column :course_user_invitations, :user_email_id
end
end
|
dac46720723da91ca805b79ff79524121fc4f137 | pkgs/tools/misc/html-proofer/default.nix | pkgs/tools/misc/html-proofer/default.nix | { bundlerEnv, ruby, lib }:
bundlerEnv rec {
name = "${pname}-${version}";
pname = "html-proofer";
version = (import ./gemset.nix).html-proofer.version;
inherit ruby;
gemdir = ./.;
meta = with lib; {
description = "A tool to validate HTML files";
homepage = "https://github.com/gjtorikian/html-proofer";
license = licenses.mit;
maintainers = with maintainers; [ primeos ];
platforms = platforms.unix;
};
}
| { bundlerEnv, ruby, lib, bundlerUpdateScript }:
bundlerEnv rec {
name = "${pname}-${version}";
pname = "html-proofer";
version = (import ./gemset.nix).html-proofer.version;
inherit ruby;
gemdir = ./.;
passthru.updateScript = bundlerUpdateScript pname;
meta = with lib; {
description = "A tool to validate HTML files";
homepage = "https://github.com/gjtorikian/html-proofer";
license = licenses.mit;
maintainers = with maintainers; [ ];
platforms = platforms.unix;
};
}
| Add bundlerUpdateScript and remove myself as maintainer | html-proofer: Add bundlerUpdateScript and remove myself as maintainer
| Nix | mit | NixOS/nixpkgs,NixOS/nixpkgs,NixOS/nixpkgs,NixOS/nixpkgs,NixOS/nixpkgs,NixOS/nixpkgs,NixOS/nixpkgs,NixOS/nixpkgs,NixOS/nixpkgs,NixOS/nixpkgs,NixOS/nixpkgs,NixOS/nixpkgs,NixOS/nixpkgs,NixOS/nixpkgs,NixOS/nixpkgs | nix | ## Code Before:
{ bundlerEnv, ruby, lib }:
bundlerEnv rec {
name = "${pname}-${version}";
pname = "html-proofer";
version = (import ./gemset.nix).html-proofer.version;
inherit ruby;
gemdir = ./.;
meta = with lib; {
description = "A tool to validate HTML files";
homepage = "https://github.com/gjtorikian/html-proofer";
license = licenses.mit;
maintainers = with maintainers; [ primeos ];
platforms = platforms.unix;
};
}
## Instruction:
html-proofer: Add bundlerUpdateScript and remove myself as maintainer
## Code After:
{ bundlerEnv, ruby, lib, bundlerUpdateScript }:
bundlerEnv rec {
name = "${pname}-${version}";
pname = "html-proofer";
version = (import ./gemset.nix).html-proofer.version;
inherit ruby;
gemdir = ./.;
passthru.updateScript = bundlerUpdateScript pname;
meta = with lib; {
description = "A tool to validate HTML files";
homepage = "https://github.com/gjtorikian/html-proofer";
license = licenses.mit;
maintainers = with maintainers; [ ];
platforms = platforms.unix;
};
}
|
299e69f5fd40a3b57aed530f1f715870f461f874 | resources/assets/js/plugins/undo-redo.js | resources/assets/js/plugins/undo-redo.js | import { win, isIframe } from 'classes/helpers';
import { eventBus } from './eventbus';
import UndoStack from 'classes/UndoStack';
const trackedMutations = {
updateFieldValue: 'Update to block field',
addBlock: 'Added block to page',
reorderBlocks: 'Reordered blocks on page',
deleteBlock: 'Deleted block on page'
};
const undoStack = new UndoStack({ lock: true });
const undoRedo = store => {
if(isIframe) {
return;
}
undoStack.setUndoRedo(pageData => {
store.commit('setPage', JSON.parse(pageData));
eventBus.$emit('block:hideHoverOverlay', null);
eventBus.$emit('block:hideSelectedOverlay', null);
});
undoStack.setCallback(({ canUndo, canRedo }) => {
store.commit('updateUndoRedo', { canUndo, canRedo });
});
store.subscribe((mutation, state) => {
if(!trackedMutations[mutation.type]) {
return;
}
undoStack.add(state.page.pageData);
});
};
export default undoRedo;
export const undoStackInstance = isIframe ?
win.top.astroUndoStack : (win.astroUndoStack = undoStack);
| import { win, isIframe } from 'classes/helpers';
import { eventBus } from './eventbus';
import UndoStack from 'classes/UndoStack';
const trackedMutations = {
updateFieldValue: 'Update to block field',
addBlock: 'Added block to page',
reorderBlocks: 'Reordered blocks on page',
deleteBlock: 'Deleted block on page'
};
const undoStack = new UndoStack({ lock: true });
const undoRedo = store => {
if(isIframe) {
return;
}
undoStack.setUndoRedo(pageData => {
store.commit('setPage', JSON.parse(pageData));
eventBus.$emit('block:hideHoverOverlay');
eventBus.$emit('block:updateBlockOverlays');
});
undoStack.setCallback(({ canUndo, canRedo }) => {
store.commit('updateUndoRedo', { canUndo, canRedo });
});
store.subscribe((mutation, state) => {
if(!trackedMutations[mutation.type]) {
return;
}
undoStack.add(state.page.pageData);
});
};
export default undoRedo;
export const undoStackInstance = isIframe ?
win.top.astroUndoStack : (win.astroUndoStack = undoStack);
| Make sure triggering undo/redo properly updates overlays. | Make sure triggering undo/redo properly updates overlays.
| JavaScript | mit | unikent/astro,unikent/astro,unikent/astro,unikent/astro,unikent/astro | javascript | ## Code Before:
import { win, isIframe } from 'classes/helpers';
import { eventBus } from './eventbus';
import UndoStack from 'classes/UndoStack';
const trackedMutations = {
updateFieldValue: 'Update to block field',
addBlock: 'Added block to page',
reorderBlocks: 'Reordered blocks on page',
deleteBlock: 'Deleted block on page'
};
const undoStack = new UndoStack({ lock: true });
const undoRedo = store => {
if(isIframe) {
return;
}
undoStack.setUndoRedo(pageData => {
store.commit('setPage', JSON.parse(pageData));
eventBus.$emit('block:hideHoverOverlay', null);
eventBus.$emit('block:hideSelectedOverlay', null);
});
undoStack.setCallback(({ canUndo, canRedo }) => {
store.commit('updateUndoRedo', { canUndo, canRedo });
});
store.subscribe((mutation, state) => {
if(!trackedMutations[mutation.type]) {
return;
}
undoStack.add(state.page.pageData);
});
};
export default undoRedo;
export const undoStackInstance = isIframe ?
win.top.astroUndoStack : (win.astroUndoStack = undoStack);
## Instruction:
Make sure triggering undo/redo properly updates overlays.
## Code After:
import { win, isIframe } from 'classes/helpers';
import { eventBus } from './eventbus';
import UndoStack from 'classes/UndoStack';
const trackedMutations = {
updateFieldValue: 'Update to block field',
addBlock: 'Added block to page',
reorderBlocks: 'Reordered blocks on page',
deleteBlock: 'Deleted block on page'
};
const undoStack = new UndoStack({ lock: true });
const undoRedo = store => {
if(isIframe) {
return;
}
undoStack.setUndoRedo(pageData => {
store.commit('setPage', JSON.parse(pageData));
eventBus.$emit('block:hideHoverOverlay');
eventBus.$emit('block:updateBlockOverlays');
});
undoStack.setCallback(({ canUndo, canRedo }) => {
store.commit('updateUndoRedo', { canUndo, canRedo });
});
store.subscribe((mutation, state) => {
if(!trackedMutations[mutation.type]) {
return;
}
undoStack.add(state.page.pageData);
});
};
export default undoRedo;
export const undoStackInstance = isIframe ?
win.top.astroUndoStack : (win.astroUndoStack = undoStack);
|
86d4df2407d151f1f1b076ffabe5efeb2641e763 | .travis.yml | .travis.yml | language: python
python:
- "3.6"
install:
- pip install -r requirements.txt
- pip install coveralls
script:
- pytest
after_success:
- coveralls
| language: python
python:
- "3.6"
install:
- pip install -r requirements.txt
- pip install coveralls
script:
- pytest -v tests/ --cov=abraia
after_success:
- coveralls
| Update pytest configuration to use the coveralls service | Update pytest configuration to use the coveralls service
| YAML | mit | abraia/abraia-python | yaml | ## Code Before:
language: python
python:
- "3.6"
install:
- pip install -r requirements.txt
- pip install coveralls
script:
- pytest
after_success:
- coveralls
## Instruction:
Update pytest configuration to use the coveralls service
## Code After:
language: python
python:
- "3.6"
install:
- pip install -r requirements.txt
- pip install coveralls
script:
- pytest -v tests/ --cov=abraia
after_success:
- coveralls
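The `--cov=abraia` flag added above comes from the pytest-cov plugin and is what makes the test run write coverage data; without it the later `coveralls` step has nothing to upload. A rough equivalent of that measurement step using the coverage.py API directly is sketched below (the package and test paths are taken from the config above; this is not how the repo itself wires it up):

```python
import coverage
import pytest

cov = coverage.Coverage(source=["abraia"])
cov.start()
exit_code = pytest.main(["-v", "tests/"])  # run the suite under measurement
cov.stop()
cov.save()    # writes the .coverage data file that coveralls reads
cov.report()  # console summary, similar to pytest-cov's output
raise SystemExit(exit_code)
```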
|
b1aeacc135c3b22ea7b15419ccbb8f28d6408340 | usr.bin/showmount/Makefile | usr.bin/showmount/Makefile |
PROG= showmount
.include <bsd.prog.mk>
|
PROG= showmount
MAN8= showmount.8
.include <bsd.prog.mk>
| Use the right man page. Submitted by: jkh | Use the right man page.
Submitted by: jkh
| unknown | bsd-3-clause | jrobhoward/SCADAbase,jrobhoward/SCADAbase,jrobhoward/SCADAbase,jrobhoward/SCADAbase,jrobhoward/SCADAbase,jrobhoward/SCADAbase,jrobhoward/SCADAbase,jrobhoward/SCADAbase,jrobhoward/SCADAbase,jrobhoward/SCADAbase,jrobhoward/SCADAbase | unknown | ## Code Before:
PROG= showmount
.include <bsd.prog.mk>
## Instruction:
Use the right man page.
Submitted by: jkh
## Code After:
PROG= showmount
MAN8= showmount.8
.include <bsd.prog.mk>
|
132f6374b20bf31ecc032effc2822b4b3a8351dc | lint.yml | lint.yml | linters:
Indentation:
width: 4
NestingDepth:
max_depth: 4
SelectorDepth:
max_depth: 4
| linters:
Indentation:
width: 4
NestingDepth:
max_depth: 4
SelectorDepth:
max_depth: 4
| Use 2 spaces in .yml files | Use 2 spaces in .yml files
| YAML | mit | kiswa/TaskBoard,kiswa/TaskBoard,kiswa/TaskBoard,kiswa/TaskBoard,kiswa/TaskBoard | yaml | ## Code Before:
linters:
Indentation:
width: 4
NestingDepth:
max_depth: 4
SelectorDepth:
max_depth: 4
## Instruction:
Use 2 spaces in .yml files
## Code After:
linters:
Indentation:
width: 4
NestingDepth:
max_depth: 4
SelectorDepth:
max_depth: 4
|
dc78224ec7f05b00781595a4177aa55fb54b8270 | src/grid_displayers/simple_terminal.rs | src/grid_displayers/simple_terminal.rs | use std::sync::{ RwLock, RwLockReadGuard, Arc };
use std::sync::mpsc::{ Receiver };
use models::Grid;
use grid_displayers::GridDisplayer;
pub struct SimpleTerminal;
impl SimpleTerminal {
pub fn new() -> SimpleTerminal {
SimpleTerminal
}
}
impl GridDisplayer for SimpleTerminal {
fn draw(&self, receiver: Receiver<Arc<RwLock<Grid>>>) {
loop {
let locked_grid: Arc<RwLock<Grid>> = receiver.recv().unwrap();
let lock_guard: RwLockReadGuard<Grid> = locked_grid.read().unwrap();
let ref grid: Grid = *lock_guard;
for row in grid {
for cell in row {
match cell.is_alive() {
true => print!("o"),
false => print!("_")
};
}
print!("\n");
}
print!("\n\n");
}
}
}
| use std::sync::{ RwLock, RwLockReadGuard, Arc };
use std::sync::mpsc::{ Receiver };
use models::Grid;
use grid_displayers::GridDisplayer;
#[allow(dead_code)]
pub struct SimpleTerminal;
impl SimpleTerminal {
#[allow(dead_code)]
pub fn new() -> SimpleTerminal {
SimpleTerminal
}
}
impl GridDisplayer for SimpleTerminal {
fn draw(&self, receiver: Receiver<Arc<RwLock<Grid>>>) {
loop {
let locked_grid: Arc<RwLock<Grid>> = receiver.recv().unwrap();
let lock_guard: RwLockReadGuard<Grid> = locked_grid.read().unwrap();
let ref grid: Grid = *lock_guard;
for row in grid {
for cell in row {
match cell.is_alive() {
true => print!("o"),
false => print!("_")
};
}
print!("\n");
}
print!("\n\n");
}
}
}
| Add allow(dead_code) on the SimpleTerminal GridDisplayer | Add allow(dead_code) on the SimpleTerminal GridDisplayer
| Rust | mit | mateusmedeiros/rust_of_life | rust | ## Code Before:
use std::sync::{ RwLock, RwLockReadGuard, Arc };
use std::sync::mpsc::{ Receiver };
use models::Grid;
use grid_displayers::GridDisplayer;
pub struct SimpleTerminal;
impl SimpleTerminal {
pub fn new() -> SimpleTerminal {
SimpleTerminal
}
}
impl GridDisplayer for SimpleTerminal {
fn draw(&self, receiver: Receiver<Arc<RwLock<Grid>>>) {
loop {
let locked_grid: Arc<RwLock<Grid>> = receiver.recv().unwrap();
let lock_guard: RwLockReadGuard<Grid> = locked_grid.read().unwrap();
let ref grid: Grid = *lock_guard;
for row in grid {
for cell in row {
match cell.is_alive() {
true => print!("o"),
false => print!("_")
};
}
print!("\n");
}
print!("\n\n");
}
}
}
## Instruction:
Add allow(dead_code) on the SimpleTerminal GridDisplayer
## Code After:
use std::sync::{ RwLock, RwLockReadGuard, Arc };
use std::sync::mpsc::{ Receiver };
use models::Grid;
use grid_displayers::GridDisplayer;
#[allow(dead_code)]
pub struct SimpleTerminal;
impl SimpleTerminal {
#[allow(dead_code)]
pub fn new() -> SimpleTerminal {
SimpleTerminal
}
}
impl GridDisplayer for SimpleTerminal {
fn draw(&self, receiver: Receiver<Arc<RwLock<Grid>>>) {
loop {
let locked_grid: Arc<RwLock<Grid>> = receiver.recv().unwrap();
let lock_guard: RwLockReadGuard<Grid> = locked_grid.read().unwrap();
let ref grid: Grid = *lock_guard;
for row in grid {
for cell in row {
match cell.is_alive() {
true => print!("o"),
false => print!("_")
};
}
print!("\n");
}
print!("\n\n");
}
}
}
|
bd0053d9ef7b5cc3cdd3feec8c32e99fbb48fc11 | templates/index_competitionseason.html | templates/index_competitionseason.html | {% extends "base.html" %}
{% block title %}The Blue Alliance{% endblock %}
{% block content %}
<div class="container">
<div class="row">
<div class="col-xs-4">
{% include "index_lhc.html" %}
</div>
<div class="col-xs-8">
<h2>This Week's Events</h2>
{% with events as events %}
{% include "event_partials/event_table.html" %}
{% endwith %}
<div>
<a class="btn" href="/webcasts"><span class="glyphicon glyphicon-info-sign"></span> Add Webcasts</a>
<a class="btn" href="/contact"><span class="glyphicon glyphicon-upload"></span> Add YouTube Videos</a>
</div>
</div>
</div>
</div>
{% endblock %}
| {% extends "base.html" %}
{% block title %}The Blue Alliance{% endblock %}
{% block content %}
<div class="container">
<div class="row">
<div class="col-xs-4">
{% include "index_lhc.html" %}
</div>
<div class="col-xs-8">
<h2>This Week's Events</h2>
{% with events as events %}
{% include "event_partials/event_table.html" %}
{% endwith %}
<div>
<a class="btn btn-default" href="/webcasts"><span class="glyphicon glyphicon-info-sign"></span> Add Webcasts</a>
<a class="btn btn-default" href="/contact"><span class="glyphicon glyphicon-upload"></span> Add YouTube Videos</a>
</div>
</div>
</div>
</div>
{% endblock %}
| Fix buttons on landing page | Fix buttons on landing page
| HTML | mit | the-blue-alliance/the-blue-alliance,the-blue-alliance/the-blue-alliance,verycumbersome/the-blue-alliance,bdaroz/the-blue-alliance,nwalters512/the-blue-alliance,nwalters512/the-blue-alliance,synth3tk/the-blue-alliance,jaredhasenklein/the-blue-alliance,synth3tk/the-blue-alliance,fangeugene/the-blue-alliance,synth3tk/the-blue-alliance,phil-lopreiato/the-blue-alliance,jaredhasenklein/the-blue-alliance,nwalters512/the-blue-alliance,tsteward/the-blue-alliance,fangeugene/the-blue-alliance,tsteward/the-blue-alliance,verycumbersome/the-blue-alliance,nwalters512/the-blue-alliance,tsteward/the-blue-alliance,jaredhasenklein/the-blue-alliance,1fish2/the-blue-alliance,synth3tk/the-blue-alliance,josephbisch/the-blue-alliance,1fish2/the-blue-alliance,verycumbersome/the-blue-alliance,fangeugene/the-blue-alliance,the-blue-alliance/the-blue-alliance,nwalters512/the-blue-alliance,phil-lopreiato/the-blue-alliance,fangeugene/the-blue-alliance,bdaroz/the-blue-alliance,1fish2/the-blue-alliance,josephbisch/the-blue-alliance,bdaroz/the-blue-alliance,fangeugene/the-blue-alliance,tsteward/the-blue-alliance,josephbisch/the-blue-alliance,tsteward/the-blue-alliance,phil-lopreiato/the-blue-alliance,synth3tk/the-blue-alliance,nwalters512/the-blue-alliance,fangeugene/the-blue-alliance,bvisness/the-blue-alliance,1fish2/the-blue-alliance,bvisness/the-blue-alliance,jaredhasenklein/the-blue-alliance,bdaroz/the-blue-alliance,verycumbersome/the-blue-alliance,tsteward/the-blue-alliance,the-blue-alliance/the-blue-alliance,bvisness/the-blue-alliance,phil-lopreiato/the-blue-alliance,phil-lopreiato/the-blue-alliance,verycumbersome/the-blue-alliance,jaredhasenklein/the-blue-alliance,synth3tk/the-blue-alliance,bdaroz/the-blue-alliance,the-blue-alliance/the-blue-alliance,bdaroz/the-blue-alliance,bvisness/the-blue-alliance,josephbisch/the-blue-alliance,verycumbersome/the-blue-alliance,bvisness/the-blue-alliance,jaredhasenklein/the-blue-alliance,the-blue-alliance/the-blue-alliance,josephbisch/the-blue-alliance,phil-lopreiato/the-blue-alliance,bvisness/the-blue-alliance,1fish2/the-blue-alliance,josephbisch/the-blue-alliance,1fish2/the-blue-alliance | html | ## Code Before:
{% extends "base.html" %}
{% block title %}The Blue Alliance{% endblock %}
{% block content %}
<div class="container">
<div class="row">
<div class="col-xs-4">
{% include "index_lhc.html" %}
</div>
<div class="col-xs-8">
<h2>This Week's Events</h2>
{% with events as events %}
{% include "event_partials/event_table.html" %}
{% endwith %}
<div>
<a class="btn" href="/webcasts"><span class="glyphicon glyphicon-info-sign"></span> Add Webcasts</a>
<a class="btn" href="/contact"><span class="glyphicon glyphicon-upload"></span> Add YouTube Videos</a>
</div>
</div>
</div>
</div>
{% endblock %}
## Instruction:
Fix buttons on landing page
## Code After:
{% extends "base.html" %}
{% block title %}The Blue Alliance{% endblock %}
{% block content %}
<div class="container">
<div class="row">
<div class="col-xs-4">
{% include "index_lhc.html" %}
</div>
<div class="col-xs-8">
<h2>This Week's Events</h2>
{% with events as events %}
{% include "event_partials/event_table.html" %}
{% endwith %}
<div>
<a class="btn btn-default" href="/webcasts"><span class="glyphicon glyphicon-info-sign"></span> Add Webcasts</a>
<a class="btn btn-default" href="/contact"><span class="glyphicon glyphicon-upload"></span> Add YouTube Videos</a>
</div>
</div>
</div>
</div>
{% endblock %}
|
4f34c863fc83414d56250a59a998f8d109b1cfff | spec/sisecommerce/produto_spec.rb | spec/sisecommerce/produto_spec.rb | RSpec.describe Sisecommerce::Produto do
describe '#get' do
context 'having 10 produtos available' do
it 'lists produtos available' do
produtos = Sisecommerce::Produto.get
expect(produtos.size).to eq 10
end
it 'returns produto with specific id' do
produtos = Sisecommerce::Produto.get(id: 1)
expect(produtos.first.id).to eq 1
end
it 'returns produto with specific id_sku' do
produtos = Sisecommerce::Produto.get(id_sku: 1)
expect(produtos.first.id_sku).to eq 1
end
it 'lists produtos available by pagina' do
produtos = Sisecommerce::Produto.get(pagina: 1)
expect(produtos.size).to eq 10
end
end
context 'having no produtos available' do
it 'returns error 220' do
retorno = Sisecommerce::Pedido.get
expect(retorno.erros.first.codigo).to eq 220
end
end
end
describe '#put' do
let(:produto_put) { }
it 'should change produto nome' do
Sisecommerce::Produto.put(id_sku: 1, nome: 'new name')
expect(Sisecommerce::Produto.get(id: 1).nome).to eq 'new name'
end
end
end
| RSpec.describe Sisecommerce::Produto do
describe '#get' do
context 'having 10 produtos available' do
it 'lists produtos available' do
produtos = Sisecommerce::Produto.get
expect(produtos.size).to eq 10
end
it 'returns produto with specific id' do
produtos = Sisecommerce::Produto.get(id: 1)
expect(produtos.first.id).to eq 1
end
it 'returns produto with specific id_sku' do
produtos = Sisecommerce::Produto.get(id_sku: 1)
expect(produtos.first.id_sku).to eq 1
end
it 'lists produtos available by pagina' do
produtos = Sisecommerce::Produto.get(pagina: 1)
expect(produtos.size).to eq 10
end
end
context 'having no produtos available' do
it 'returns error 220' do
retorno = Sisecommerce::Pedido.get
expect(retorno.erros.first.codigo).to eq 220
end
end
end
describe '#put' do
it 'should change produto nome' do
Sisecommerce::Produto.put(id_sku: 1, nome: 'new name')
expect(Sisecommerce::Produto.get(id_sku: 1).nome).to eq 'new name'
end
end
end
| Apply fix on put request spec | Apply fix on put request spec
| Ruby | mit | iurimadeira/ruby_sisecommerce | ruby | ## Code Before:
RSpec.describe Sisecommerce::Produto do
describe '#get' do
context 'having 10 produtos available' do
it 'lists produtos available' do
produtos = Sisecommerce::Produto.get
expect(produtos.size).to eq 10
end
it 'returns produto with specific id' do
produtos = Sisecommerce::Produto.get(id: 1)
expect(produtos.first.id).to eq 1
end
it 'returns produto with specific id_sku' do
produtos = Sisecommerce::Produto.get(id_sku: 1)
expect(produtos.first.id_sku).to eq 1
end
it 'lists produtos available by pagina' do
produtos = Sisecommerce::Produto.get(pagina: 1)
expect(produtos.size).to eq 10
end
end
context 'having no produtos available' do
it 'returns error 220' do
retorno = Sisecommerce::Pedido.get
expect(retorno.erros.first.codigo).to eq 220
end
end
end
describe '#put' do
let(:produto_put) { }
it 'should change produto nome' do
Sisecommerce::Produto.put(id_sku: 1, nome: 'new name')
expect(Sisecommerce::Produto.get(id: 1).nome).to eq 'new name'
end
end
end
## Instruction:
Apply fix on put request spec
## Code After:
RSpec.describe Sisecommerce::Produto do
describe '#get' do
context 'having 10 produtos available' do
it 'lists produtos available' do
produtos = Sisecommerce::Produto.get
expect(produtos.size).to eq 10
end
it 'returns produto with specific id' do
produtos = Sisecommerce::Produto.get(id: 1)
expect(produtos.first.id).to eq 1
end
it 'returns produto with specific id_sku' do
produtos = Sisecommerce::Produto.get(id_sku: 1)
expect(produtos.first.id_sku).to eq 1
end
it 'lists produtos available by pagina' do
produtos = Sisecommerce::Produto.get(pagina: 1)
expect(produtos.size).to eq 10
end
end
context 'having no produtos available' do
it 'returns error 220' do
retorno = Sisecommerce::Pedido.get
expect(retorno.erros.first.codigo).to eq 220
end
end
end
describe '#put' do
it 'should change produto nome' do
Sisecommerce::Produto.put(id_sku: 1, nome: 'new name')
expect(Sisecommerce::Produto.get(id_sku: 1).nome).to eq 'new name'
end
end
end
|
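
The corrected expectation matters because the example updates the record through `id_sku` but previously read it back through `id`; asserting through the same key keeps the spec self-contained. A hedged restatement of the pattern, reusing the names from the spec above:

```ruby
# Sketch only: look the record up by the same key the update used, so the
# assertion cannot depend on an unrelated record that happens to have id == 1.
Sisecommerce::Produto.put(id_sku: 1, nome: 'new name')
expect(Sisecommerce::Produto.get(id_sku: 1).nome).to eq 'new name'
```
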
cbb42b2ecd99f1152af0439c55d02848c11d02a2 | tasks/configure-tomcat.yml | tasks/configure-tomcat.yml |
- name: generate a custom 'setenv.sh' from template, provides the ability to configure Tomcat at start-up from outside the container
template:
src: setenv.sh.j2
dest: "{{ tomcat_catalina_base }}/bin/setenv.sh"
mode: ug+x
- name: "generate a custom '{{ tomcat_process_name }}.jmxremote.password' from template, to use pasword protected JMX ports"
template:
src: jmxremote.password.j2
dest: "{{ tomcat_catalina_base }}/conf/{{ tomcat_process_name }}.jmxremote.password"
owner: "{{ tomcat_system_user }}"
group: "{{ tomcat_system_group }}"
mode: 0600
- name: "generate a custom '{{ tomcat_process_name }}.jmxremote.access' from template, to use pasword protected JMX ports"
template:
src: jmxremote.access.j2
dest: "{{ tomcat_catalina_base }}/conf/{{ tomcat_process_name }}.jmxremote.access"
owner: "{{ tomcat_system_user }}"
group: "{{ tomcat_system_group }}"
mode: 0640
|
- name: generate a custom 'setenv.sh' from template, provides the ability to configure Tomcat at start-up from outside the container
template:
src: setenv.sh.j2
dest: "{{ tomcat_catalina_base }}/bin/setenv.sh"
mode: ug+x
- name: "generate a custom '{{ tomcat_process_name }}.jmxremote.password' from template, to use pasword protected JMX ports"
template:
src: jmxremote.password.j2
dest: "{{ tomcat_catalina_base }}/conf/{{ tomcat_process_name }}.jmxremote.password"
owner: "{{ tomcat_system_user }}"
group: "{{ tomcat_system_group }}"
mode: 0600
- name: "generate a custom '{{ tomcat_process_name }}.jmxremote.access' from template, to use pasword protected JMX ports"
template:
src: jmxremote.access.j2
dest: "{{ tomcat_catalina_base }}/conf/{{ tomcat_process_name }}.jmxremote.access"
owner: "{{ tomcat_system_user }}"
group: "{{ tomcat_system_group }}"
mode: 0640
- name: add UTF-8 encoding to the server connector
xml:
file: "{{ tomcat_catalina_base }}/conf/server.xml"
xpath: "/Server/Service/Connector"
attribute: "URIEncoding"
ensure: present
value: "UTF-8"
| Add UTF-8 encoding to the server connector | Add UTF-8 encoding to the server connector
| YAML | mit | KAMI911/ansible-role-tomcat,KAMI911/ansible-role-tomcat | yaml | ## Code Before:
- name: generate a custom 'setenv.sh' from template, provides the ability to configure Tomcat at start-up from outside the container
template:
src: setenv.sh.j2
dest: "{{ tomcat_catalina_base }}/bin/setenv.sh"
mode: ug+x
- name: "generate a custom '{{ tomcat_process_name }}.jmxremote.password' from template, to use pasword protected JMX ports"
template:
src: jmxremote.password.j2
dest: "{{ tomcat_catalina_base }}/conf/{{ tomcat_process_name }}.jmxremote.password"
owner: "{{ tomcat_system_user }}"
group: "{{ tomcat_system_group }}"
mode: 0600
- name: "generate a custom '{{ tomcat_process_name }}.jmxremote.access' from template, to use pasword protected JMX ports"
template:
src: jmxremote.access.j2
dest: "{{ tomcat_catalina_base }}/conf/{{ tomcat_process_name }}.jmxremote.access"
owner: "{{ tomcat_system_user }}"
group: "{{ tomcat_system_group }}"
mode: 0640
## Instruction:
Add UTF-8 encoding to the server connector
## Code After:
- name: generate a custom 'setenv.sh' from template, provides the ability to configure Tomcat at start-up from outside the container
template:
src: setenv.sh.j2
dest: "{{ tomcat_catalina_base }}/bin/setenv.sh"
mode: ug+x
- name: "generate a custom '{{ tomcat_process_name }}.jmxremote.password' from template, to use pasword protected JMX ports"
template:
src: jmxremote.password.j2
dest: "{{ tomcat_catalina_base }}/conf/{{ tomcat_process_name }}.jmxremote.password"
owner: "{{ tomcat_system_user }}"
group: "{{ tomcat_system_group }}"
mode: 0600
- name: "generate a custom '{{ tomcat_process_name }}.jmxremote.access' from template, to use pasword protected JMX ports"
template:
src: jmxremote.access.j2
dest: "{{ tomcat_catalina_base }}/conf/{{ tomcat_process_name }}.jmxremote.access"
owner: "{{ tomcat_system_user }}"
group: "{{ tomcat_system_group }}"
mode: 0640
- name: add UTF-8 encoding to the server connector
xml:
file: "{{ tomcat_catalina_base }}/conf/server.xml"
xpath: "/Server/Service/Connector"
attribute: "URIEncoding"
ensure: present
value: "UTF-8"
|
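
For context, the new `xml` task only sets a `URIEncoding` attribute on connectors that already exist in `server.xml`; it does not create the element. Assuming a stock Tomcat `server.xml`, the HTTP connector would end up looking roughly like this (port, protocol and timeouts are illustrative, carried over from the existing file):

```xml
<!-- Illustrative result only; everything except URIEncoding comes from the existing server.xml -->
<Connector port="8080" protocol="HTTP/1.1"
           connectionTimeout="20000"
           redirectPort="8443"
           URIEncoding="UTF-8" />
```

Note that the XPath `/Server/Service/Connector` matches every connector under the service, so an AJP connector defined at the same level would receive the attribute as well.
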
9fc58d12f376571aef37bfe3fbe1a534856bb120 | README.md | README.md | fairyland
=========
Java fake data generator
| fairyland
=========
Java fake data generator
[](https://travis-ci.org/Codearte/fairyland) | Build status from Travis CI | Build status from Travis CI
| Markdown | apache-2.0 | Codearte/jfairy,briacp/jfairy | markdown | ## Code Before:
fairyland
=========
Java fake data generator
## Instruction:
Build status from Travis CI
## Code After:
fairyland
=========
Java fake data generator
[](https://travis-ci.org/Codearte/fairyland) |
bd16f94f2bf25ca1f5a8d3de2c267afb283f5883 | test_requirements.txt | test_requirements.txt |
pytest
pytest-pep8
pytest-pylint
pytest-cov
pytest-django
pytest-capturelog
pytest-watch
pytest-xdist
mock
ipdb
ipython
urltools
semantic_version
|
pytest
pytest-pep8
# logilab-common is only necessary because pylint's dependency chain will
# result in the latest logilab-common being fetched, but astroid
# (another pylint dependency) depends on logilab-common<=0.63.0
logilab-common==0.63.0
pytest-pylint
pytest-cov
pytest-django
pytest-capturelog
pytest-watch
pytest-xdist
mock
ipdb
ipython
urltools
semantic_version
| Fix test failures due to pylint dependency chain | Fix test failures due to pylint dependency chain
logilab-common is only necessary because pylint's dependency chain
will result in the latest logilab-common being fetched, but astroid
(another pylint dependency) depends on logilab-common<=0.63.0
| Text | agpl-3.0 | mitodl/lore,amir-qayyum-khan/lore,amir-qayyum-khan/lore,mitodl/lore,mitodl/lore,amir-qayyum-khan/lore,amir-qayyum-khan/lore,amir-qayyum-khan/lore,mitodl/lore,mitodl/lore | text | ## Code Before:
pytest
pytest-pep8
pytest-pylint
pytest-cov
pytest-django
pytest-capturelog
pytest-watch
pytest-xdist
mock
ipdb
ipython
urltools
semantic_version
## Instruction:
Fix test failures due to pylint dependency chain
logilab-common is only necessary because pylint's dependency chain
will result in the latest logilab-common being fetched, but astroid
(another pylint dependency) depends on logilab-common<=0.63.0
## Code After:
pytest
pytest-pep8
# logilab-common is only necessary because pylint's dependency chain will
# result in the latest logilab-common being fetched, but astroid
# (another pylint dependency) depends on logilab-common<=0.63.0
logilab-common==0.63.0
pytest-pylint
pytest-cov
pytest-django
pytest-capturelog
pytest-watch
pytest-xdist
mock
ipdb
ipython
urltools
semantic_version
|
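
The explicit pin is needed because, per the commit message, pylint's dependency chain pulls in the newest logilab-common while astroid caps it at 0.63.0, and pip at the time did not reconcile such transitive constraints on its own. A hedged sanity check that the pin actually took effect inside the test virtualenv might look like:

```python
# Hypothetical check, run after `pip install -r test_requirements.txt`:
# confirm the pinned logilab-common is the one that got installed.
import pkg_resources

print(pkg_resources.get_distribution("logilab-common").version)  # expected: 0.63.0
print(pkg_resources.get_distribution("astroid").version)         # whatever pylint pulled in
```
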
a684a5148638214c516ea305b2ac654863358c6c | lib/github_cli/util.rb | lib/github_cli/util.rb | module GithubCLI
module Util
extend self
def flatten_hash(prefix=nil, hash)
new_hash ||= {}
hash.each do |key, val|
key = prefix ? :"#{prefix}_#{key}" : key
case val
when Hash
new_hash.update flatten_hash(key, val)
else
new_hash[key] = val
end
end
return new_hash
end
def convert_values(values)
values_copy = values.dup
collected = []
values_copy.inject([]) do |collected, val|
collected << convert_value(val)
end
end
def convert_value(value)
case value
when true then "true"
when false then "false"
when Hash then convert_value(value.values)
when Array then value.map(&:to_s)
else value.to_s
end
end
end
end # GithubCLI
|
module GithubCLI
module Util
extend self
def flatten_hash(prefix=nil, hash)
new_hash ||= {}
hash.each do |key, val|
key = prefix ? :"#{prefix}_#{key}" : key
case val
when Hash
new_hash.update flatten_hash(key, val)
else
new_hash[key] = val
end
end
return new_hash
end
def convert_values(values)
values_copy = values.dup
collected = []
values_copy.inject([]) do |collected, val|
collected << convert_value(val)
end
end
def convert_value(value)
case value
when true then "true"
when false then "false"
when Hash then convert_value(value.values)
when Array then value.map(&:to_s)
else value.to_s
end
end
# Shortens string
# :trailing - trailing character in place of cutout string
def truncate(string, width, options={})
trailing = options[:trailing] || '…'
chars = string.to_s.chars.to_a
if chars.length < width && chars.length > 3
chars.join
elsif chars.length > 3
(chars[0, width - 1].join) + trailing
end
end
# Pads a string
# padder - padding character
# align - align :left, :right, :center
def pad(string, width, options={})
padder = options[:padder] || ' '
align = options[:align] || :left
chars = string.chars.to_a
if chars.length < width
string = case :"#{align}"
when :left
string + (padder * (width - chars.length))
when :right
(padder * (width - chars.length)) + string
when :center
right = ((pad_length = width - chars.length).to_f / 2).ceil
left = pad_length - right
(padder * left) + string + (padder * right)
end
end
string
end
end
end # GithubCLI
| Add string truncation and padding. | Add string truncation and padding.
| Ruby | mit | peter-murach/github_cli,piotrmurach/github_cli,peter-murach/github_cli,pjump/github_cli,pjump/github_cli,ecliptik/github_cli,ecliptik/github_cli | ruby | ## Code Before:
module GithubCLI
module Util
extend self
def flatten_hash(prefix=nil, hash)
new_hash ||= {}
hash.each do |key, val|
key = prefix ? :"#{prefix}_#{key}" : key
case val
when Hash
new_hash.update flatten_hash(key, val)
else
new_hash[key] = val
end
end
return new_hash
end
def convert_values(values)
values_copy = values.dup
collected = []
values_copy.inject([]) do |collected, val|
collected << convert_value(val)
end
end
def convert_value(value)
case value
when true then "true"
when false then "false"
when Hash then convert_value(value.values)
when Array then value.map(&:to_s)
else value.to_s
end
end
end
end # GithubCLI
## Instruction:
Add string truncation and padding.
## Code After:
module GithubCLI
module Util
extend self
def flatten_hash(prefix=nil, hash)
new_hash ||= {}
hash.each do |key, val|
key = prefix ? :"#{prefix}_#{key}" : key
case val
when Hash
new_hash.update flatten_hash(key, val)
else
new_hash[key] = val
end
end
return new_hash
end
def convert_values(values)
values_copy = values.dup
collected = []
values_copy.inject([]) do |collected, val|
collected << convert_value(val)
end
end
def convert_value(value)
case value
when true then "true"
when false then "false"
when Hash then convert_value(value.values)
when Array then value.map(&:to_s)
else value.to_s
end
end
# Shortens string
# :trailing - trailing character in place of cutout string
def truncate(string, width, options={})
trailing = options[:trailing] || '…'
chars = string.to_s.chars.to_a
if chars.length < width && chars.length > 3
chars.join
elsif chars.length > 3
(chars[0, width - 1].join) + trailing
end
end
# Pads a string
# padder - padding character
# align - align :left, :right, :center
def pad(string, width, options={})
padder = options[:padder] || ' '
align = options[:align] || :left
chars = string.chars.to_a
if chars.length < width
string = case :"#{align}"
when :left
string + (padder * (width - chars.length))
when :right
(padder * (width - chars.length)) + string
when :center
right = ((pad_length = width - chars.length).to_f / 2).ceil
left = pad_length - right
(padder * left) + string + (padder * right)
end
end
string
end
end
end # GithubCLI
|
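
To give a feel for the two new helpers, here is a small usage sketch based on the implementation above (the outputs are worked out by hand from the code, so treat them as illustrative rather than authoritative):

```ruby
GithubCLI::Util.truncate("A very long repository name", 12)
# => "A very long…"   (keeps width - 1 characters, then appends the trailing marker)

GithubCLI::Util.pad("name", 10, :align => :center, :padder => '.')
# => "...name..."     (when the split is uneven, the extra padding character goes on the right)
```

One quirk worth noting: as written, `truncate` falls through both branches for inputs of three characters or fewer and returns `nil` rather than the original string.
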
bac9455e01244b1874b12fe3c793f589ff5b12d0 | app.rb | app.rb | require 'sinatra/base'
require 'tilt/haml'
require 'json'
require 'digest/sha2'
require 'open-uri'
require 'bcrypt'
require 'net/smtp'
require 'sendgrid-ruby'
require 'data_mapper'
require 'dm-serializer'
require 'dm-types'
require 'typhoeus'
require 'fog'
require 'mime-types'
require 'uuidtools'
require 'braintree'
# The main class for the kw-api
class KWApi < Sinatra::Base
enable :sessions
configure :production do
set :haml, ugly: true
set :clean_trace, true
end
configure :development do
# ...
end
helpers do
include Rack::Utils
alias_method :h, :escape_html
end
end
require_relative 'models/init'
require_relative 'helpers/init'
require_relative 'routes/init'
| require 'sinatra/base'
require 'tilt/haml'
require 'json'
require 'digest/sha2'
require 'open-uri'
require 'bcrypt'
require 'net/smtp'
require 'sendgrid-ruby'
require 'data_mapper'
require 'dm-serializer'
require 'dm-types'
require 'typhoeus'
require 'fog'
require 'mime-types'
require 'uuidtools'
require 'braintree'
# The main class for the kw-api
class KWApi < Sinatra::Base
enable :sessions
configure :production do
set :haml, ugly: true
set :clean_trace, true
end
configure :development do
# ...
end
configure do
Braintree::Configuration.environment = :sandbox
Braintree::Configuration.merchant_id = ENV['BRAINTREE_MERCHANT']
Braintree::Configuration.public_key = ENV['BRAINTREE_PUBLIC']
Braintree::Configuration.private_key = ENV['BRAINTREE_PRIVATE']
end
helpers do
include Rack::Utils
alias_method :h, :escape_html
end
end
require_relative 'models/init'
require_relative 'helpers/init'
require_relative 'routes/init'
| Add global config for braintree | Add global config for braintree
| Ruby | mit | KWStudios/kw-api,KWStudios/kw-api,KWStudios/kw-api | ruby | ## Code Before:
require 'sinatra/base'
require 'tilt/haml'
require 'json'
require 'digest/sha2'
require 'open-uri'
require 'bcrypt'
require 'net/smtp'
require 'sendgrid-ruby'
require 'data_mapper'
require 'dm-serializer'
require 'dm-types'
require 'typhoeus'
require 'fog'
require 'mime-types'
require 'uuidtools'
require 'braintree'
# The main class for the kw-api
class KWApi < Sinatra::Base
enable :sessions
configure :production do
set :haml, ugly: true
set :clean_trace, true
end
configure :development do
# ...
end
helpers do
include Rack::Utils
alias_method :h, :escape_html
end
end
require_relative 'models/init'
require_relative 'helpers/init'
require_relative 'routes/init'
## Instruction:
Add global config for braintree
## Code After:
require 'sinatra/base'
require 'tilt/haml'
require 'json'
require 'digest/sha2'
require 'open-uri'
require 'bcrypt'
require 'net/smtp'
require 'sendgrid-ruby'
require 'data_mapper'
require 'dm-serializer'
require 'dm-types'
require 'typhoeus'
require 'fog'
require 'mime-types'
require 'uuidtools'
require 'braintree'
# The main class for the kw-api
class KWApi < Sinatra::Base
enable :sessions
configure :production do
set :haml, ugly: true
set :clean_trace, true
end
configure :development do
# ...
end
configure do
Braintree::Configuration.environment = :sandbox
Braintree::Configuration.merchant_id = ENV['BRAINTREE_MERCHANT']
Braintree::Configuration.public_key = ENV['BRAINTREE_PUBLIC']
Braintree::Configuration.private_key = ENV['BRAINTREE_PRIVATE']
end
helpers do
include Rack::Utils
alias_method :h, :escape_html
end
end
require_relative 'models/init'
require_relative 'helpers/init'
require_relative 'routes/init'
|
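
With the gateway configured once at boot, route handlers can call the Braintree client directly. None of the following is part of the commit; it is a hedged sketch of typical usage, and the endpoint paths and parameter names are made up:

```ruby
# Illustrative only; routes and params are hypothetical.
class KWApi < Sinatra::Base
  get '/payment/client_token' do
    content_type :json
    { client_token: Braintree::ClientToken.generate }.to_json
  end

  post '/payment/checkout' do
    result = Braintree::Transaction.sale(
      amount: params[:amount],
      payment_method_nonce: params[:nonce]
    )
    halt 422, { error: result.message }.to_json unless result.success?
    { transaction_id: result.transaction.id }.to_json
  end
end
```

Since the environment is hard-coded to `:sandbox`, a production deployment would presumably read that setting from the environment as well, alongside the three credentials.
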
450a1f64a21afce008392e321fff2d268bb9fc41 | setup.py | setup.py | from distutils.core import setup
from distutils.extension import Extension
from Cython.Distutils import build_ext
import numpy
ALGPATH = "clusterpy/core/toolboxes/cluster/componentsAlg/"
ALGPKG = "clusterpy.core.toolboxes.cluster.componentsAlg."
CLUSPATH = "clusterpy/core/toolboxes/cluster/"
CLUSPKG = "clusterpy.core.toolboxes.cluster."
setup(
name='clusterPy',
version='0.9.9',
description='Library of spatially constrained clustering algorithms',
long_description="""
clusterPy is a Python library with algorithms for spatially constrained clustering. clusterPy offers you some of the most cited algorithms for spatial aggregation.""",
author='RiSE Group',
author_email='[email protected]',
url='http://www.rise-group.org/section/Software/clusterPy/',
packages=['clusterpy','clusterpy.core','clusterpy.core.data',
'clusterpy.core.geometry','clusterpy.core.toolboxes',
'clusterpy.core.toolboxes.cluster',
'clusterpy.core.toolboxes.cluster.componentsAlg'],
ext_modules = [Extension(CLUSPKG+"arisel", [CLUSPATH+"arisel.pyx"],
extra_link_args=['-fopenmp'],
extra_compile_args=['-fopenmp']
),
Extension(ALGPKG+"distanceFunctions", [ALGPATH+"distanceFunctions.pyx"]),
Extension(ALGPKG+"dist2Regions", [ALGPATH+"dist2Regions.pyx"]),
Extension(ALGPKG+"selectionTypeFunctions", [ALGPATH+"selectionTypeFunctions.pyx"]),
Extension(ALGPKG+"init", [ALGPATH+"init.pyx"]),
Extension(ALGPKG+"objFunctions", [ALGPATH+"objFunctions.pyx"])
],
cmdclass = {'build_ext': build_ext}
)
| from distutils.core import setup
from distutils.extension import Extension
setup(
name='clusterPy',
version='0.9.9',
description='Library of spatially constrained clustering algorithms',
long_description="""
clusterPy is a Python library with algorithms for spatially constrained clustering. clusterPy offers you some of the most cited algorithms for spatial aggregation.""",
author='RiSE Group',
author_email='[email protected]',
url='http://www.rise-group.org/section/Software/clusterPy/',
packages=['clusterpy','clusterpy.core','clusterpy.core.data',
'clusterpy.core.geometry','clusterpy.core.toolboxes',
'clusterpy.core.toolboxes.cluster',
'clusterpy.core.toolboxes.cluster.componentsAlg'],
)
| Remove cython Extension builder and build_ext from Setup | Remove cython Extension builder and build_ext from Setup
| Python | bsd-3-clause | clusterpy/clusterpy,clusterpy/clusterpy | python | ## Code Before:
from distutils.core import setup
from distutils.extension import Extension
from Cython.Distutils import build_ext
import numpy
ALGPATH = "clusterpy/core/toolboxes/cluster/componentsAlg/"
ALGPKG = "clusterpy.core.toolboxes.cluster.componentsAlg."
CLUSPATH = "clusterpy/core/toolboxes/cluster/"
CLUSPKG = "clusterpy.core.toolboxes.cluster."
setup(
name='clusterPy',
version='0.9.9',
description='Library of spatially constrained clustering algorithms',
long_description="""
clusterPy is a Python library with algorithms for spatially constrained clustering. clusterPy offers you some of the most cited algorithms for spatial aggregation.""",
author='RiSE Group',
author_email='[email protected]',
url='http://www.rise-group.org/section/Software/clusterPy/',
packages=['clusterpy','clusterpy.core','clusterpy.core.data',
'clusterpy.core.geometry','clusterpy.core.toolboxes',
'clusterpy.core.toolboxes.cluster',
'clusterpy.core.toolboxes.cluster.componentsAlg'],
ext_modules = [Extension(CLUSPKG+"arisel", [CLUSPATH+"arisel.pyx"],
extra_link_args=['-fopenmp'],
extra_compile_args=['-fopenmp']
),
Extension(ALGPKG+"distanceFunctions", [ALGPATH+"distanceFunctions.pyx"]),
Extension(ALGPKG+"dist2Regions", [ALGPATH+"dist2Regions.pyx"]),
Extension(ALGPKG+"selectionTypeFunctions", [ALGPATH+"selectionTypeFunctions.pyx"]),
Extension(ALGPKG+"init", [ALGPATH+"init.pyx"]),
Extension(ALGPKG+"objFunctions", [ALGPATH+"objFunctions.pyx"])
],
cmdclass = {'build_ext': build_ext}
)
## Instruction:
Remove cython Extension builder and build_ext from Setup
## Code After:
from distutils.core import setup
from distutils.extension import Extension
setup(
name='clusterPy',
version='0.9.9',
description='Library of spatially constrained clustering algorithms',
long_description="""
clusterPy is a Python library with algorithms for spatially constrained clustering. clusterPy offers you some of the most cited algorithms for spatial aggregation.""",
author='RiSE Group',
author_email='[email protected]',
url='http://www.rise-group.org/section/Software/clusterPy/',
packages=['clusterpy','clusterpy.core','clusterpy.core.data',
'clusterpy.core.geometry','clusterpy.core.toolboxes',
'clusterpy.core.toolboxes.cluster',
'clusterpy.core.toolboxes.cluster.componentsAlg'],
)
|
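
With `ext_modules`, the Cython `build_ext` command class and the NumPy import gone, the package installs as pure Python, so no C compiler, Cython or OpenMP toolchain is needed at install time (illustrative command below; runtime dependencies are a separate question):

```
pip install .    # or: python setup.py install
```
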
e78910c8b9ecf48f96a693dae3c15afa32a12da1 | casexml/apps/phone/views.py | casexml/apps/phone/views.py | from django_digest.decorators import *
from casexml.apps.phone import xml
from casexml.apps.case.models import CommCareCase
from casexml.apps.phone.restore import generate_restore_response
from casexml.apps.phone.models import User
from casexml.apps.case import const
@httpdigest
def restore(request):
user = User.from_django_user(request.user)
restore_id = request.GET.get('since')
return generate_restore_response(user, restore_id)
def xml_for_case(request, case_id, version="1.0"):
"""
Test view to get the xml for a particular case
"""
from django.http import HttpResponse
case = CommCareCase.get(case_id)
return HttpResponse(xml.get_case_xml(case, [const.CASE_ACTION_CREATE,
const.CASE_ACTION_UPDATE],
version), mimetype="text/xml")
| from django.http import HttpResponse
from django_digest.decorators import *
from casexml.apps.phone import xml
from casexml.apps.case.models import CommCareCase
from casexml.apps.phone.restore import generate_restore_response
from casexml.apps.phone.models import User
from casexml.apps.case import const
@httpdigest
def restore(request):
user = User.from_django_user(request.user)
restore_id = request.GET.get('since')
return generate_restore_response(user, restore_id)
def xml_for_case(request, case_id, version="1.0"):
"""
Test view to get the xml for a particular case
"""
case = CommCareCase.get(case_id)
return HttpResponse(xml.get_case_xml(case, [const.CASE_ACTION_CREATE,
const.CASE_ACTION_UPDATE],
version), mimetype="text/xml")
| Revert "moving httpresponse to view" | Revert "moving httpresponse to view"
This reverts commit a6f501bb9de6382e35372996851916adac067fa0.
| Python | bsd-3-clause | SEL-Columbia/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,SEL-Columbia/commcare-hq,qedsoftware/commcare-hq,puttarajubr/commcare-hq,puttarajubr/commcare-hq,puttarajubr/commcare-hq,dimagi/commcare-hq,puttarajubr/commcare-hq,SEL-Columbia/commcare-hq | python | ## Code Before:
from django_digest.decorators import *
from casexml.apps.phone import xml
from casexml.apps.case.models import CommCareCase
from casexml.apps.phone.restore import generate_restore_response
from casexml.apps.phone.models import User
from casexml.apps.case import const
@httpdigest
def restore(request):
user = User.from_django_user(request.user)
restore_id = request.GET.get('since')
return generate_restore_response(user, restore_id)
def xml_for_case(request, case_id, version="1.0"):
"""
Test view to get the xml for a particular case
"""
from django.http import HttpResponse
case = CommCareCase.get(case_id)
return HttpResponse(xml.get_case_xml(case, [const.CASE_ACTION_CREATE,
const.CASE_ACTION_UPDATE],
version), mimetype="text/xml")
## Instruction:
Revert "moving httpresponse to view"
This reverts commit a6f501bb9de6382e35372996851916adac067fa0.
## Code After:
from django.http import HttpResponse
from django_digest.decorators import *
from casexml.apps.phone import xml
from casexml.apps.case.models import CommCareCase
from casexml.apps.phone.restore import generate_restore_response
from casexml.apps.phone.models import User
from casexml.apps.case import const
@httpdigest
def restore(request):
user = User.from_django_user(request.user)
restore_id = request.GET.get('since')
return generate_restore_response(user, restore_id)
def xml_for_case(request, case_id, version="1.0"):
"""
Test view to get the xml for a particular case
"""
case = CommCareCase.get(case_id)
return HttpResponse(xml.get_case_xml(case, [const.CASE_ACTION_CREATE,
const.CASE_ACTION_UPDATE],
version), mimetype="text/xml")
|
9feb800985250c12ed35c3934cde0ef00dc3867e | drivers/tty/serial/uart/uart_driver.c | drivers/tty/serial/uart/uart_driver.c |
static int uart_write(const char *s);
static struct char_driver_operations uart_ops = {
.write = uart_write,
};
static struct char_driver uart_driver = {
.name = "UART",
};
void uart_init(void)
{
struct char_driver *uart = get_uart_driver_instance();
uart->ops = &uart_ops;
}
struct char_driver *
get_uart_driver_instance(void)
{
if (!uart_driver.ops)
uart_init();
return &uart_driver;
}
static int
uart_write(const char *s)
{
while (*s) {
while (*(UART0 + UARTFR) & UARTFR_TXFF)
;
*UART0 = *s;
s++;
}
return 0;
}
|
static int uart_write(const char *s);
static struct char_driver_operations uart_ops = {
.write = uart_write,
};
static struct char_driver uart_driver = {
.name = "UART",
};
static void
uart_init(void)
{
uart_driver.ops = &uart_ops;
}
struct char_driver *
get_uart_driver_instance(void)
{
if (!uart_driver.ops)
uart_init();
return &uart_driver;
}
static int
uart_write(const char *s)
{
while (*s) {
while (*(UART0 + UARTFR) & UARTFR_TXFF)
;
*UART0 = *s;
s++;
}
return 0;
}
 | Fix circular reference bug in uart_init() | Fix circular reference bug in uart_init()
Don't call get_uart_driver_instance() from uart_init().
| C | bsd-3-clause | masami256/mini-arm-kernel | c | ## Code Before:
static int uart_write(const char *s);
static struct char_driver_operations uart_ops = {
.write = uart_write,
};
static struct char_driver uart_driver = {
.name = "UART",
};
void uart_init(void)
{
struct char_driver *uart = get_uart_driver_instance();
uart->ops = &uart_ops;
}
struct char_driver *
get_uart_driver_instance(void)
{
if (!uart_driver.ops)
uart_init();
return &uart_driver;
}
static int
uart_write(const char *s)
{
while (*s) {
while (*(UART0 + UARTFR) & UARTFR_TXFF)
;
*UART0 = *s;
s++;
}
return 0;
}
## Instruction:
Fix circular reference but in uart_init()
Don't call get_uart_driver_instance() from uart_init().
## Code After:
static int uart_write(const char *s);
static struct char_driver_operations uart_ops = {
.write = uart_write,
};
static struct char_driver uart_driver = {
.name = "UART",
};
static void
uart_init(void)
{
uart_driver.ops = &uart_ops;
}
struct char_driver *
get_uart_driver_instance(void)
{
if (!uart_driver.ops)
uart_init();
return &uart_driver;
}
static int
uart_write(const char *s)
{
while (*s) {
while (*(UART0 + UARTFR) & UARTFR_TXFF)
;
*UART0 = *s;
s++;
}
return 0;
}
|
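
The fix makes the lazy-initialisation path one-directional: `get_uart_driver_instance()` may call `uart_init()`, but `uart_init()` now assigns `uart_driver.ops` directly instead of calling back into the accessor (which previously recursed forever on first use, since `ops` was still NULL when the accessor re-checked it). A hedged caller sketch based on the structs shown above:

```c
/* Illustrative caller only; the header name is hypothetical. */
#include "char_driver.h"

void console_banner(void)
{
	/* The first call performs the lazy init; no recursion back into the accessor. */
	struct char_driver *uart = get_uart_driver_instance();

	uart->ops->write("mini-arm-kernel: console ready\n");
}
```
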
00db43d778eeb9a73e93664b20c8eb86ad7cee13 | css/style.css | css/style.css | html, body, #map-canvas {
height: 100%;
margin: 0px;
padding: 0px
}
.topper {
height: 5%;
}
.drawer {
background-color: #fff;
height: 95%;
overflow-y: scroll;
position: absolute;
transform: translate(-300px, 0);
transition: transform 0.3s ease;
width: 300px;
will-change: transform;
z-index: 10;
-webkit-transform: translate(-300px, 0);
}
.drawer.open {
-webkit-transform: translate(0, 0);
transform: translate(0, 0);
}
.location-list {
list-style: none;
width: 100%;
padding: 0;
margin: 0;
}
.location-item {
border-bottom: 1px solid #E0E0E0;
box-sizing: border-box;
display: list-item;
line-height: 24px;
padding: 24px;
text-align: left;
width: 100%;
}
#map-canvas {
height: 95%;
} | /* Whole document */
html, body {
height: 100%;
margin: 0;
padding: 0;
}
/* Mobile top area */
.topper {
background-color: #f53c24;
height: 48px;
text-align: right;
}
.hamburger {
color: #fff;
display: inline-block;
font-size: 1.8em;
height: 48px;
line-height: 48px;
text-align: center;
width: 48px;
}
/* Drawer */
.drawer {
background-color: #fff;
height: calc(100% - 48px);
overflow-y: scroll;
position: absolute;
transform: translate(-300px, 0);
transition: transform 0.3s ease;
width: 300px;
will-change: transform;
z-index: 10;
-webkit-transform: translate(-300px, 0);
}
.drawer.open {
-webkit-transform: translate(0, 0);
transform: translate(0, 0);
}
.location-list {
list-style: none;
width: 100%;
padding: 0;
margin: 0;
}
.location-item {
border-bottom: 1px solid #E0E0E0;
box-sizing: border-box;
display: list-item;
line-height: 24px;
padding: 24px;
text-align: left;
width: 100%;
}
/* Map */
#map-canvas {
height: calc(100% - 48px);
margin: 0;
padding: 0;
} | Change color and alignment in top bar | feat: Change color and alignment in top bar
| CSS | mit | SittingFox/frontend-nanodegree-neighborhood-map,SittingFox/frontend-nanodegree-neighborhood-map | css | ## Code Before:
html, body, #map-canvas {
height: 100%;
margin: 0px;
padding: 0px
}
.topper {
height: 5%;
}
.drawer {
background-color: #fff;
height: 95%;
overflow-y: scroll;
position: absolute;
transform: translate(-300px, 0);
transition: transform 0.3s ease;
width: 300px;
will-change: transform;
z-index: 10;
-webkit-transform: translate(-300px, 0);
}
.drawer.open {
-webkit-transform: translate(0, 0);
transform: translate(0, 0);
}
.location-list {
list-style: none;
width: 100%;
padding: 0;
margin: 0;
}
.location-item {
border-bottom: 1px solid #E0E0E0;
box-sizing: border-box;
display: list-item;
line-height: 24px;
padding: 24px;
text-align: left;
width: 100%;
}
#map-canvas {
height: 95%;
}
## Instruction:
feat: Change color and alignment in top bar
## Code After:
/* Whole document */
html, body {
height: 100%;
margin: 0;
padding: 0;
}
/* Mobile top area */
.topper {
background-color: #f53c24;
height: 48px;
text-align: right;
}
.hamburger {
color: #fff;
display: inline-block;
font-size: 1.8em;
height: 48px;
line-height: 48px;
text-align: center;
width: 48px;
}
/* Drawer */
.drawer {
background-color: #fff;
height: calc(100% - 48px);
overflow-y: scroll;
position: absolute;
transform: translate(-300px, 0);
transition: transform 0.3s ease;
width: 300px;
will-change: transform;
z-index: 10;
-webkit-transform: translate(-300px, 0);
}
.drawer.open {
-webkit-transform: translate(0, 0);
transform: translate(0, 0);
}
.location-list {
list-style: none;
width: 100%;
padding: 0;
margin: 0;
}
.location-item {
border-bottom: 1px solid #E0E0E0;
box-sizing: border-box;
display: list-item;
line-height: 24px;
padding: 24px;
text-align: left;
width: 100%;
}
/* Map */
#map-canvas {
height: calc(100% - 48px);
margin: 0;
padding: 0;
} |
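
The stylesheet now assumes a fixed 48px top bar, with both the drawer and the map sized via `calc(100% - 48px)` so they fill the remaining viewport height. The markup itself is not part of this commit; one plausible shape that these selectors expect would be:

```html
<!-- Hypothetical markup; class names and ids are taken from the stylesheet above. -->
<body>
  <header class="topper">
    <a href="#" class="hamburger">☰</a>
  </header>
  <aside class="drawer">
    <ul class="location-list">
      <li class="location-item">Example location</li>
    </ul>
  </aside>
  <div id="map-canvas"></div>
</body>
```
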
9a9a2b032f5f0180d3a2c229a7d0b632479b1838 | Casks/tg-pro.rb | Casks/tg-pro.rb | cask :v1 => 'tg-pro' do
version '2.8.7'
sha256 '32b622cec40f4cfe0cd5455dd110696b8284524aadd92c552a769c130bbc88e7'
url "http://www.tunabellysoftware.com/resources/TGPro_#{version.gsub('.','_')}.zip"
name 'TG Pro'
appcast 'http://tunabellysoftware.com/resources/sparkle/tgpro/profileInfo.php',
:sha256 => 'e276cc14d86471bc7c416faefc7e8bcffe94da4458c87c71c2f14287414df5fa'
homepage 'http://www.tunabellysoftware.com/tgpro/'
license :commercial
app 'TG Pro.app'
end
| cask :v1 => 'tg-pro' do
version '2.8.8'
sha256 'dcb4221d4b72960c306e248a0be947107ab3622b0c38570684b2f12c8ef87a44'
url "http://www.tunabellysoftware.com/resources/TGPro_#{version.gsub('.','_')}.zip"
name 'TG Pro'
appcast 'http://tunabellysoftware.com/resources/sparkle/tgpro/profileInfo.php',
:sha256 => 'ae55143d14a7a75093439c723db19e8672952efff4e38de0e0682a5037c455de'
homepage 'http://www.tunabellysoftware.com/tgpro/'
license :commercial
app 'TG Pro.app'
end
| Update TG Pro to 2.8.8 | Update TG Pro to 2.8.8
| Ruby | bsd-2-clause | gilesdring/homebrew-cask,mrmachine/homebrew-cask,lukasbestle/homebrew-cask,sanchezm/homebrew-cask,gurghet/homebrew-cask,morganestes/homebrew-cask,toonetown/homebrew-cask,skatsuta/homebrew-cask,sjackman/homebrew-cask,cobyism/homebrew-cask,AnastasiaSulyagina/homebrew-cask,n8henrie/homebrew-cask,winkelsdorf/homebrew-cask,neverfox/homebrew-cask,elnappo/homebrew-cask,tmoreira2020/homebrew,vigosan/homebrew-cask,neverfox/homebrew-cask,Saklad5/homebrew-cask,claui/homebrew-cask,hanxue/caskroom,perfide/homebrew-cask,dwihn0r/homebrew-cask,jawshooah/homebrew-cask,samshadwell/homebrew-cask,amatos/homebrew-cask,afh/homebrew-cask,nshemonsky/homebrew-cask,miccal/homebrew-cask,MerelyAPseudonym/homebrew-cask,nathanielvarona/homebrew-cask,jasmas/homebrew-cask,athrunsun/homebrew-cask,mattrobenolt/homebrew-cask,timsutton/homebrew-cask,helloIAmPau/homebrew-cask,moogar0880/homebrew-cask,lantrix/homebrew-cask,jeanregisser/homebrew-cask,toonetown/homebrew-cask,renard/homebrew-cask,tsparber/homebrew-cask,forevergenin/homebrew-cask,mauricerkelly/homebrew-cask,stigkj/homebrew-caskroom-cask,gyndav/homebrew-cask,xcezx/homebrew-cask,lukeadams/homebrew-cask,giannitm/homebrew-cask,kiliankoe/homebrew-cask,stonehippo/homebrew-cask,jconley/homebrew-cask,chadcatlett/caskroom-homebrew-cask,mauricerkelly/homebrew-cask,singingwolfboy/homebrew-cask,moogar0880/homebrew-cask,m3nu/homebrew-cask,tjnycum/homebrew-cask,JacopKane/homebrew-cask,kpearson/homebrew-cask,antogg/homebrew-cask,julionc/homebrew-cask,mahori/homebrew-cask,Ketouem/homebrew-cask,howie/homebrew-cask,13k/homebrew-cask,corbt/homebrew-cask,seanorama/homebrew-cask,cblecker/homebrew-cask,shonjir/homebrew-cask,joshka/homebrew-cask,mahori/homebrew-cask,kesara/homebrew-cask,leipert/homebrew-cask,slack4u/homebrew-cask,jiashuw/homebrew-cask,cprecioso/homebrew-cask,mazehall/homebrew-cask,timsutton/homebrew-cask,jacobbednarz/homebrew-cask,shorshe/homebrew-cask,13k/homebrew-cask,nathansgreen/homebrew-cask,a1russell/homebrew-cask,mgryszko/homebrew-cask,esebastian/homebrew-cask,mgryszko/homebrew-cask,bdhess/homebrew-cask,colindunn/homebrew-cask,mindriot101/homebrew-cask,scottsuch/homebrew-cask,artdevjs/homebrew-cask,singingwolfboy/homebrew-cask,JikkuJose/homebrew-cask,kingthorin/homebrew-cask,franklouwers/homebrew-cask,mathbunnyru/homebrew-cask,koenrh/homebrew-cask,retrography/homebrew-cask,lumaxis/homebrew-cask,forevergenin/homebrew-cask,mlocher/homebrew-cask,opsdev-ws/homebrew-cask,n0ts/homebrew-cask,lucasmezencio/homebrew-cask,tolbkni/homebrew-cask,miguelfrde/homebrew-cask,lifepillar/homebrew-cask,johnjelinek/homebrew-cask,FinalDes/homebrew-cask,shorshe/homebrew-cask,vigosan/homebrew-cask,samdoran/homebrew-cask,hovancik/homebrew-cask,adrianchia/homebrew-cask,troyxmccall/homebrew-cask,jangalinski/homebrew-cask,chuanxd/homebrew-cask,Dremora/homebrew-cask,sosedoff/homebrew-cask,paour/homebrew-cask,sanyer/homebrew-cask,jedahan/homebrew-cask,morganestes/homebrew-cask,jonathanwiesel/homebrew-cask,reitermarkus/homebrew-cask,gabrielizaias/homebrew-cask,kingthorin/homebrew-cask,bosr/homebrew-cask,m3nu/homebrew-cask,wickedsp1d3r/homebrew-cask,casidiablo/homebrew-cask,paour/homebrew-cask,coeligena/homebrew-customized,gmkey/homebrew-cask,ywfwj2008/homebrew-cask,KosherBacon/homebrew-cask,goxberry/homebrew-cask,andyli/homebrew-cask,scribblemaniac/homebrew-cask,moimikey/homebrew-cask,tjt263/homebrew-cask,sjackman/homebrew-cask,cfillion/homebrew-cask,puffdad/homebrew-cask,syscrusher/homebrew-cask,neverfox/homebrew-cask,Ketouem/homebrew-cask,miku/homebrew-cask,feigaochn/h
omebrew-cask,victorpopkov/homebrew-cask,ksato9700/homebrew-cask,BenjaminHCCarr/homebrew-cask,hovancik/homebrew-cask,Cottser/homebrew-cask,tjnycum/homebrew-cask,mathbunnyru/homebrew-cask,reelsense/homebrew-cask,fanquake/homebrew-cask,arronmabrey/homebrew-cask,kongslund/homebrew-cask,troyxmccall/homebrew-cask,tyage/homebrew-cask,anbotero/homebrew-cask,anbotero/homebrew-cask,thehunmonkgroup/homebrew-cask,hanxue/caskroom,ninjahoahong/homebrew-cask,tmoreira2020/homebrew,nrlquaker/homebrew-cask,kronicd/homebrew-cask,yuhki50/homebrew-cask,phpwutz/homebrew-cask,nightscape/homebrew-cask,SentinelWarren/homebrew-cask,exherb/homebrew-cask,patresi/homebrew-cask,Bombenleger/homebrew-cask,okket/homebrew-cask,johndbritton/homebrew-cask,buo/homebrew-cask,gyndav/homebrew-cask,sgnh/homebrew-cask,joshka/homebrew-cask,onlynone/homebrew-cask,BenjaminHCCarr/homebrew-cask,kiliankoe/homebrew-cask,jmeridth/homebrew-cask,stephenwade/homebrew-cask,chuanxd/homebrew-cask,tangestani/homebrew-cask,blainesch/homebrew-cask,cprecioso/homebrew-cask,joschi/homebrew-cask,samdoran/homebrew-cask,guerrero/homebrew-cask,Saklad5/homebrew-cask,jellyfishcoder/homebrew-cask,colindean/homebrew-cask,jonathanwiesel/homebrew-cask,goxberry/homebrew-cask,KosherBacon/homebrew-cask,yutarody/homebrew-cask,squid314/homebrew-cask,jgarber623/homebrew-cask,diguage/homebrew-cask,pkq/homebrew-cask,Dremora/homebrew-cask,optikfluffel/homebrew-cask,jedahan/homebrew-cask,samnung/homebrew-cask,jalaziz/homebrew-cask,mingzhi22/homebrew-cask,ldong/homebrew-cask,lantrix/homebrew-cask,muan/homebrew-cask,Ephemera/homebrew-cask,albertico/homebrew-cask,rickychilcott/homebrew-cask,jbeagley52/homebrew-cask,mwean/homebrew-cask,mhubig/homebrew-cask,chadcatlett/caskroom-homebrew-cask,vin047/homebrew-cask,cobyism/homebrew-cask,a1russell/homebrew-cask,gibsjose/homebrew-cask,0xadada/homebrew-cask,ddm/homebrew-cask,xtian/homebrew-cask,adrianchia/homebrew-cask,riyad/homebrew-cask,nrlquaker/homebrew-cask,seanzxx/homebrew-cask,alexg0/homebrew-cask,danielbayley/homebrew-cask,dictcp/homebrew-cask,thehunmonkgroup/homebrew-cask,cliffcotino/homebrew-cask,mishari/homebrew-cask,scottsuch/homebrew-cask,kingthorin/homebrew-cask,samshadwell/homebrew-cask,lucasmezencio/homebrew-cask,esebastian/homebrew-cask,mathbunnyru/homebrew-cask,jaredsampson/homebrew-cask,sosedoff/homebrew-cask,yurikoles/homebrew-cask,mwean/homebrew-cask,markthetech/homebrew-cask,lcasey001/homebrew-cask,claui/homebrew-cask,mattrobenolt/homebrew-cask,mjgardner/homebrew-cask,Keloran/homebrew-cask,xyb/homebrew-cask,kkdd/homebrew-cask,reitermarkus/homebrew-cask,decrement/homebrew-cask,klane/homebrew-cask,faun/homebrew-cask,yutarody/homebrew-cask,psibre/homebrew-cask,alexg0/homebrew-cask,aguynamedryan/homebrew-cask,mingzhi22/homebrew-cask,nathancahill/homebrew-cask,thii/homebrew-cask,Fedalto/homebrew-cask,FredLackeyOfficial/homebrew-cask,larseggert/homebrew-cask,xyb/homebrew-cask,victorpopkov/homebrew-cask,julionc/homebrew-cask,miku/homebrew-cask,gerrypower/homebrew-cask,ebraminio/homebrew-cask,n0ts/homebrew-cask,ptb/homebrew-cask,rajiv/homebrew-cask,Keloran/homebrew-cask,tedbundyjr/homebrew-cask,blogabe/homebrew-cask,AnastasiaSulyagina/homebrew-cask,yuhki50/homebrew-cask,JosephViolago/homebrew-cask,tarwich/homebrew-cask,brianshumate/homebrew-cask,6uclz1/homebrew-cask,jiashuw/homebrew-cask,greg5green/homebrew-cask,afh/homebrew-cask,mikem/homebrew-cask,imgarylai/homebrew-cask,pkq/homebrew-cask,hyuna917/homebrew-cask,Fedalto/homebrew-cask,jhowtan/homebrew-cask,RJHsiao/homebrew-cask,thii/homebrew-cask,imgarylai/homebrew-ca
sk,flaviocamilo/homebrew-cask,Amorymeltzer/homebrew-cask,williamboman/homebrew-cask,dictcp/homebrew-cask,tan9/homebrew-cask,gilesdring/homebrew-cask,mishari/homebrew-cask,shoichiaizawa/homebrew-cask,rogeriopradoj/homebrew-cask,malob/homebrew-cask,cedwardsmedia/homebrew-cask,caskroom/homebrew-cask,jawshooah/homebrew-cask,josa42/homebrew-cask,My2ndAngelic/homebrew-cask,wastrachan/homebrew-cask,zmwangx/homebrew-cask,inz/homebrew-cask,bric3/homebrew-cask,tedbundyjr/homebrew-cask,gurghet/homebrew-cask,otaran/homebrew-cask,julionc/homebrew-cask,wickles/homebrew-cask,bosr/homebrew-cask,tarwich/homebrew-cask,elyscape/homebrew-cask,wKovacs64/homebrew-cask,ksato9700/homebrew-cask,gmkey/homebrew-cask,nshemonsky/homebrew-cask,blainesch/homebrew-cask,dustinblackman/homebrew-cask,markhuber/homebrew-cask,schneidmaster/homebrew-cask,renaudguerin/homebrew-cask,ianyh/homebrew-cask,reitermarkus/homebrew-cask,cfillion/homebrew-cask,ddm/homebrew-cask,shonjir/homebrew-cask,daften/homebrew-cask,esebastian/homebrew-cask,nightscape/homebrew-cask,antogg/homebrew-cask,adrianchia/homebrew-cask,fharbe/homebrew-cask,helloIAmPau/homebrew-cask,chrisfinazzo/homebrew-cask,yurikoles/homebrew-cask,zerrot/homebrew-cask,Labutin/homebrew-cask,andrewdisley/homebrew-cask,inta/homebrew-cask,crzrcn/homebrew-cask,antogg/homebrew-cask,tangestani/homebrew-cask,jeroenj/homebrew-cask,RJHsiao/homebrew-cask,danielbayley/homebrew-cask,jmeridth/homebrew-cask,Bombenleger/homebrew-cask,dwihn0r/homebrew-cask,squid314/homebrew-cask,lukasbestle/homebrew-cask,tsparber/homebrew-cask,moimikey/homebrew-cask,kassi/homebrew-cask,nathancahill/homebrew-cask,jeroenseegers/homebrew-cask,MichaelPei/homebrew-cask,sgnh/homebrew-cask,sebcode/homebrew-cask,tangestani/homebrew-cask,rogeriopradoj/homebrew-cask,franklouwers/homebrew-cask,markhuber/homebrew-cask,diguage/homebrew-cask,elnappo/homebrew-cask,sebcode/homebrew-cask,xight/homebrew-cask,jconley/homebrew-cask,MircoT/homebrew-cask,MoOx/homebrew-cask,danielbayley/homebrew-cask,pacav69/homebrew-cask,puffdad/homebrew-cask,nathansgreen/homebrew-cask,codeurge/homebrew-cask,hyuna917/homebrew-cask,seanorama/homebrew-cask,williamboman/homebrew-cask,haha1903/homebrew-cask,reelsense/homebrew-cask,jbeagley52/homebrew-cask,jppelteret/homebrew-cask,howie/homebrew-cask,johndbritton/homebrew-cask,doits/homebrew-cask,zmwangx/homebrew-cask,MichaelPei/homebrew-cask,phpwutz/homebrew-cask,rogeriopradoj/homebrew-cask,andrewdisley/homebrew-cask,joshka/homebrew-cask,0rax/homebrew-cask,deanmorin/homebrew-cask,psibre/homebrew-cask,sscotth/homebrew-cask,cliffcotino/homebrew-cask,albertico/homebrew-cask,sanyer/homebrew-cask,fanquake/homebrew-cask,FranklinChen/homebrew-cask,JacopKane/homebrew-cask,jasmas/homebrew-cask,theoriginalgri/homebrew-cask,deiga/homebrew-cask,santoshsahoo/homebrew-cask,ebraminio/homebrew-cask,kronicd/homebrew-cask,corbt/homebrew-cask,leipert/homebrew-cask,sohtsuka/homebrew-cask,devmynd/homebrew-cask,deanmorin/homebrew-cask,jalaziz/homebrew-cask,boecko/homebrew-cask,kteru/homebrew-cask,flaviocamilo/homebrew-cask,mjgardner/homebrew-cask,hakamadare/homebrew-cask,jellyfishcoder/homebrew-cask,axodys/homebrew-cask,devmynd/homebrew-cask,jaredsampson/homebrew-cask,pkq/homebrew-cask,andyli/homebrew-cask,xyb/homebrew-cask,linc01n/homebrew-cask,scottsuch/homebrew-cask,amatos/homebrew-cask,cobyism/homebrew-cask,robertgzr/homebrew-cask,axodys/homebrew-cask,MerelyAPseudonym/homebrew-cask,mazehall/homebrew-cask,mjdescy/homebrew-cask,doits/homebrew-cask,mikem/homebrew-cask,hellosky806/homebrew-cask,elyscape/homebrew-cask,tedsk
i/homebrew-cask,ericbn/homebrew-cask,maxnordlund/homebrew-cask,vitorgalvao/homebrew-cask,zerrot/homebrew-cask,skatsuta/homebrew-cask,larseggert/homebrew-cask,wmorin/homebrew-cask,bcomnes/homebrew-cask,perfide/homebrew-cask,MoOx/homebrew-cask,coeligena/homebrew-customized,kesara/homebrew-cask,exherb/homebrew-cask,ksylvan/homebrew-cask,lumaxis/homebrew-cask,renard/homebrew-cask,scribblemaniac/homebrew-cask,JosephViolago/homebrew-cask,malob/homebrew-cask,hakamadare/homebrew-cask,theoriginalgri/homebrew-cask,okket/homebrew-cask,sscotth/homebrew-cask,lifepillar/homebrew-cask,stigkj/homebrew-caskroom-cask,jalaziz/homebrew-cask,singingwolfboy/homebrew-cask,y00rb/homebrew-cask,bcomnes/homebrew-cask,asins/homebrew-cask,cedwardsmedia/homebrew-cask,dcondrey/homebrew-cask,Ngrd/homebrew-cask,hellosky806/homebrew-cask,alebcay/homebrew-cask,miguelfrde/homebrew-cask,SentinelWarren/homebrew-cask,buo/homebrew-cask,ninjahoahong/homebrew-cask,gibsjose/homebrew-cask,seanzxx/homebrew-cask,y00rb/homebrew-cask,josa42/homebrew-cask,asbachb/homebrew-cask,yumitsu/homebrew-cask,bric3/homebrew-cask,napaxton/homebrew-cask,shonjir/homebrew-cask,dcondrey/homebrew-cask,mchlrmrz/homebrew-cask,dwkns/homebrew-cask,greg5green/homebrew-cask,Gasol/homebrew-cask,feigaochn/homebrew-cask,blogabe/homebrew-cask,kamilboratynski/homebrew-cask,winkelsdorf/homebrew-cask,stephenwade/homebrew-cask,ericbn/homebrew-cask,dwkns/homebrew-cask,Gasol/homebrew-cask,jeanregisser/homebrew-cask,jangalinski/homebrew-cask,deiga/homebrew-cask,stonehippo/homebrew-cask,Ephemera/homebrew-cask,slack4u/homebrew-cask,wickles/homebrew-cask,mjgardner/homebrew-cask,gyndav/homebrew-cask,codeurge/homebrew-cask,dictcp/homebrew-cask,markthetech/homebrew-cask,michelegera/homebrew-cask,mattrobenolt/homebrew-cask,a1russell/homebrew-cask,retbrown/homebrew-cask,sscotth/homebrew-cask,jeroenj/homebrew-cask,yumitsu/homebrew-cask,colindunn/homebrew-cask,otaran/homebrew-cask,fharbe/homebrew-cask,haha1903/homebrew-cask,JosephViolago/homebrew-cask,tedski/homebrew-cask,caskroom/homebrew-cask,ksylvan/homebrew-cask,tolbkni/homebrew-cask,robertgzr/homebrew-cask,tyage/homebrew-cask,diogodamiani/homebrew-cask,gerrypower/homebrew-cask,stevehedrick/homebrew-cask,andrewdisley/homebrew-cask,mrmachine/homebrew-cask,opsdev-ws/homebrew-cask,wmorin/homebrew-cask,Labutin/homebrew-cask,josa42/homebrew-cask,mlocher/homebrew-cask,0rax/homebrew-cask,timsutton/homebrew-cask,brianshumate/homebrew-cask,bdhess/homebrew-cask,jppelteret/homebrew-cask,wKovacs64/homebrew-cask,guerrero/homebrew-cask,farmerchris/homebrew-cask,shoichiaizawa/homebrew-cask,renaudguerin/homebrew-cask,Amorymeltzer/homebrew-cask,kpearson/homebrew-cask,inz/homebrew-cask,Ephemera/homebrew-cask,claui/homebrew-cask,lcasey001/homebrew-cask,coeligena/homebrew-customized,dvdoliveira/homebrew-cask,hristozov/homebrew-cask,paour/homebrew-cask,kkdd/homebrew-cask,nathanielvarona/homebrew-cask,sanyer/homebrew-cask,johnjelinek/homebrew-cask,wickedsp1d3r/homebrew-cask,tjnycum/homebrew-cask,6uclz1/homebrew-cask,xtian/homebrew-cask,xight/homebrew-cask,pacav69/homebrew-cask,xcezx/homebrew-cask,tjt263/homebrew-cask,stephenwade/homebrew-cask,janlugt/homebrew-cask,shoichiaizawa/homebrew-cask,imgarylai/homebrew-cask,ldong/homebrew-cask,yutarody/homebrew-cask,stevehedrick/homebrew-cask,alebcay/homebrew-cask,optikfluffel/homebrew-cask,xakraz/homebrew-cask,kamilboratynski/homebrew-cask,santoshsahoo/homebrew-cask,Cottser/homebrew-cask,asins/homebrew-cask,aguynamedryan/homebrew-cask,usami-k/homebrew-cask,decrement/homebrew-cask,hristozov/homebrew-cask,mhu
big/homebrew-cask,xakraz/homebrew-cask,lukeadams/homebrew-cask,diogodamiani/homebrew-cask,vitorgalvao/homebrew-cask,miccal/homebrew-cask,jpmat296/homebrew-cask,linc01n/homebrew-cask,mchlrmrz/homebrew-cask,FredLackeyOfficial/homebrew-cask,jeroenseegers/homebrew-cask,cblecker/homebrew-cask,michelegera/homebrew-cask,alexg0/homebrew-cask,hanxue/caskroom,farmerchris/homebrew-cask,MircoT/homebrew-cask,sohtsuka/homebrew-cask,FinalDes/homebrew-cask,nathanielvarona/homebrew-cask,ericbn/homebrew-cask,koenrh/homebrew-cask,jgarber623/homebrew-cask,samnung/homebrew-cask,rajiv/homebrew-cask,arronmabrey/homebrew-cask,mjdescy/homebrew-cask,schneidmaster/homebrew-cask,muan/homebrew-cask,jhowtan/homebrew-cask,athrunsun/homebrew-cask,faun/homebrew-cask,malford/homebrew-cask,rickychilcott/homebrew-cask,kTitan/homebrew-cask,My2ndAngelic/homebrew-cask,thomanq/homebrew-cask,chrisfinazzo/homebrew-cask,colindean/homebrew-cask,n8henrie/homebrew-cask,dustinblackman/homebrew-cask,casidiablo/homebrew-cask,miccal/homebrew-cask,kongslund/homebrew-cask,ianyh/homebrew-cask,scribblemaniac/homebrew-cask,kassi/homebrew-cask,daften/homebrew-cask,malob/homebrew-cask,alebcay/homebrew-cask,wastrachan/homebrew-cask,kTitan/homebrew-cask,CameronGarrett/homebrew-cask,tan9/homebrew-cask,inta/homebrew-cask,optikfluffel/homebrew-cask,cblecker/homebrew-cask,usami-k/homebrew-cask,dvdoliveira/homebrew-cask,jpmat296/homebrew-cask,rajiv/homebrew-cask,maxnordlund/homebrew-cask,uetchy/homebrew-cask,ywfwj2008/homebrew-cask,wmorin/homebrew-cask,chrisfinazzo/homebrew-cask,deiga/homebrew-cask,asbachb/homebrew-cask,riyad/homebrew-cask,crzrcn/homebrew-cask,Ibuprofen/homebrew-cask,Ngrd/homebrew-cask,klane/homebrew-cask,uetchy/homebrew-cask,boecko/homebrew-cask,gabrielizaias/homebrew-cask,joschi/homebrew-cask,BenjaminHCCarr/homebrew-cask,moimikey/homebrew-cask,yurikoles/homebrew-cask,JikkuJose/homebrew-cask,janlugt/homebrew-cask,0xadada/homebrew-cask,Ibuprofen/homebrew-cask,xight/homebrew-cask,CameronGarrett/homebrew-cask,stonehippo/homebrew-cask,jacobbednarz/homebrew-cask,giannitm/homebrew-cask,napaxton/homebrew-cask,onlynone/homebrew-cask,winkelsdorf/homebrew-cask,kesara/homebrew-cask,vin047/homebrew-cask,JacopKane/homebrew-cask,thomanq/homebrew-cask,malford/homebrew-cask,sanchezm/homebrew-cask,FranklinChen/homebrew-cask,mindriot101/homebrew-cask,ptb/homebrew-cask,joschi/homebrew-cask,kteru/homebrew-cask,mahori/homebrew-cask,patresi/homebrew-cask,artdevjs/homebrew-cask,Amorymeltzer/homebrew-cask,mchlrmrz/homebrew-cask,retbrown/homebrew-cask,uetchy/homebrew-cask,bric3/homebrew-cask,jgarber623/homebrew-cask,m3nu/homebrew-cask,nrlquaker/homebrew-cask,blogabe/homebrew-cask,syscrusher/homebrew-cask,retrography/homebrew-cask | ruby | ## Code Before:
cask :v1 => 'tg-pro' do
version '2.8.7'
sha256 '32b622cec40f4cfe0cd5455dd110696b8284524aadd92c552a769c130bbc88e7'
url "http://www.tunabellysoftware.com/resources/TGPro_#{version.gsub('.','_')}.zip"
name 'TG Pro'
appcast 'http://tunabellysoftware.com/resources/sparkle/tgpro/profileInfo.php',
:sha256 => 'e276cc14d86471bc7c416faefc7e8bcffe94da4458c87c71c2f14287414df5fa'
homepage 'http://www.tunabellysoftware.com/tgpro/'
license :commercial
app 'TG Pro.app'
end
## Instruction:
Update TG Pro to 2.8.8
## Code After:
cask :v1 => 'tg-pro' do
version '2.8.8'
sha256 'dcb4221d4b72960c306e248a0be947107ab3622b0c38570684b2f12c8ef87a44'
url "http://www.tunabellysoftware.com/resources/TGPro_#{version.gsub('.','_')}.zip"
name 'TG Pro'
appcast 'http://tunabellysoftware.com/resources/sparkle/tgpro/profileInfo.php',
:sha256 => 'ae55143d14a7a75093439c723db19e8672952efff4e38de0e0682a5037c455de'
homepage 'http://www.tunabellysoftware.com/tgpro/'
license :commercial
app 'TG Pro.app'
end
|
29e6ce4468d7d33cbdfb39baff5e4d47c838ea4c | README.md | README.md | This repository contains the source code for three nuget packages:
### [RockLib.Configuration](RockLib.Configuration) [](https://ci.appveyor.com/project/RockLib/rocklib-configuration-9b1x8)
Defines a static `Config` class as a general replacement for the old .NET Framework `ConfigurationManager` class.
### [RockLib.Configuration.ObjectFactory](RockLib.Configuration.ObjectFactory) [](https://ci.appveyor.com/project/RockLib/rocklib-configuration-qcxxq)
An alternative to `Microsoft.Extensions.Configuration.Binder` that supports non-default constructors and other features commonly found in JSON and XML serializers.
### [RockLib.Configuration.ProxyFactory](RockLib.Configuration.ProxyFactory) [](https://ci.appveyor.com/project/RockLib/rocklib-configuration-u6yve)
A factory that creates instances of property-only interfaces, defined at run-time, and populated with values defined in an instance of `IConfiguration`.
| This repository contains the source code for four nuget packages:
### [RockLib.Configuration](RockLib.Configuration) [](https://ci.appveyor.com/project/RockLib/rocklib-configuration-9b1x8)
Defines a static `Config` class as a general replacement for the old .NET Framework `ConfigurationManager` class.
### [RockLib.Configuration.ObjectFactory](RockLib.Configuration.ObjectFactory) [](https://ci.appveyor.com/project/RockLib/rocklib-configuration-qcxxq)
An alternative to `Microsoft.Extensions.Configuration.Binder` that supports non-default constructors and other features commonly found in JSON and XML serializers.
### [RockLib.Configuration.ProxyFactory](RockLib.Configuration.ProxyFactory) [](https://ci.appveyor.com/project/RockLib/rocklib-configuration-u6yve)
A factory that creates instances of property-only interfaces, defined at run-time, and populated with values defined in an instance of `IConfiguration`.
### [RockLib.Configuration.MessagingProvider](RockLib.Configuration.MessagingProvider) [](https://ci.appveyor.com/project/RockLib/rocklib-configuration)
A configuration provider that reloads when it receives a message containing configuration changes from a `RockLib.Messaging.IReceiver`.
| Add messaging provider to main readme | Add messaging provider to main readme
| Markdown | mit | RockFramework/RockLib.Configuration | markdown | ## Code Before:
This repository contains the source code for three nuget packages:
### [RockLib.Configuration](RockLib.Configuration) [](https://ci.appveyor.com/project/RockLib/rocklib-configuration-9b1x8)
Defines a static `Config` class as a general replacement for the old .NET Framework `ConfigurationManager` class.
### [RockLib.Configuration.ObjectFactory](RockLib.Configuration.ObjectFactory) [](https://ci.appveyor.com/project/RockLib/rocklib-configuration-qcxxq)
An alternative to `Microsoft.Extensions.Configuration.Binder` that supports non-default constructors and other features commonly found in JSON and XML serializers.
### [RockLib.Configuration.ProxyFactory](RockLib.Configuration.ProxyFactory) [](https://ci.appveyor.com/project/RockLib/rocklib-configuration-u6yve)
A factory that creates instances of property-only interfaces, defined at run-time, and populated with values defined in an instance of `IConfiguration`.
## Instruction:
Add messaging provider to main readme
## Code After:
This repository contains the source code for four nuget packages:
### [RockLib.Configuration](RockLib.Configuration) [](https://ci.appveyor.com/project/RockLib/rocklib-configuration-9b1x8)
Defines a static `Config` class as a general replacement for the old .NET Framework `ConfigurationManager` class.
### [RockLib.Configuration.ObjectFactory](RockLib.Configuration.ObjectFactory) [](https://ci.appveyor.com/project/RockLib/rocklib-configuration-qcxxq)
An alternative to `Microsoft.Extensions.Configuration.Binder` that supports non-default constructors and other features commonly found in JSON and XML serializers.
### [RockLib.Configuration.ProxyFactory](RockLib.Configuration.ProxyFactory) [](https://ci.appveyor.com/project/RockLib/rocklib-configuration-u6yve)
A factory that creates instances of property-only interfaces, defined at run-time, and populated with values defined in an instance of `IConfiguration`.
### [RockLib.Configuration.MessagingProvider](RockLib.Configuration.MessagingProvider) [](https://ci.appveyor.com/project/RockLib/rocklib-configuration)
A configuration provider that reloads when it receives a message containing configuration changes from a `RockLib.Messaging.IReceiver`.
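
A side note on the MessagingProvider entry above: the idea of a configuration source that reloads when a message arrives is easy to sketch in general terms. The snippet below is only an illustration of that pattern, not the RockLib API; the `Receiver` and `ConfigStore` names are invented for the example.

```ts
// Generic sketch: reload configuration whenever a message receiver delivers changes.
// `Receiver` and `ConfigStore` are illustrative names, not RockLib types.
interface Receiver {
  onMessage(handler: (payload: string) => void): void;
}

class ConfigStore {
  private values: Record<string, string> = {};
  private listeners: Array<() => void> = [];

  constructor(receiver: Receiver) {
    // Each incoming message carries changed settings; merge them and notify subscribers.
    receiver.onMessage((payload) => {
      Object.assign(this.values, JSON.parse(payload));
      this.listeners.forEach((notify) => notify());
    });
  }

  get(key: string): string | undefined {
    return this.values[key];
  }

  onReload(listener: () => void): void {
    this.listeners.push(listener);
  }
}
```
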
|
1dcc2cd804c1980d0d198d8cdae8e5496cabdfd4 | index.js | index.js | 'use strict';
var fs = require('fs'),
jshintPlugin = require('gulp-jshint'),
cache = require('gulp-cache');
var jshintVersion = '0.2.4';
var task = {
// Allow the original if needed.
original: jshintPlugin,
// Or, the cached version
cached: function (opt) {
var jshintOpts;
if (typeof opt === 'string') {
jshintOpts = fs.readFileSync(opt);
} else {
jshintOpts = JSON.stringify(opt);
}
return cache.proxy('jshint', {
task: jshintPlugin(opt),
key: function (file) {
return [file.contents.toString('utf8'), jshintVersion, jshintOpts].join('');
},
success: function (jshintedFile) {
return jshintedFile.jshint.success;
},
value: function (jshintedFile) {
var result = {
jshint: {
success: jshintedFile.jshint.success,
cached: true
}
};
return result;
}
});
}
};
// Allow the fileCache to be modified externally
task.cached.fileCache = cache.fileCache;
module.exports = task; | 'use strict';
var fs = require('fs'),
jshintPlugin = require('gulp-jshint'),
cache = require('gulp-cache');
var jshintVersion = '0.2.4';
// Add on to the original plugin
jshintPlugin.cached = function (opt) {
var jshintOpts;
if (typeof opt === 'string') {
jshintOpts = fs.readFileSync(opt);
} else {
jshintOpts = JSON.stringify(opt);
}
return cache.proxy('jshint', {
task: jshintPlugin(opt),
key: function (file) {
return [file.contents.toString('utf8'), jshintVersion, jshintOpts].join('');
},
success: function (jshintedFile) {
return jshintedFile.jshint.success;
},
value: function (jshintedFile) {
var result = {
jshint: {
success: jshintedFile.jshint.success,
cached: true
}
};
return result;
}
});
};
// Allow the fileCache to be modified externally
jshintPlugin.cached.fileCache = cache.fileCache;
module.exports = jshintPlugin; | Make sure we match the original API for reporter use | Make sure we match the original API for reporter use
| JavaScript | mit | jgable/gulp-jshint-cached | javascript | ## Code Before:
'use strict';
var fs = require('fs'),
jshintPlugin = require('gulp-jshint'),
cache = require('gulp-cache');
var jshintVersion = '0.2.4';
var task = {
// Allow the original if needed.
original: jshintPlugin,
// Or, the cached version
cached: function (opt) {
var jshintOpts;
if (typeof opt === 'string') {
jshintOpts = fs.readFileSync(opt);
} else {
jshintOpts = JSON.stringify(opt);
}
return cache.proxy('jshint', {
task: jshintPlugin(opt),
key: function (file) {
return [file.contents.toString('utf8'), jshintVersion, jshintOpts].join('');
},
success: function (jshintedFile) {
return jshintedFile.jshint.success;
},
value: function (jshintedFile) {
var result = {
jshint: {
success: jshintedFile.jshint.success,
cached: true
}
};
return result;
}
});
}
};
// Allow the fileCache to be modified externally
task.cached.fileCache = cache.fileCache;
module.exports = task;
## Instruction:
Make sure we match the original API for reporter use
## Code After:
'use strict';
var fs = require('fs'),
jshintPlugin = require('gulp-jshint'),
cache = require('gulp-cache');
var jshintVersion = '0.2.4';
// Add on to the original plugin
jshintPlugin.cached = function (opt) {
var jshintOpts;
if (typeof opt === 'string') {
jshintOpts = fs.readFileSync(opt);
} else {
jshintOpts = JSON.stringify(opt);
}
return cache.proxy('jshint', {
task: jshintPlugin(opt),
key: function (file) {
return [file.contents.toString('utf8'), jshintVersion, jshintOpts].join('');
},
success: function (jshintedFile) {
return jshintedFile.jshint.success;
},
value: function (jshintedFile) {
var result = {
jshint: {
success: jshintedFile.jshint.success,
cached: true
}
};
return result;
}
});
};
// Allow the fileCache to be modified externally
jshintPlugin.cached.fileCache = cache.fileCache;
module.exports = jshintPlugin; |
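
The change above keeps the module compatible with the original gulp-jshint surface by attaching the cached variant onto the plugin export instead of hiding it behind a wrapper object, and it keys the cache on file contents, plugin version and options. A minimal sketch of that "augment, don't wrap" idea follows; the names are illustrative, not the gulp-jshint API.

```ts
// "Augment, don't wrap": add a cached variant onto the original function so existing
// call sites (lint(...), lint.reporter(...)) keep working unchanged.
type Linter = ((source: string) => boolean) & {
  reporter: (name: string) => string;
  cached: (source: string) => boolean;
};

const seen = new Map<string, boolean>();            // cache keyed by file contents
const base = (source: string) => !source.includes("eval(");  // stand-in "lint"

const lint: Linter = Object.assign(base, {
  reporter: (name: string) => `reporting via ${name}`,
  cached: (source: string) => {
    if (!seen.has(source)) seen.set(source, base(source));
    return seen.get(source)!;
  },
});

console.log(lint("var a = 1;"), lint.reporter("default"), lint.cached("var a = 1;"));
```
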
ad687eb1b58290ab222ee5ff67bee97aa10af385 | _sass/_about.scss | _sass/_about.scss | .about {
.col-md-4 > div {
position: relative;
overflow: hidden;
padding-bottom: 100%;
img {
position:absolute;
width: 100%;
}
}
}
| .about {
.col-md-4 > div {
position: relative;
overflow: hidden;
padding-bottom: 100%;
img {
position:absolute;
min-width: 100%;
min-height: 100%;
}
}
}
| Make sure images are in a square dimension | Make sure images are in a square dimension
| SCSS | mit | josephmilla/makergirl,makergirl/makergirl.github.io,makergirl/makergirl.github.io,josephmilla/makergirl,josephmilla/makergirl,makergirl/makergirl.github.io | scss | ## Code Before:
.about {
.col-md-4 > div {
position: relative;
overflow: hidden;
padding-bottom: 100%;
img {
position:absolute;
width: 100%;
}
}
}
## Instruction:
Make sure images are in a square dimension
## Code After:
.about {
.col-md-4 > div {
position: relative;
overflow: hidden;
padding-bottom: 100%;
img {
position:absolute;
min-width: 100%;
min-height: 100%;
}
}
}
|
9d00b89171fba454eeb65dda16292fd510b3f1a0 | packages/node_modules/@ciscospark/widget-message/src/utils.js | packages/node_modules/@ciscospark/widget-message/src/utils.js | import {base64} from '@ciscospark/common';
export function constructActivityEventData(activity) {
const hydraActivity = Object.assign({}, activity);
// Base64 encode uuids
hydraActivity.id = base64.encode(`ciscospark://us/MESSAGE/${activity.id}`);
hydraActivity.roomId = base64.encode(`ciscospark://us/ROOM/${activity.target.id}`);
hydraActivity.target.id = base64.encode(`ciscospark://us/ROOM/${activity.target.id}`);
return hydraActivity;
}
| import {base64} from '@ciscospark/common';
export function constructActivityEventData(activity) {
const hydraActivity = Object.assign({}, activity);
// Base64 encode uuids
hydraActivity.id = base64.encode(`ciscospark://us/MESSAGE/${activity.id}`);
hydraActivity.roomId = base64.encode(`ciscospark://us/ROOM/${activity.target.id}`);
hydraActivity.target = Object.assign({}, activity.target, {
id: base64.encode(`ciscospark://us/ROOM/${activity.target.id}`)
});
return hydraActivity;
}
| Fix overwriting issue with target id | fix(widget-message): Fix overwriting issue with target id
| JavaScript | mit | Altocloud/alto-react-ciscospark,adamweeks/react-ciscospark-1,Altocloud/alto-react-ciscospark,adamweeks/react-ciscospark-1,bzang/react-ciscospark,adamweeks/react-ciscospark-1,ciscospark/react-ciscospark,bzang/react-ciscospark,ciscospark/react-ciscospark,Altocloud/alto-react-ciscospark,bzang/react-ciscospark,ciscospark/react-ciscospark | javascript | ## Code Before:
import {base64} from '@ciscospark/common';
export function constructActivityEventData(activity) {
const hydraActivity = Object.assign({}, activity);
// Base64 encode uuids
hydraActivity.id = base64.encode(`ciscospark://us/MESSAGE/${activity.id}`);
hydraActivity.roomId = base64.encode(`ciscospark://us/ROOM/${activity.target.id}`);
hydraActivity.target.id = base64.encode(`ciscospark://us/ROOM/${activity.target.id}`);
return hydraActivity;
}
## Instruction:
fix(widget-message): Fix overwriting issue with target id
## Code After:
import {base64} from '@ciscospark/common';
export function constructActivityEventData(activity) {
const hydraActivity = Object.assign({}, activity);
// Base64 encode uuids
hydraActivity.id = base64.encode(`ciscospark://us/MESSAGE/${activity.id}`);
hydraActivity.roomId = base64.encode(`ciscospark://us/ROOM/${activity.target.id}`);
hydraActivity.target = Object.assign({}, activity.target, {
id: base64.encode(`ciscospark://us/ROOM/${activity.target.id}`)
});
return hydraActivity;
}
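
The fix above works around a shallow-copy pitfall: `Object.assign({}, activity)` copies only the top level, so the copy's `target` still points at the same object as `activity.target`, and assigning to `hydraActivity.target.id` mutated the original activity as well. Building a fresh `target` object avoids that. A minimal stand-alone illustration, with invented field values:

```ts
// Object.assign makes a shallow copy: nested objects are shared, not duplicated.
const activity = { id: "1", target: { id: "room-1" } };

const badCopy = Object.assign({}, activity);
badCopy.target.id = "encoded(room-1)";
console.log(activity.target.id); // "encoded(room-1)", the original was mutated too

const activity2 = { id: "2", target: { id: "room-2" } };
const goodCopy = Object.assign({}, activity2, {
  target: Object.assign({}, activity2.target, { id: "encoded(room-2)" }),
});
console.log(activity2.target.id, goodCopy.target.id); // "room-2", "encoded(room-2)"
```
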
|
fb7853d14890afba8cba4e4ee7d1dad1acecec29 | .travis.yml | .travis.yml | dist: xenial
language: clojure
sudo: required
lein: lein
jdk:
- openjdk8
services:
- docker
branches:
only:
- master
- wip
- develop
install:
- ./manage.sh help
- nvm install $NODE_VERSION
- node --version
before_script:
- env | sort
script:
- ./manage.sh $COMMAND
after_script:
- docker images
- docker ps
notifications:
email: false
env:
- NODE_VERSION=7.7.1 COMMAND=build
- NODE_VERSION=7.7.1 COMMAND=test
- NODE_VERSION=7.7.1 COMMAND=release-local
- NODE_VERSION=7.7.1 COMMAND=release-docker
- NODE_VERSION=7.7.1 COMMAND=run-release
- NODE_VERSION=8.15.0 COMMAND=build
- NODE_VERSION=8.15.0 COMMAND=test
- NODE_VERSION=8.15.0 COMMAND=release-local
- NODE_VERSION=8.15.0 COMMAND=release-docker
- NODE_VERSION=8.15.0 COMMAND=run-release
| dist: xenial
language: clojure
sudo: required
lein: lein
jdk:
- openjdk8
services:
- docker
branches:
only:
- master
- wip
- develop
- build-refactor
install:
- ./manage.sh help
- nvm install $NODE_VERSION
- node --version
before_script:
- env | sort
script:
- ./manage.sh $COMMAND
after_script:
- docker images
- docker ps
notifications:
email: false
env:
- NODE_VERSION=10.16.0 COMMAND=clean
- NODE_VERSION=10.16.0 COMMAND=build-devenv
#- NODE_VERSION=10.16.0 COMMAND=run-devenv
#- NODE_VERSION=10.16.0 COMMAND=test
- NODE_VERSION=10.16.0 COMMAND=build-release
#- NODE_VERSION=10.16.0 COMMAND=run-release
| Update CI node and tasks | :construction_worker: Update CI node and tasks
| YAML | mpl-2.0 | uxbox/uxbox,uxbox/uxbox,uxbox/uxbox | yaml | ## Code Before:
dist: xenial
language: clojure
sudo: required
lein: lein
jdk:
- openjdk8
services:
- docker
branches:
only:
- master
- wip
- develop
install:
- ./manage.sh help
- nvm install $NODE_VERSION
- node --version
before_script:
- env | sort
script:
- ./manage.sh $COMMAND
after_script:
- docker images
- docker ps
notifications:
email: false
env:
- NODE_VERSION=7.7.1 COMMAND=build
- NODE_VERSION=7.7.1 COMMAND=test
- NODE_VERSION=7.7.1 COMMAND=release-local
- NODE_VERSION=7.7.1 COMMAND=release-docker
- NODE_VERSION=7.7.1 COMMAND=run-release
- NODE_VERSION=8.15.0 COMMAND=build
- NODE_VERSION=8.15.0 COMMAND=test
- NODE_VERSION=8.15.0 COMMAND=release-local
- NODE_VERSION=8.15.0 COMMAND=release-docker
- NODE_VERSION=8.15.0 COMMAND=run-release
## Instruction:
:construction_worker: Update CI node and tasks
## Code After:
dist: xenial
language: clojure
sudo: required
lein: lein
jdk:
- openjdk8
services:
- docker
branches:
only:
- master
- wip
- develop
- build-refactor
install:
- ./manage.sh help
- nvm install $NODE_VERSION
- node --version
before_script:
- env | sort
script:
- ./manage.sh $COMMAND
after_script:
- docker images
- docker ps
notifications:
email: false
env:
- NODE_VERSION=10.16.0 COMMAND=clean
- NODE_VERSION=10.16.0 COMMAND=build-devenv
#- NODE_VERSION=10.16.0 COMMAND=run-devenv
#- NODE_VERSION=10.16.0 COMMAND=test
- NODE_VERSION=10.16.0 COMMAND=build-release
#- NODE_VERSION=10.16.0 COMMAND=run-release
|
e0f5f51f921e9ad819605168993736ca9ea4c78a | app/views/shopping_shared/_groups.html.haml | app/views/shopping_shared/_groups.html.haml | .content
.row
.small-12.columns
%h5
=current_distributor.name
belongs to:
%ul.bullet-list
- for group in current_distributor.groups
%li
%a{href: main_app.groups_path(anchor: "#/#group#{group.id}")}= group.name
| .content
.row
.small-12.columns.panel
%h5
=current_distributor.name
is part of:
%ul.bullet-list
- for group in current_distributor.groups
%li
%a{href: main_app.groups_path(anchor: "#/#group#{group.id}")}= group.name
| Tweak markup for GROUPS tab on mini menu | Tweak markup for GROUPS tab on mini menu
| Haml | agpl-3.0 | folklabs/openfoodnetwork,Matt-Yorkley/openfoodnetwork,stveep/openfoodnetwork,KosenkoDmitriy/openfoodnetwork,mkllnk/openfoodnetwork,KateDavis/openfoodnetwork,stveep/openfoodnetwork,openfoodfoundation/openfoodnetwork,KateDavis/openfoodnetwork,ecocitycore/openfoodnetwork,MikeiLL/openfoodnetwork,KateDavis/openfoodnetwork,openfoodfoundation/openfoodnetwork,oeoeaio/openfoodnetwork,Matt-Yorkley/openfoodnetwork,RohanM/openfoodnetwork,MikeiLL/openfoodnetwork,Matt-Yorkley/openfoodnetwork,KateDavis/openfoodnetwork,mkllnk/openfoodnetwork,ltrls/openfoodnetwork,openfoodfoundation/openfoodnetwork,MikeiLL/openfoodnetwork,RohanM/openfoodnetwork,Em-AK/openfoodnetwork,KosenkoDmitriy/openfoodnetwork,Em-AK/openfoodnetwork,KosenkoDmitriy/openfoodnetwork,oeoeaio/openfoodnetwork,folklabs/openfoodnetwork,levent/openfoodnetwork,MikeiLL/openfoodnetwork,folklabs/openfoodnetwork,folklabs/openfoodnetwork,oeoeaio/openfoodnetwork,lin-d-hop/openfoodnetwork,RohanM/openfoodnetwork,Em-AK/openfoodnetwork,ecocitycore/openfoodnetwork,mkllnk/openfoodnetwork,RohanM/openfoodnetwork,levent/openfoodnetwork,mkllnk/openfoodnetwork,stveep/openfoodnetwork,Matt-Yorkley/openfoodnetwork,lin-d-hop/openfoodnetwork,lin-d-hop/openfoodnetwork,oeoeaio/openfoodnetwork,levent/openfoodnetwork,levent/openfoodnetwork,ltrls/openfoodnetwork,ecocitycore/openfoodnetwork,ecocitycore/openfoodnetwork,stveep/openfoodnetwork,ltrls/openfoodnetwork,KosenkoDmitriy/openfoodnetwork,ltrls/openfoodnetwork,lin-d-hop/openfoodnetwork,openfoodfoundation/openfoodnetwork,Em-AK/openfoodnetwork | haml | ## Code Before:
.content
.row
.small-12.columns
%h5
=current_distributor.name
belongs to:
%ul.bullet-list
- for group in current_distributor.groups
%li
%a{href: main_app.groups_path(anchor: "#/#group#{group.id}")}= group.name
## Instruction:
Tweak markup for GROUPS tab on mini menu
## Code After:
.content
.row
.small-12.columns.panel
%h5
=current_distributor.name
is part of:
%ul.bullet-list
- for group in current_distributor.groups
%li
%a{href: main_app.groups_path(anchor: "#/#group#{group.id}")}= group.name
|
de432cf4817dc4a0599e6992e17844006305a66e | .gitlab-ci.yml | .gitlab-ci.yml | image: jacob9230/linter
before_script:
- npm install --no-package-lock --no-save --quiet stylelint-config-standard stylelint-order stylelint-scss
- node -v
- npm -v
- php -v
code_quality:
script:
- stylelint "./src/assets/styles/**/*.scss"
- eslint "./src/assets/scripts/**/*.js"
- phplint "./src"
| image: jacob9230/linter
before_script:
- npm install --global --quiet eslint stylelint
- npm install --no-package-lock --no-save --quiet stylelint stylelint-config-standard stylelint-order stylelint-scss
- node -v
- npm -v
- php -v
code_quality:
script:
- stylelint "./src/assets/styles/**/*.scss"
- eslint "./src/assets/scripts/**/*.js"
- phplint "./src"
| Install stylelint locally for CI | Install stylelint locally for CI
| YAML | mit | JacobDB/new-site,revxx14/new-site,JacobDB/new-site,revxx14/new-site,JacobDB/new-site | yaml | ## Code Before:
image: jacob9230/linter
before_script:
- npm install --no-package-lock --no-save --quiet stylelint-config-standard stylelint-order stylelint-scss
- node -v
- npm -v
- php -v
code_quality:
script:
- stylelint "./src/assets/styles/**/*.scss"
- eslint "./src/assets/scripts/**/*.js"
- phplint "./src"
## Instruction:
Install stylelint locally for CI
## Code After:
image: jacob9230/linter
before_script:
- npm install --global --quiet eslint stylelint
- npm install --no-package-lock --no-save --quiet stylelint stylelint-config-standard stylelint-order stylelint-scss
- node -v
- npm -v
- php -v
code_quality:
script:
- stylelint "./src/assets/styles/**/*.scss"
- eslint "./src/assets/scripts/**/*.js"
- phplint "./src"
|
c8c0af1357c3fd4377092f19f4dc4a3ea08d09b6 | README.md | README.md | Batch Request
=============
A simple library for batching HTTP requests
[View Documentation](http://batch-request.socialradar.com)
[](https://travis-ci.org/socialradar/batch-request)
## QuickStart
Download via [NPM](http://npmjs.org)
[](https://nodei.co/npm/batch-request/)
then in your app
// Use Batch Request as middleware on an endpoint you want to service batch requests
app.get('/batch', batch);
Optionally use our included middleware to check the validity of your batch request
// Include the batch.validate middleware before batch middleware
app.get('/batch', batch.validate, batch);
And that's it!
Proudly written in Washington, D.C. by:
[](http://socialradar.com)
| Batch Request
=============
A simple library for batching HTTP requests
[View Documentation](http://batch-request.socialradar.com)
[](https://travis-ci.org/socialradar/batch-request) [](http://gruntjs.com/)
## QuickStart
Download via [NPM](http://npmjs.org)
[](https://nodei.co/npm/batch-request/)
then in your app
// Use Batch Request as middleware on an endpoint you want to service batch requests
app.get('/batch', batch);
Optionally use our included middleware to check the validity of your batch request
// Include the batch.validate middleware before batch middleware
app.get('/batch', batch.validate, batch);
And that's it!
Proudly written in Washington, D.C. by:
[](http://socialradar.com)
| Add the Built with Grunt badge! | Add the Built with Grunt badge! | Markdown | mit | socialradar/batch-request,WeeverApps/batch-request,frenchie4111/batch-request | markdown | ## Code Before:
Batch Request
=============
A simple library for batching HTTP requests
[View Documentation](http://batch-request.socialradar.com)
[](https://travis-ci.org/socialradar/batch-request)
## QuickStart
Download via [NPM](http://npmjs.org)
[](https://nodei.co/npm/batch-request/)
then in your app
// Use Batch Request as middleware on an endpoint you want to service batch requests
app.get('/batch', batch);
Optionally use our included middleware to check the validity of your batch request
// Include the batch.validate middleware before batch middleware
app.get('/batch', batch.validate, batch);
And that's it!
Proudly written in Washington, D.C. by:
[](http://socialradar.com)
## Instruction:
Add the Built with Grunt badge!
## Code After:
Batch Request
=============
A simple library for batching HTTP requests
[View Documentation](http://batch-request.socialradar.com)
[](https://travis-ci.org/socialradar/batch-request) [](http://gruntjs.com/)
## QuickStart
Download via [NPM](http://npmjs.org)
[](https://nodei.co/npm/batch-request/)
then in your app
// Use Batch Request as middleware on an endpoint you want to service batch requests
app.get('/batch', batch);
Optionally use our included middleware to check the validity of your batch request
// Include the batch.validate middleware before batch middleware
app.get('/batch', batch.validate, batch);
And that's it!
Proudly written in Washington, D.C. by:
[](http://socialradar.com)
|
253564d368e32bec0507d8959be82f1b15a27e42 | test/thumbs_test.js | test/thumbs_test.js | window.addEventListener('load', function(){
module('touchstart');
test('should use touchstart when touchstart is supported', function() {
assert({ listener:'touchstart', receives:'touchstart' });
});
test('should use mousedown when touchstart is unsupported', function() {
assert({ listener:'touchstart', receives:'mousedown' });
});
module('touchend');
test('should use touchend when touchend is supported', function() {
assert({ listener:'touchend', receives:'touchend' });
});
test('should use mouseup when touchend is unsupported', function() {
assert({ listener:'touchend', receives:'mouseup' });
});
module('touchmove');
test('should use touchmove when touchmove is supported', function() {
assert({ listener:'touchmove', receives:'touchmove' });
});
test('should use mousemove when touchmove is unsupported', function() {
assert({ listener:'touchmove', receives:'mousemove' });
});
module('tap');
test('should use tap when touch events are supported', function() {
ok(false, 'not implemented');
// assert({ listener:'tap', receives:'tap' });
});
test('should use click when tap is unsupported', function() {
assert({ listener:'tap', receives:'click' });
});
});
| window.addEventListener('load', function(){
module('mousedown');
test('should use mousedown', function() {
assert({ listener:'mousedown', receives:'mousedown' });
});
module('mouseup');
test('should use mouseup', function() {
assert({ listener:'mouseup', receives:'mouseup' });
});
module('mousemove');
test('should use mousemove', function() {
assert({ listener:'mousemove', receives:'mousemove' });
});
module('click');
test('should use click', function() {
assert({ listener:'click', receives:'click' });
});
module('touchstart');
test('should use touchstart when touchstart is supported', function() {
assert({ listener:'touchstart', receives:'touchstart' });
});
test('should use mousedown when touchstart is unsupported', function() {
assert({ listener:'touchstart', receives:'mousedown' });
});
module('touchend');
test('should use touchend when touchend is supported', function() {
assert({ listener:'touchend', receives:'touchend' });
});
test('should use mouseup when touchend is unsupported', function() {
assert({ listener:'touchend', receives:'mouseup' });
});
module('touchmove');
test('should use touchmove when touchmove is supported', function() {
assert({ listener:'touchmove', receives:'touchmove' });
});
test('should use mousemove when touchmove is unsupported', function() {
assert({ listener:'touchmove', receives:'mousemove' });
});
module('tap');
test('should use tap when touch events are supported', function() {
ok(false, 'not implemented');
// assert({ listener:'tap', receives:'tap' });
});
test('should use click when tap is unsupported', function() {
assert({ listener:'tap', receives:'click' });
});
});
| Add tests against destroying mouse events. | Add tests against destroying mouse events.
| JavaScript | mit | mwbrooks/thumbs.js,pyrinelaw/thumbs.js,pyrinelaw/thumbs.js | javascript | ## Code Before:
window.addEventListener('load', function(){
module('touchstart');
test('should use touchstart when touchstart is supported', function() {
assert({ listener:'touchstart', receives:'touchstart' });
});
test('should use mousedown when touchstart is unsupported', function() {
assert({ listener:'touchstart', receives:'mousedown' });
});
module('touchend');
test('should use touchend when touchend is supported', function() {
assert({ listener:'touchend', receives:'touchend' });
});
test('should use mouseup when touchend is unsupported', function() {
assert({ listener:'touchend', receives:'mouseup' });
});
module('touchmove');
test('should use touchmove when touchmove is supported', function() {
assert({ listener:'touchmove', receives:'touchmove' });
});
test('should use mousemove when touchmove is unsupported', function() {
assert({ listener:'touchmove', receives:'mousemove' });
});
module('tap');
test('should use tap when touch events are supported', function() {
ok(false, 'not implemented');
// assert({ listener:'tap', receives:'tap' });
});
test('should use click when tap is unsupported', function() {
assert({ listener:'tap', receives:'click' });
});
});
## Instruction:
Add tests against destroying mouse events.
## Code After:
window.addEventListener('load', function(){
module('mousedown');
test('should use mousedown', function() {
assert({ listener:'mousedown', receives:'mousedown' });
});
module('mouseup');
test('should use mouseup', function() {
assert({ listener:'mouseup', receives:'mouseup' });
});
module('mousemove');
test('should use mousemove', function() {
assert({ listener:'mousemove', receives:'mousemove' });
});
module('click');
test('should use click', function() {
assert({ listener:'click', receives:'click' });
});
module('touchstart');
test('should use touchstart when touchstart is supported', function() {
assert({ listener:'touchstart', receives:'touchstart' });
});
test('should use mousedown when touchstart is unsupported', function() {
assert({ listener:'touchstart', receives:'mousedown' });
});
module('touchend');
test('should use touchend when touchend is supported', function() {
assert({ listener:'touchend', receives:'touchend' });
});
test('should use mouseup when touchend is unsupported', function() {
assert({ listener:'touchend', receives:'mouseup' });
});
module('touchmove');
test('should use touchmove when touchmove is supported', function() {
assert({ listener:'touchmove', receives:'touchmove' });
});
test('should use mousemove when touchmove is unsupported', function() {
assert({ listener:'touchmove', receives:'mousemove' });
});
module('tap');
test('should use tap when touch events are supported', function() {
ok(false, 'not implemented');
// assert({ listener:'tap', receives:'tap' });
});
test('should use click when tap is unsupported', function() {
assert({ listener:'tap', receives:'click' });
});
});
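
The added cases above make the intent of the suite explicit: touch-style event names should fall back to mouse equivalents on platforms without touch support, while plain mouse events pass through untouched. A generic sketch of that name-mapping idea, not the actual thumbs.js implementation:

```ts
// Map "virtual" event names to whatever the platform actually supports:
// touchstart->mousedown, touchend->mouseup, touchmove->mousemove, tap->click,
// while native mouse events are left alone.
const fallbacks: Record<string, string> = {
  touchstart: "mousedown",
  touchend: "mouseup",
  touchmove: "mousemove",
  tap: "click",
};

function resolveEventName(requested: string, touchSupported: boolean): string {
  if (touchSupported || !(requested in fallbacks)) return requested;
  return fallbacks[requested];
}

console.log(resolveEventName("touchstart", false)); // "mousedown"
console.log(resolveEventName("mousedown", false));  // "mousedown" (mouse events untouched)
console.log(resolveEventName("touchend", true));    // "touchend"
```
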
|
b669d79d670f9f621be834335e1b8a91dfcaba1e | cdap-docs/developers-manual/source/getting-started/start-stop-cdap.rst | cdap-docs/developers-manual/source/getting-started/start-stop-cdap.rst | .. :author: Cask Data, Inc.
:copyright: Copyright © 2014-2015 Cask Data, Inc.
============================================
Starting and Stopping Standalone CDAP
============================================
.. this file is included in others; any titles need to be +
.. _start-stop-cdap:
.. highlight:: console
Use the ``cdap.sh`` script (or, if you are using Windows, use ``cdap.bat``) to start and
stop the Standalone CDAP (the location will vary depending on where the CDAP SDK is
installed):
.. tabbed-parsed-literal::
$ cd cdap-sdk-|version|
$ ./bin/cdap.sh start
. . .
$ ./bin/cdap.sh stop
.. include:: /_includes/windows-note.txt
Note that starting CDAP is not necessary if you use either the Virtual Machine or the
Docker image, as they both start the Standalone CDAP automatically on startup.
Once CDAP is started successfully, in a web browser you will be able to see the CDAP
UI running at ``http://localhost:9999``, where you can deploy example applications and
interact with CDAP.
Note that in the case of the Docker image, you will need to substitute
the Docker VM's IP address for ``localhost`` in the web browser address bar.
| .. :author: Cask Data, Inc.
:copyright: Copyright © 2014-2015 Cask Data, Inc.
============================================
Starting and Stopping Standalone CDAP
============================================
.. this file is included in others; any titles need to be +
.. _start-stop-cdap:
.. highlight:: console
Use the ``cdap.sh`` script (or, if you are using Windows, use ``cdap.bat``) to start and
stop the Standalone CDAP (the location will vary depending on where the CDAP SDK is
installed):
.. tabbed-parsed-literal::
$ cd cdap-sdk-|version|
$ ./bin/cdap.sh start
. . .
$ ./bin/cdap.sh stop
.. include:: /_includes/windows-note.txt
Note that starting CDAP is not necessary if you use either the Virtual Machine or the
Docker image, as they both start the Standalone CDAP automatically on startup.
Once CDAP is started successfully, in a web browser you will be able to see the CDAP
UI running at ``http://localhost:9999``, where you can deploy example applications and
interact with CDAP.
Note that in the case of the Docker container running inside a Virtual Machine (as on
either Mac OS X or Microsoft Windows), you will need to substitute the Docker VM's IP
address for ``localhost`` in the web browser address bar.
| Correct the explanation of when you need to use a VM's IP address as a substitution. | Correct the explanation of when you need to use a VM's IP address
as a substitution.
| reStructuredText | apache-2.0 | caskdata/cdap,caskdata/cdap,caskdata/cdap,caskdata/cdap,caskdata/cdap,caskdata/cdap | restructuredtext | ## Code Before:
.. :author: Cask Data, Inc.
:copyright: Copyright © 2014-2015 Cask Data, Inc.
============================================
Starting and Stopping Standalone CDAP
============================================
.. this file is included in others; any titles need to be +
.. _start-stop-cdap:
.. highlight:: console
Use the ``cdap.sh`` script (or, if you are using Windows, use ``cdap.bat``) to start and
stop the Standalone CDAP (the location will vary depending on where the CDAP SDK is
installed):
.. tabbed-parsed-literal::
$ cd cdap-sdk-|version|
$ ./bin/cdap.sh start
. . .
$ ./bin/cdap.sh stop
.. include:: /_includes/windows-note.txt
Note that starting CDAP is not necessary if you use either the Virtual Machine or the
Docker image, as they both start the Standalone CDAP automatically on startup.
Once CDAP is started successfully, in a web browser you will be able to see the CDAP
UI running at ``http://localhost:9999``, where you can deploy example applications and
interact with CDAP.
Note that in the case of the Docker image, you will need to substitute
the Docker VM's IP address for ``localhost`` in the web browser address bar.
## Instruction:
Correct the explanation of when you need to use a VM's IP address
as a substitution.
## Code After:
.. :author: Cask Data, Inc.
:copyright: Copyright © 2014-2015 Cask Data, Inc.
============================================
Starting and Stopping Standalone CDAP
============================================
.. this file is included in others; any titles need to be +
.. _start-stop-cdap:
.. highlight:: console
Use the ``cdap.sh`` script (or, if you are using Windows, use ``cdap.bat``) to start and
stop the Standalone CDAP (the location will vary depending on where the CDAP SDK is
installed):
.. tabbed-parsed-literal::
$ cd cdap-sdk-|version|
$ ./bin/cdap.sh start
. . .
$ ./bin/cdap.sh stop
.. include:: /_includes/windows-note.txt
Note that starting CDAP is not necessary if you use either the Virtual Machine or the
Docker image, as they both start the Standalone CDAP automatically on startup.
Once CDAP is started successfully, in a web browser you will be able to see the CDAP
UI running at ``http://localhost:9999``, where you can deploy example applications and
interact with CDAP.
Note that in the case of the Docker container running inside a Virtual Machine (as on
either Mac OS X or Microsoft Windows), you will need to substitute the Docker VM's IP
address for ``localhost`` in the web browser address bar.
|
f00d1759d87049966d40c4df6efa0be62bae2ebf | README.md | README.md | AliceGenerator
===========
Recursively convert existing objects into [Alice](https://github.com/nelmio/alice) Fixtures.
## Why?
Sometimes you find yourself working on a large project with no existing fixtures.
In this case even though Alice makes fixtures much easier to write, that process can still be tedious.
This library proposes an alternate starting point - *automatically generate fixtures from your existing data.*
This opens up a whole new, much faster way to get your test data established... just enter it in your user interface!
## How?
More detail will be added here shortly.
## Credits
This bundle was developed by [Jeff Way](https://github.com/trappar) with quite a lot of inspiration from:
* [nelmio/alice](https://github.com/nelmio/alice)
* [schmittjoh/serializer](https://github.com/schmittjoh/serializer)
[Other contributors](https://github.com/trappar/AliceGeneratorBundle/graphs/contributors).
## License
[](Resources/meta/LICENSE) | AliceGenerator [](https://travis-ci.org/trappar/AliceGenerator)
===========
Recursively convert existing objects into [Alice](https://github.com/nelmio/alice) Fixtures.
## Why?
Sometimes you find yourself working on a large project with no existing fixtures.
In this case even though Alice makes fixtures much easier to write, that process can still be tedious.
This library proposes an alternate starting point - *automatically generate fixtures from your existing data.*
This opens up a whole new, much faster way to get your test data established... just enter it in your user interface!
## How?
More detail will be added here shortly.
## Credits
This bundle was developed by [Jeff Way](https://github.com/trappar) with quite a lot of inspiration from:
* [nelmio/alice](https://github.com/nelmio/alice)
* [schmittjoh/serializer](https://github.com/schmittjoh/serializer)
[Other contributors](https://github.com/trappar/AliceGeneratorBundle/graphs/contributors).
## License
[](Resources/meta/LICENSE) | Add build information to readme | Add build information to readme
| Markdown | mit | trappar/AliceGenerator | markdown | ## Code Before:
AliceGenerator
===========
Recursively convert existing objects into [Alice](https://github.com/nelmio/alice) Fixtures.
## Why?
Sometimes you find yourself working on a large project with no existing fixtures.
In this case even though Alice makes fixtures much easier to write, that process can still be tedious.
This library proposes an alternate starting point - *automatically generate fixtures from your existing data.*
This opens up a whole new, much faster way to get your test data established... just enter it in your user interface!
## How?
More detail will be added here shortly.
## Credits
This bundle was developed by [Jeff Way](https://github.com/trappar) with quite a lot of inspiration from:
* [nelmio/alice](https://github.com/nelmio/alice)
* [schmittjoh/serializer](https://github.com/schmittjoh/serializer)
[Other contributors](https://github.com/trappar/AliceGeneratorBundle/graphs/contributors).
## License
[](Resources/meta/LICENSE)
## Instruction:
Add build information to readme
## Code After:
AliceGenerator [](https://travis-ci.org/trappar/AliceGenerator)
===========
Recursively convert existing objects into [Alice](https://github.com/nelmio/alice) Fixtures.
## Why?
Sometimes you find yourself working on a large project with no existing fixtures.
In this case even though Alice makes fixtures much easier to write, that process can still be tedious.
This library proposes an alternate starting point - *automatically generate fixtures from your existing data.*
This opens up a whole new, much faster way to get your test data established... just enter it in your user interface!
## How?
More detail will be added here shortly.
## Credits
This bundle was developed by [Jeff Way](https://github.com/trappar) with quite a lot of inspiration from:
* [nelmio/alice](https://github.com/nelmio/alice)
* [schmittjoh/serializer](https://github.com/schmittjoh/serializer)
[Other contributors](https://github.com/trappar/AliceGeneratorBundle/graphs/contributors).
## License
[](Resources/meta/LICENSE) |
6c957de05cc5d353670dd497d52fff19391336b6 | libs-js/_template.js | libs-js/_template.js | function AstronomyLibs_Template_int2Float(number) {
var outvar = parseFloat(number.toString());
return outvar;
} | /*
* AstronomyLibs_Template_int2Float(number)
*
* convert integer to float
*
* @author Markus Popp <[email protected]>
*/
function AstronomyLibs_Template_int2Float(number) {
var outvar = parseFloat(number.toString());
return outvar;
} | Add description to JS template function | Add description to JS template function
| JavaScript | mit | mpopp75/astronomy-libs,mpopp75/astronomy-libs | javascript | ## Code Before:
function AstronomyLibs_Template_int2Float(number) {
var outvar = parseFloat(number.toString());
return outvar;
}
## Instruction:
Add description to JS template function
## Code After:
/*
* AstronomyLibs_Template_int2Float(number)
*
* convert integer to float
*
* @author Markus Popp <[email protected]>
*/
function AstronomyLibs_Template_int2Float(number) {
var outvar = parseFloat(number.toString());
return outvar;
} |
d4119426069fddf507f791c56af9546c5c8a4f76 | src/core/mainMenu.js | src/core/mainMenu.js | import { Menu, MenuItem } from "electron";
export class MainMenu {
constructor(app, tray) {
this.app = app;
this.tray = tray;
}
initialize() {
this.setTrayHandler();
}
setTrayHandler(menu) {
menu = menu || this.buildMenu();
this.tray.setContextMenu(menu);
this.tray.on("right-click", () => {
this.tray.popUpContextMenu();
});
}
buildMenu() {
const menu = new Menu();
const quitItem = new MenuItem({
type: "normal",
label: "Quit",
id: "quit",
accelerator: "Q",
click: (menuItem, browserWindow, event) => {
this.app.quit();
}
});
menu.append(quitItem);
return menu;
}
}
| import { Menu, MenuItem } from "electron";
export class MainMenu {
constructor(app, tray) {
this.app = app;
this.tray = tray;
}
initialize() {
this.setTrayHandler();
}
setTrayHandler() {
this.tray.on("right-click", () => {
this.tray.popUpContextMenu(this.buildMenu());
});
}
buildMenu() {
const menu = new Menu();
const quitItem = new MenuItem({
type: "normal",
label: "Quit",
id: "quit",
accelerator: "Q",
click: (menuItem, browserWindow, event) => {
this.app.quit();
}
});
menu.append(quitItem);
return menu;
}
}
| Fix right click issue (: | Fix right click issue (:
| JavaScript | mit | alexcpendleton/sleep-tight,alexcpendleton/sleep-tight | javascript | ## Code Before:
import { Menu, MenuItem } from "electron";
export class MainMenu {
constructor(app, tray) {
this.app = app;
this.tray = tray;
}
initialize() {
this.setTrayHandler();
}
setTrayHandler(menu) {
menu = menu || this.buildMenu();
this.tray.setContextMenu(menu);
this.tray.on("right-click", () => {
this.tray.popUpContextMenu();
});
}
buildMenu() {
const menu = new Menu();
const quitItem = new MenuItem({
type: "normal",
label: "Quit",
id: "quit",
accelerator: "Q",
click: (menuItem, browserWindow, event) => {
this.app.quit();
}
});
menu.append(quitItem);
return menu;
}
}
## Instruction:
Fix right click issue (:
## Code After:
import { Menu, MenuItem } from "electron";
export class MainMenu {
constructor(app, tray) {
this.app = app;
this.tray = tray;
}
initialize() {
this.setTrayHandler();
}
setTrayHandler() {
this.tray.on("right-click", () => {
this.tray.popUpContextMenu(this.buildMenu());
});
}
buildMenu() {
const menu = new Menu();
const quitItem = new MenuItem({
type: "normal",
label: "Quit",
id: "quit",
accelerator: "Q",
click: (menuItem, browserWindow, event) => {
this.app.quit();
}
});
menu.append(quitItem);
return menu;
}
}
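
The fix above builds the menu inside the right-click handler and hands it straight to `popUpContextMenu`, rather than registering one fixed context menu up front. Whatever the Electron-specific details of the original bug, constructing UI state at the moment it is shown is a general pattern worth naming: the popup always reflects current state instead of a snapshot taken at startup. A small illustrative sketch, unrelated to the Electron API:

```ts
// Build UI state lazily, at the moment it is shown, instead of once at startup.
type MenuEntry = { label: string; enabled: boolean };

let unsavedChanges = 0;

// Eager: a snapshot taken once; it goes stale as `unsavedChanges` moves on.
const eagerMenu: MenuEntry[] = [{ label: "Save", enabled: unsavedChanges > 0 }];

// Lazy: rebuilt on every popup, so it always reflects current state.
const buildMenu = (): MenuEntry[] => [{ label: "Save", enabled: unsavedChanges > 0 }];

unsavedChanges = 3;
console.log(eagerMenu[0].enabled);   // false (stale snapshot)
console.log(buildMenu()[0].enabled); // true  (rebuilt on demand)
```
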
|
929779ef4504ea114c174f2b2d83a0535143d76d | app/src/main/java/net/ericschrag/ud_demo/data/GithubService.java | app/src/main/java/net/ericschrag/ud_demo/data/GithubService.java | package net.ericschrag.ud_demo.data;
import net.ericschrag.ud_demo.data.model.GithubUser;
import java.util.List;
import retrofit.http.GET;
public interface GithubService {
@GET("/users")
public List<GithubUser> getUsers();
}
| package net.ericschrag.ud_demo.data;
import net.ericschrag.ud_demo.data.model.GithubUser;
import java.util.List;
import retrofit.http.GET;
import retrofit.http.Headers;
public interface GithubService {
@Headers("Accept: application/vnd.github.v3+json")
@GET("/users")
public List<GithubUser> getUsers();
}
| Make sure to tell Github API what version we are expecting | Make sure to tell Github API what version we are expecting
| Java | apache-2.0 | Kusand/ud-demo | java | ## Code Before:
package net.ericschrag.ud_demo.data;
import net.ericschrag.ud_demo.data.model.GithubUser;
import java.util.List;
import retrofit.http.GET;
public interface GithubService {
@GET("/users")
public List<GithubUser> getUsers();
}
## Instruction:
Make sure to tell Github API what version we are expecting
## Code After:
package net.ericschrag.ud_demo.data;
import net.ericschrag.ud_demo.data.model.GithubUser;
import java.util.List;
import retrofit.http.GET;
import retrofit.http.Headers;
public interface GithubService {
@Headers("Accept: application/vnd.github.v3+json")
@GET("/users")
public List<GithubUser> getUsers();
}
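
The added `Accept: application/vnd.github.v3+json` header pins the GitHub REST API media type, so responses keep the v3 shape even if the service's default changes. The same header works from any HTTP client; for instance, a plain `fetch` version of the same call might look like this (a sketch with a minimal response type, requiring an environment with a global fetch):

```ts
// Pin the GitHub REST API version via the Accept header, matching the Retrofit annotation above.
async function listUsers(): Promise<Array<{ login: string; id: number }>> {
  const response = await fetch("https://api.github.com/users", {
    headers: { Accept: "application/vnd.github.v3+json" },
  });
  if (!response.ok) throw new Error(`GitHub API error: ${response.status}`);
  return response.json();
}

listUsers().then((users) => console.log(users.length));
```
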
|
ed6c4889199438a6a163f9d691367a670dcee325 | build/js/links.js | build/js/links.js | function applyLink(event) {
event.preventDefault();
var todayMatches = document.querySelectorAll(".match.today-ish");
var lastToday = todayMatches[todayMatches.length - 1];
if(lastToday) {
lastToday.scrollIntoView();
window.scrollBy(0, -50);
}
else {
window.scrollTo(0, 0);
}
}
document.addEventListener("DOMContentLoaded", function(event) {
var todayLink = document.querySelector("a.today");
todayLink.addEventListener("click", applyLink);
}); | function applyLink(event) {
event.preventDefault();
var todayMatches = document.querySelectorAll(".match.today-ish:not(.filter-no-match)");
var lastToday = todayMatches[todayMatches.length - 1];
if(lastToday) {
lastToday.scrollIntoView();
window.scrollBy(0, -50);
}
else {
window.scrollTo(0, 0);
}
}
document.addEventListener("DOMContentLoaded", function(event) {
var todayLink = document.querySelector("a.today");
todayLink.addEventListener("click", applyLink);
}); | Make 'Jump to Today' jump to the first visible match | Make 'Jump to Today' jump to the first visible match
| JavaScript | mit | bloopletech/lolschedule,bloopletech/lolschedule,bloopletech/lolschedule | javascript | ## Code Before:
function applyLink(event) {
event.preventDefault();
var todayMatches = document.querySelectorAll(".match.today-ish");
var lastToday = todayMatches[todayMatches.length - 1];
if(lastToday) {
lastToday.scrollIntoView();
window.scrollBy(0, -50);
}
else {
window.scrollTo(0, 0);
}
}
document.addEventListener("DOMContentLoaded", function(event) {
var todayLink = document.querySelector("a.today");
todayLink.addEventListener("click", applyLink);
});
## Instruction:
Make 'Jump to Today' jump to the first visible match
## Code After:
function applyLink(event) {
event.preventDefault();
var todayMatches = document.querySelectorAll(".match.today-ish:not(.filter-no-match)");
var lastToday = todayMatches[todayMatches.length - 1];
if(lastToday) {
lastToday.scrollIntoView();
window.scrollBy(0, -50);
}
else {
window.scrollTo(0, 0);
}
}
document.addEventListener("DOMContentLoaded", function(event) {
var todayLink = document.querySelector("a.today");
todayLink.addEventListener("click", applyLink);
}); |
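
The selector change above pushes the filtering into `querySelectorAll` itself: `:not(.filter-no-match)` excludes rows hidden by the page filter, so the jump logic never has to re-check visibility in JavaScript. The same idea in isolation, with a post-filtering version for comparison:

```ts
// Exclude filtered-out elements directly in the selector instead of post-filtering in JS.
const allToday = document.querySelectorAll(".match.today-ish");
const visibleToday = document.querySelectorAll(".match.today-ish:not(.filter-no-match)");

// Equivalent post-filtering, for comparison: more code and an extra pass.
const visibleViaJs = Array.from(allToday).filter(
  (el) => !el.classList.contains("filter-no-match")
);

console.log(allToday.length, visibleToday.length, visibleViaJs.length);
```
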
bf8458bbb5b525fa8c0396b78663e1de940b9cdf | NumericalCalculation/BasicWithCSharp/CancellationOfSignificantDigits/result.txt | NumericalCalculation/BasicWithCSharp/CancellationOfSignificantDigits/result.txt | normal: x_1= 999.99900000, x_2= 0.00100708
x_2 -> -0.00707901
cared : x_1= 999.99900000, x_2= 0.00100000
x_2 -> 0.00000006
| f(x) = x^2 - 1000x + c = 0
normal: x_1= 999.99900000, x_2= 0.00100708
f(x_2) -> -0.00707901
cared : x_1= 999.99900000, x_2= 0.00100000
f(x_2) -> 0.00000006
| Update "cancellation of significant digits" example | Update "cancellation of significant digits" example
| Text | mit | MasuqaT-NET/BlogExamples,MasuqaT-NET/BlogExamples,MasuqaT-NET/BlogExamples,MasuqaT-NET/BlogExamples,MasuqaT-NET/BlogExamples,MasuqaT-NET/BlogExamples,MasuqaT-NET/BlogExamples,MasuqaT-NET/BlogSamples,MasuqaT-NET/BlogExamples,MasuqaT-NET/BlogExamples,MasuqaT-NET/BlogExamples | text | ## Code Before:
normal: x_1= 999.99900000, x_2= 0.00100708
x_2 -> -0.00707901
cared : x_1= 999.99900000, x_2= 0.00100000
x_2 -> 0.00000006
## Instruction:
Update "cancellation of significant digits" example
## Code After:
f(x) = x^2 - 1000x + c = 0
normal: x_1= 999.99900000, x_2= 0.00100708
f(x_2) -> -0.00707901
cared : x_1= 999.99900000, x_2= 0.00100000
f(x_2) -> 0.00000006
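
The "normal" versus "cared" numbers above are the classic catastrophic-cancellation example for the quadratic formula. For f(x) = x^2 - 1000x + c, computing the small root as (-b - sqrt(b^2 - 4ac)) / 2a subtracts two nearly equal values and loses most of the significant digits, while the "cared" variant recovers it from the product of the roots, x_2 = c / (a * x_1). The constant c is not shown in the output, so the sketch below assumes c = 1 and emulates 32-bit floats with `Math.fround` to make the loss visible:

```ts
// Catastrophic cancellation in the quadratic formula, emulated in 32-bit floats.
// Assumed inputs: a = 1, b = -1000, c = 1 (c is not shown in the original output).
const a = 1, b = -1000, c = 1;
const f = (x: number) => x * x + b * x + c;
const f32 = Math.fround;

const sqrtD = f32(Math.sqrt(f32(b * b - 4 * a * c)));

// "normal": both roots straight from the formula; -b - sqrtD cancels badly.
const x1 = f32((-b + sqrtD) / (2 * a));
const x2Naive = f32((-b - sqrtD) / (2 * a));

// "cared": large root as above, small root from x1 * x2 = c / a.
const x2Stable = f32(c / (a * x1));

console.log(x1, x2Naive, f(x2Naive));   // residual far from 0, like the -0.007 above
console.log(x1, x2Stable, f(x2Stable)); // residual close to 0
```
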
|
7e1f71f608957c592fd164f0c6cc7e6b6d14cd0e | spec/controllers/events_controller_spec.rb | spec/controllers/events_controller_spec.rb | require 'rails_helper'
require 'pry'
describe EventsController do
def stub_current_user user
ApplicationController.any_instance.stub(:current_user) { user }
end
let(:user) {User.create(name: "pascaline", email: "[email protected]", password: "test", password_confirmation: "test")}
let(:event) {Event.create(name: "Great Tasting", location: "48 Wall St, NY, NY", date: "2014-10-30 20:45:00", time: "2014-10-30 20:45:00", creator: user)}
context 'GET #new' do
it "assigns a new event to @event" do
get :new
expect(assigns(:event)).to be_a_new(Event)
end
it "renders the :new template" do
get :new
expect(response).to render_template :new
end
end
context 'GET #edit' do
it "assigns the requested event to @event" do
stub_current_user(user)
get :edit, id: event
expect(assigns(:event)).to eq event
end
it "renders the :edit template" do
event = :event
get :edit, id: event
expect(response).to render_template :edit
end
end
end
| require 'rails_helper'
require 'pry'
describe EventsController do
def stub_current_user user
ApplicationController.any_instance.stub(:current_user) { user }
end
let(:user) {User.create(name: "pascaline", email: "[email protected]", password: "test", password_confirmation: "test")}
let(:an_event) {Event.create(name: "Great Tasting", location: "48 Wall St, NY, NY", date: "2014-10-30 20:45:00", time: "2014-10-30 20:45:00", creator: user)}
context 'GET #new' do
it "assigns a new event to @event" do
get :new
expect(assigns(:event)).to be_a_new(Event)
end
it "renders the :new template" do
get :new
expect(response).to render_template :new
end
end
context 'GET #edit' do
it "assigns the requested event to @event" do
stub_current_user(user)
get :edit, id: an_event
expect(assigns(:event)).to eq an_event
end
it "redirects with no valid user " do
get :edit, id: an_event
expect(response).to redirect_to root_path
end
it "renders the :edit template" do
stub_current_user(user)
get :edit, id: an_event
expect(response).to render_template :edit
end
end
end
| Add test for redirect to root if not logged in | Add test for redirect to root if not logged in
| Ruby | mit | mud-turtles-2014/palate,mud-turtles-2014/palate | ruby | ## Code Before:
require 'rails_helper'
require 'pry'
describe EventsController do
def stub_current_user user
ApplicationController.any_instance.stub(:current_user) { user }
end
let(:user) {User.create(name: "pascaline", email: "[email protected]", password: "test", password_confirmation: "test")}
let(:event) {Event.create(name: "Great Tasting", location: "48 Wall St, NY, NY", date: "2014-10-30 20:45:00", time: "2014-10-30 20:45:00", creator: user)}
context 'GET #new' do
it "assigns a new event to @event" do
get :new
expect(assigns(:event)).to be_a_new(Event)
end
it "renders the :new template" do
get :new
expect(response).to render_template :new
end
end
context 'GET #edit' do
it "assigns the requested event to @event" do
stub_current_user(user)
get :edit, id: event
expect(assigns(:event)).to eq event
end
it "renders the :edit template" do
event = :event
get :edit, id: event
expect(response).to render_template :edit
end
end
end
## Instruction:
Add test for redirect to root if not logged in
## Code After:
require 'rails_helper'
require 'pry'
describe EventsController do
def stub_current_user user
ApplicationController.any_instance.stub(:current_user) { user }
end
let(:user) {User.create(name: "pascaline", email: "[email protected]", password: "test", password_confirmation: "test")}
let(:an_event) {Event.create(name: "Great Tasting", location: "48 Wall St, NY, NY", date: "2014-10-30 20:45:00", time: "2014-10-30 20:45:00", creator: user)}
context 'GET #new' do
it "assigns a new event to @event" do
get :new
expect(assigns(:event)).to be_a_new(Event)
end
it "renders the :new template" do
get :new
expect(response).to render_template :new
end
end
context 'GET #edit' do
it "assigns the requested event to @event" do
stub_current_user(user)
get :edit, id: an_event
expect(assigns(:event)).to eq an_event
end
it "redirects with no valid user " do
get :edit, id: an_event
expect(response).to redirect_to root_path
end
it "renders the :edit template" do
stub_current_user(user)
get :edit, id: an_event
expect(response).to render_template :edit
end
end
end
|
5fbc369648f450963d48ca4f6344af4c3a465277 | composer.json | composer.json | {
"name": "ark4ne/phalcon-luxury-framework",
"description": "Phalcon extended framework. (Luxury:Kernel)",
"keywords": ["framework", "phalcon", "luxury"],
"minimum-stability": "stable",
"license": "MIT",
"authors": [
{
"name": "Ark4ne (Guillaume Allegret)",
"email": "[email protected]"
}
],
"require": {
"php": ">=5.6",
"ext-phalcon": ">=3.0",
"classpreloader/classpreloader": "~3.0"
},
"require-dev": {
"phalcon/devtools": "dev-master",
"phpunit/phpunit": "~5.6",
"mockery/mockery": "~0.9.4",
"satooshi/php-coveralls": "~1.0"
},
"autoload": {
"psr-4": {
"Luxury\\": "src/Luxury/",
"Phalcon\\": "libraries/Phalcon"
}
}
} | {
"name": "nucleon/framework",
"description": "Phalcon extended framework. (Luxury:Kernel)",
"keywords": ["framework", "phalcon", "luxury"],
"minimum-stability": "stable",
"license": "GNU",
"authors": [
{
"name": "Ark4ne (Guillaume Allegret)",
"email": "[email protected]"
}
],
"require": {
"php": ">=5.6",
"ext-phalcon": ">=3.0",
"classpreloader/classpreloader": "~3.0"
},
"require-dev": {
"phalcon/devtools": "dev-master",
"phpunit/phpunit": "~5.6",
"mockery/mockery": "~0.9.4",
"satooshi/php-coveralls": "~1.0"
},
"autoload": {
"psr-4": {
"Luxury\\": "src/Luxury/"
}
}
} | Change package name. Become "nucleon/framework". | Change package name. Become "nucleon/framework".
| JSON | mit | phalcon-nucleon/framework | json | ## Code Before:
{
"name": "ark4ne/phalcon-luxury-framework",
"description": "Phalcon extended framework. (Luxury:Kernel)",
"keywords": ["framework", "phalcon", "luxury"],
"minimum-stability": "stable",
"license": "MIT",
"authors": [
{
"name": "Ark4ne (Guillaume Allegret)",
"email": "[email protected]"
}
],
"require": {
"php": ">=5.6",
"ext-phalcon": ">=3.0",
"classpreloader/classpreloader": "~3.0"
},
"require-dev": {
"phalcon/devtools": "dev-master",
"phpunit/phpunit": "~5.6",
"mockery/mockery": "~0.9.4",
"satooshi/php-coveralls": "~1.0"
},
"autoload": {
"psr-4": {
"Luxury\\": "src/Luxury/",
"Phalcon\\": "libraries/Phalcon"
}
}
}
## Instruction:
Change package name. Become "nucleon/framework".
## Code After:
{
"name": "nucleon/framework",
"description": "Phalcon extended framework. (Luxury:Kernel)",
"keywords": ["framework", "phalcon", "luxury"],
"minimum-stability": "stable",
"license": "GNU",
"authors": [
{
"name": "Ark4ne (Guillaume Allegret)",
"email": "[email protected]"
}
],
"require": {
"php": ">=5.6",
"ext-phalcon": ">=3.0",
"classpreloader/classpreloader": "~3.0"
},
"require-dev": {
"phalcon/devtools": "dev-master",
"phpunit/phpunit": "~5.6",
"mockery/mockery": "~0.9.4",
"satooshi/php-coveralls": "~1.0"
},
"autoload": {
"psr-4": {
"Luxury\\": "src/Luxury/"
}
}
} |
fa81f49bf7b48bfda86e6054ef1dfe89429da82a | app/presenters/spree/mercado_pago/financial_corporation_presenter.rb | app/presenters/spree/mercado_pago/financial_corporation_presenter.rb | module Spree
module MercadoPago
class FinancialCorporationPresenter
extend Forwardable
attr_accessor :id, :name, :code, :image, :installment_plans, :best_plan
def initialize(financial_corporation)
@id = ''
@code = ''
@image = financial_corporation[:secure_thumbnail]
@name = financial_corporation[:name]
@installment_plans = financial_corporation[:installment_plans].collect do |ip|
cft = parse_label(ip[:labels][0], 'cft', 0)
tea = parse_label(ip[:labels][0], 'tea', 1)
SpreeDecidir::InstallmentPlan.new(discount_percentage: ip[:disccount_rate].to_f,
interest_percentage: ip[:installment_rate].to_f,
installments: ip[:installments], cft: cft, tea: tea)
end
@best_plan = @installment_plans.min_by { |ip| ip.order }
end
def parse_label(label, attr, position)
label_value = 0 # default value
label_with_attr = label.split('|')[position] if label.present?
label_number = label_with_attr[/.*\_(.*?)%/,1] if label_with_attr.present? && label_with_attr.include?(attr.upcase)
label_value = label_number.gsub(',','.').to_f if label_number.present?
label_value
end
end
end
end
| module Spree
module MercadoPago
class FinancialCorporationPresenter
extend Forwardable
attr_accessor :id, :name, :code, :image, :installment_plans, :best_plan
def initialize(financial_corporation)
@id = ''
@code = ''
@image = financial_corporation[:secure_thumbnail]
@name = financial_corporation[:name]
@installment_plans = financial_corporation[:installment_plans].collect do |ip|
costs_label = ip[:labels].map(&:downcase).find{ |label| label.include?('cft') && label.include?('tea') }
cft = parse_label(costs_label, 'cft', 0)
tea = parse_label(costs_label, 'tea', 1)
SpreeDecidir::InstallmentPlan.new(discount_percentage: ip[:disccount_rate].to_f,
interest_percentage: ip[:installment_rate].to_f,
installments: ip[:installments], cft: cft, tea: tea)
end
@best_plan = @installment_plans.min_by { |ip| ip.order }
end
def parse_label(label, attr, position)
label_value = 0 # default value
label_with_attr = label.split('|')[position] if label.present?
label_number = label_with_attr[/.*\_(.*?)%/,1] if label_with_attr.present? && label_with_attr.include?(attr)
label_value = label_number.gsub(',','.').to_f if label_number.present?
label_value
end
end
end
end
| Use only cft and tea label | Use only cft and tea label
| Ruby | bsd-3-clause | devartis/Spree-Mercado-Pago-payment-method,devartis/Spree-Mercado-Pago-payment-method,devartis/Spree-Mercado-Pago-payment-method | ruby | ## Code Before:
module Spree
module MercadoPago
class FinancialCorporationPresenter
extend Forwardable
attr_accessor :id, :name, :code, :image, :installment_plans, :best_plan
def initialize(financial_corporation)
@id = ''
@code = ''
@image = financial_corporation[:secure_thumbnail]
@name = financial_corporation[:name]
@installment_plans = financial_corporation[:installment_plans].collect do |ip|
cft = parse_label(ip[:labels][0], 'cft', 0)
tea = parse_label(ip[:labels][0], 'tea', 1)
SpreeDecidir::InstallmentPlan.new(discount_percentage: ip[:disccount_rate].to_f,
interest_percentage: ip[:installment_rate].to_f,
installments: ip[:installments], cft: cft, tea: tea)
end
@best_plan = @installment_plans.min_by { |ip| ip.order }
end
def parse_label(label, attr, position)
label_value = 0 # default value
label_with_attr = label.split('|')[position] if label.present?
label_number = label_with_attr[/.*\_(.*?)%/,1] if label_with_attr.present? && label_with_attr.include?(attr.upcase)
label_value = label_number.gsub(',','.').to_f if label_number.present?
label_value
end
end
end
end
## Instruction:
Use only cft and tea label
## Code After:
module Spree
module MercadoPago
class FinancialCorporationPresenter
extend Forwardable
attr_accessor :id, :name, :code, :image, :installment_plans, :best_plan
def initialize(financial_corporation)
@id = ''
@code = ''
@image = financial_corporation[:secure_thumbnail]
@name = financial_corporation[:name]
@installment_plans = financial_corporation[:installment_plans].collect do |ip|
costs_label = ip[:labels].map(&:downcase).find{ |label| label.include?('cft') && label.include?('tea') }
cft = parse_label(costs_label, 'cft', 0)
tea = parse_label(costs_label, 'tea', 1)
SpreeDecidir::InstallmentPlan.new(discount_percentage: ip[:disccount_rate].to_f,
interest_percentage: ip[:installment_rate].to_f,
installments: ip[:installments], cft: cft, tea: tea)
end
@best_plan = @installment_plans.min_by { |ip| ip.order }
end
def parse_label(label, attr, position)
label_value = 0 # default value
label_with_attr = label.split('|')[position] if label.present?
label_number = label_with_attr[/.*\_(.*?)%/,1] if label_with_attr.present? && label_with_attr.include?(attr)
label_value = label_number.gsub(',','.').to_f if label_number.present?
label_value
end
end
end
end
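
The refactor above stops assuming the cost figures live in `labels[0]` and instead picks the label that mentions both CFT and TEA (case-insensitively). The extraction itself splits the label on `|`, takes the piece for the wanted rate, reads the number between `_` and `%`, and swaps the decimal comma for a dot. The same steps in TypeScript follow; the sample label text is inferred from the Ruby regex, not taken from Mercado Pago's documentation:

```ts
// Pull a percentage out of an installment label shaped like "CFT_47,13%|TEA_38,50%".
// The label format and sample numbers are assumptions for illustration.
function parseLabel(label: string | undefined, attr: "cft" | "tea", position: number): number {
  if (!label) return 0;
  const piece = label.split("|")[position];
  if (!piece || !piece.toLowerCase().includes(attr)) return 0;
  const match = piece.match(/_(.*?)%/);              // text between "_" and "%"
  return match ? parseFloat(match[1].replace(",", ".")) : 0;
}

const labels = ["Some promo text", "CFT_47,13%|TEA_38,50%"];
const costs = labels
  .map((l) => l.toLowerCase())
  .find((l) => l.includes("cft") && l.includes("tea"));

console.log(parseLabel(costs, "cft", 0)); // 47.13
console.log(parseLabel(costs, "tea", 1)); // 38.5
```
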
|
7b08ca81a72f5cfff862295146dd5ea88bbae31e | README.md | README.md |
Deploy your Gitbook with Github Pages
## installation:
1. First install [gh-pages](https://github.com/GochoMugo/gh-pages)
2. Install this template: `gh-pages template gitbook https://github.com/GochoMugo/gh-pages-gitbook`
3. Prepare your Gitbook: `gh-pages prepare gitbook`
4. Finish on it: `gh-pages finish`
## license:
**The MIT License (MIT)**
Copyright (c) 2015 GochoMugo <[email protected]>
|
> Deploy your Gitbook with Github Pages
>
> COMPATIBLE WITH `gh-pages` [v0](https://github.com/GochoMugo/gh-pages/tree/v0).
## installation:
1. First install [gh-pages](https://github.com/GochoMugo/gh-pages)
2. Install this template: `gh-pages template gitbook https://github.com/GochoMugo/gh-pages-gitbook`
3. Prepare your Gitbook: `gh-pages prepare gitbook`
4. Finish on it: `gh-pages finish`
## license:
**The MIT License (MIT)**
Copyright (c) 2015 GochoMugo <[email protected]>
| Add notice on compatibility with gh-pages v0 | Add notice on compatibility with gh-pages v0
| Markdown | mit | GochoMugo/gh-pages-gitbook | markdown | ## Code Before:
Deploy your Gitbook with Github Pages
## installation:
1. First install [gh-pages](https://github.com/GochoMugo/gh-pages)
2. Install this template: `gh-pages template gitbook https://github.com/GochoMugo/gh-pages-gitbook`
3. Prepare your Gitbook: `gh-pages prepare gitbook`
4. Finish on it: `gh-pages finish`
## license:
**The MIT License (MIT)**
Copyright (c) 2015 GochoMugo <[email protected]>
## Instruction:
Add notice on compatibility with gh-pages v0
## Code After:
> Deploy your Gitbook with Github Pages
>
> COMPATIBLE WITH `gh-pages` [v0](https://github.com/GochoMugo/gh-pages/tree/v0).
## installation:
1. First install [gh-pages](https://github.com/GochoMugo/gh-pages)
2. Install this template: `gh-pages template gitbook https://github.com/GochoMugo/gh-pages-gitbook`
3. Prepare your Gitbook: `gh-pages prepare gitbook`
4. Finish on it: `gh-pages finish`
## license:
**The MIT License (MIT)**
Copyright (c) 2015 GochoMugo <[email protected]>
|
59370b49c1116686d26ed8128c3f54e65da2c937 | router/src/router/core.clj | router/src/router/core.clj | (ns router.core
(:require [compojure.core :refer :all]
[compojure.route :as route]
[ring.middleware.defaults :refer [wrap-defaults api-defaults]]
[ring.middleware.json :refer [wrap-json-body wrap-json-response]]
[ring.util.response :refer [response]]
[router.haproxy :as h])
(:gen-class))
(def services
(atom {}))
(defn merge-services [one two]
(let [in (one :in-port)
outs (into (one :out-port) (two :out-port))]
{:in-port in
:out-port outs}))
(defn register-service [{name :name in :in-port out :out-port}]
(let [new-service {name {:in-port in
:out-port [out]}}]
(swap! services
#(merge-with merge-services % new-service))))
(defroutes app-routes
(GET "/status" [] (response (str @services)))
(POST "/register" {body :body} (register-service body))
(route/not-found "Not Found"))
(def app
(-> app-routes
(wrap-defaults api-defaults)
(wrap-json-body {:keywords? true})
(wrap-json-response)))
| (ns router.core
(:require [compojure.core :refer :all]
[compojure.route :as route]
[ring.middleware.defaults :refer [wrap-defaults api-defaults]]
[ring.middleware.json :refer [wrap-json-body wrap-json-response]]
[ring.util.response :refer [response]]
[router.haproxy :as h])
(:gen-class))
(def services
(atom {}))
(defn merge-services [one two]
(let [in (one :in-port)
outs (into (one :out-port) (two :out-port))]
{:in-port in
:out-port outs}))
(defn register-service [{name :name in :in-port out :out-port}]
(let [new-service {name {:in-port in
:out-port [out]}}]
(swap! services
#(merge-with merge-services % new-service))))
(defroutes app-routes
(GET "/status" [] (response (str @services)))
(POST "/register" {body :body} (register-service body))
(POST "/start" [] (h/start-haproxy! @services))
(route/not-found "Not Found"))
(def app
(-> app-routes
(wrap-defaults api-defaults)
(wrap-json-body {:keywords? true})
(wrap-json-response)))
| Add handler to start load balancer | Add handler to start load balancer
| Clojure | mit | jhn/generic,jhn/generic,jhn/generic,jhn/generic | clojure | ## Code Before:
(ns router.core
(:require [compojure.core :refer :all]
[compojure.route :as route]
[ring.middleware.defaults :refer [wrap-defaults api-defaults]]
[ring.middleware.json :refer [wrap-json-body wrap-json-response]]
[ring.util.response :refer [response]]
[router.haproxy :as h])
(:gen-class))
(def services
(atom {}))
(defn merge-services [one two]
(let [in (one :in-port)
outs (into (one :out-port) (two :out-port))]
{:in-port in
:out-port outs}))
(defn register-service [{name :name in :in-port out :out-port}]
(let [new-service {name {:in-port in
:out-port [out]}}]
(swap! services
#(merge-with merge-services % new-service))))
(defroutes app-routes
(GET "/status" [] (response (str @services)))
(POST "/register" {body :body} (register-service body))
(route/not-found "Not Found"))
(def app
(-> app-routes
(wrap-defaults api-defaults)
(wrap-json-body {:keywords? true})
(wrap-json-response)))
## Instruction:
Add handler to start load balancer
## Code After:
(ns router.core
(:require [compojure.core :refer :all]
[compojure.route :as route]
[ring.middleware.defaults :refer [wrap-defaults api-defaults]]
[ring.middleware.json :refer [wrap-json-body wrap-json-response]]
[ring.util.response :refer [response]]
[router.haproxy :as h])
(:gen-class))
(def services
(atom {}))
(defn merge-services [one two]
(let [in (one :in-port)
outs (into (one :out-port) (two :out-port))]
{:in-port in
:out-port outs}))
(defn register-service [{name :name in :in-port out :out-port}]
(let [new-service {name {:in-port in
:out-port [out]}}]
(swap! services
#(merge-with merge-services % new-service))))
(defroutes app-routes
(GET "/status" [] (response (str @services)))
(POST "/register" {body :body} (register-service body))
(POST "/start" [] (h/start-haproxy! @services))
(route/not-found "Not Found"))
(def app
(-> app-routes
(wrap-defaults api-defaults)
(wrap-json-body {:keywords? true})
(wrap-json-response)))
|
f88e30ecbf212928628942802309faf6f11fd7ff | app/views/answers/_answer.html.erb | app/views/answers/_answer.html.erb | <div class="answer">
<div class="row">
<div class="content span12">
<%= answer.content.html_safe %>
</div>
<div class="author span12">
<div class="row">
<div class="span9">
Submitted by <%= link_to answer.user.username, 'question.user' %>.
</div>
<div class="span3">
Submitted at <%= answer.created_at.to_s(:short) %>
</div>
</div>
</div>
</div>
</div>
| <div class="answer">
<div class="row">
<div class="controls span1">
<i class="icon-ok-sign icon-2x"></i>
</div>
<div class="content span11">
<%= answer.content.html_safe %>
</div>
<div class="author span11 pull-right">
<div class="row">
<div class="span9">
Submitted by <%= link_to answer.user.username, 'question.user' %>.
</div>
<div class="span3">
Submitted at <%= answer.created_at.to_s(:short) %>
</div>
</div>
</div>
</div>
</div>
| Add 'accepted' icon to answer - not yet doing anything though | Add 'accepted' icon to answer - not yet doing anything though
| HTML+ERB | mit | joshmcarthur/inquest,joshmcarthur/inquest | html+erb | ## Code Before:
<div class="answer">
<div class="row">
<div class="content span12">
<%= answer.content.html_safe %>
</div>
<div class="author span12">
<div class="row">
<div class="span9">
Submitted by <%= link_to answer.user.username, 'question.user' %>.
</div>
<div class="span3">
Submitted at <%= answer.created_at.to_s(:short) %>
</div>
</div>
</div>
</div>
</div>
## Instruction:
Add 'accepted' icon to answer - not yet doing anything though
## Code After:
<div class="answer">
<div class="row">
<div class="controls span1">
<i class="icon-ok-sign icon-2x"></i>
</div>
<div class="content span11">
<%= answer.content.html_safe %>
</div>
<div class="author span11 pull-right">
<div class="row">
<div class="span9">
Submitted by <%= link_to answer.user.username, 'question.user' %>.
</div>
<div class="span3">
Submitted at <%= answer.created_at.to_s(:short) %>
</div>
</div>
</div>
</div>
</div>
|
0a8a0550e69fd14346752a3ba5b312315630a596 | frontend/View/Index/variants.nouser.inc.tpl | frontend/View/Index/variants.nouser.inc.tpl | <!--
/**
*
*
* @author Knut Kohl <[email protected]>
* @copyright 2012-2013 Knut Kohl
* @license GNU General Public License http://www.gnu.org/licenses/gpl.txt
* @version 1.0.0
*/
-->
<div class="grid_10" style="margin-top:.5em;margin-bottom:.5em">
<span style="margin-right:1em">{{VariantsPublic}}:</span>
<select id="loaddeleteview" name="loaddeleteview" onChange="this.form.submit()">
<option value="">--- {{Select}} ---</option>
<!-- BEGIN VIEWS --><!-- IF {PUBLIC} -->
<!-- show only public charts -->
<option value="{NAME}" <!-- IF {SELECTED} -->selected="selected"<!-- ENDIF -->>
{NAME}
</option>
<!-- ENDIF --><!-- END -->
</select>
<input type="hidden" name="load" value="{{Load}}" />
<noscript>
<input type="submit" name="load" value="{{Load}}" style="margin-left:.5em" />
</noscript>
</div>
<div class="clear"></div>
| <!--
/**
*
*
* @author Knut Kohl <[email protected]>
* @copyright 2012-2013 Knut Kohl
* @license GNU General Public License http://www.gnu.org/licenses/gpl.txt
* @version 1.0.0
*/
-->
<!-- IF {VIEW} -->
<div class="grid_10" style="margin-top:.5em;margin-bottom:.5em">
<span style="margin-right:1em">{{VariantsPublic}}:</span>
<select id="loaddeleteview" name="loaddeleteview" onChange="if (this.value) this.form.submit()">
<option value="">--- {{Select}} ---</option>
<!-- BEGIN VIEWS --><!-- IF {PUBLIC} -->
<!-- show only public charts -->
<option value="{NAME}" <!-- IF {SELECTED} -->selected="selected"<!-- ENDIF -->>
{NAME}
</option>
<!-- ENDIF --><!-- END -->
</select>
<input type="hidden" name="load" value="{{Load}}" />
<noscript>
<input type="submit" name="load" value="{{Load}}" style="margin-left:.5em" />
</noscript>
</div>
<div class="clear"></div>
<!-- ENDIF -->
| Adjust layout for not logged in user | Adjust layout for not logged in user
| Smarty | mit | pafei/PVLng,KKoPV/PVLng,pafei/PVLng,pafei/PVLng,KKoPV/PVLng,KKoPV/PVLng,KKoPV/PVLng | smarty | ## Code Before:
<!--
/**
*
*
* @author Knut Kohl <[email protected]>
* @copyright 2012-2013 Knut Kohl
* @license GNU General Public License http://www.gnu.org/licenses/gpl.txt
* @version 1.0.0
*/
-->
<div class="grid_10" style="margin-top:.5em;margin-bottom:.5em">
<span style="margin-right:1em">{{VariantsPublic}}:</span>
<select id="loaddeleteview" name="loaddeleteview" onChange="this.form.submit()">
<option value="">--- {{Select}} ---</option>
<!-- BEGIN VIEWS --><!-- IF {PUBLIC} -->
<!-- show only public charts -->
<option value="{NAME}" <!-- IF {SELECTED} -->selected="selected"<!-- ENDIF -->>
{NAME}
</option>
<!-- ENDIF --><!-- END -->
</select>
<input type="hidden" name="load" value="{{Load}}" />
<noscript>
<input type="submit" name="load" value="{{Load}}" style="margin-left:.5em" />
</noscript>
</div>
<div class="clear"></div>
## Instruction:
Adjust layout for not logged in user
## Code After:
<!--
/**
*
*
* @author Knut Kohl <[email protected]>
* @copyright 2012-2013 Knut Kohl
* @license GNU General Public License http://www.gnu.org/licenses/gpl.txt
* @version 1.0.0
*/
-->
<!-- IF {VIEW} -->
<div class="grid_10" style="margin-top:.5em;margin-bottom:.5em">
<span style="margin-right:1em">{{VariantsPublic}}:</span>
<select id="loaddeleteview" name="loaddeleteview" onChange="if (this.value) this.form.submit()">
<option value="">--- {{Select}} ---</option>
<!-- BEGIN VIEWS --><!-- IF {PUBLIC} -->
<!-- show only public charts -->
<option value="{NAME}" <!-- IF {SELECTED} -->selected="selected"<!-- ENDIF -->>
{NAME}
</option>
<!-- ENDIF --><!-- END -->
</select>
<input type="hidden" name="load" value="{{Load}}" />
<noscript>
<input type="submit" name="load" value="{{Load}}" style="margin-left:.5em" />
</noscript>
</div>
<div class="clear"></div>
<!-- ENDIF -->
|
50448af304d8da8124833c3ef49e79c582aa76d8 | README.md | README.md |
[https://ksi.fi.muni.cz](https://ksi.fi.muni.cz/)
## Software needed
* Python 3.5
* virtualenv
* packages from `requirements.txt`
* [isolate](https://github.com/cms-dev/isolate)
## Installation
1. Clone this repository.
2. Run `init-makedirs.sh`.
3. Install virtualenv & packages into `ksi-py3-venv` directory.
```
virtualenv -p python3 ksi-py3-venv
source ksi-py3-venv/bin/activate
pip3 install -r requirements.txt
```
4. Enter db url into `config.py` file. Format:
```
SQL_ALCHEMY_URI = 'mysql://username:password@server/db_name?charset=utf8'
```
5. Uncomment part of the `app.py`, which creates database structure.
6. Run the server, comment the database-create-section in `run.py`
7. Install `isolate`.
8. Optional: make `/tmp` tmpfs.
9. Optional: ensure the server will be started after system boots up
(run ./runner start).
|
[https://ksi.fi.muni.cz](https://ksi.fi.muni.cz/)
## Software needed
* Python 3.5
* virtualenv
* packages from `requirements.txt`
* [isolate](https://github.com/cms-dev/isolate)
## Installation
1. Clone this repository.
2. Run `init-makedirs.sh`.
3. Install virtualenv & packages into `ksi-py3-venv` directory.
```
virtualenv -p python3 ksi-py3-venv
source ksi-py3-venv/bin/activate
pip3 install -r requirements.txt
```
4. Enter db url into `config.py` file. Format:
```
SQL_ALCHEMY_URI = 'mysql://username:password@server/db_name?charset=utf8'
```
5. Uncomment part of the `app.py`, which creates database structure.
6. Run the server, comment the database-create-section in `run.py`
7. Install `isolate`.
8. Optional: make `/tmp` tmpfs.
9. Optional: ensure the server will be started after system boots up
(run ./runner start).
## Server control
* To start server run: `./runner start`.
* To stop server run: `./runner stop`.
* The `runner` script must be executed in server's root directory.
* Logs are stored in `/var/log/gunicorn/*`.
| Add note about service control. | Add note about service control.
| Markdown | mit | fi-ksi/web-backend,fi-ksi/web-backend | markdown | ## Code Before:
[https://ksi.fi.muni.cz](https://ksi.fi.muni.cz/)
## Software needed
* Python 3.5
* virtualenv
* packages from `requirements.txt`
* [isolate](https://github.com/cms-dev/isolate)
## Installation
1. Clone this repository.
2. Run `init-makedirs.sh`.
3. Install virtualenv & packages into `ksi-py3-venv` directory.
```
virtualenv -p python3 ksi-py3-venv
source ksi-py3-venv/bin/activate
pip3 install -r requirements.txt
```
4. Enter db url into `config.py` file. Format:
```
SQL_ALCHEMY_URI = 'mysql://username:password@server/db_name?charset=utf8'
```
5. Uncomment part of the `app.py`, which creates database structure.
6. Run the server, comment the database-create-section in `run.py`
7. Install `isolate`.
8. Optional: make `/tmp` tmpfs.
9. Optional: ensure the server will be started after system boots up
(run ./runner start).
## Instruction:
Add note about service control.
## Code After:
[https://ksi.fi.muni.cz](https://ksi.fi.muni.cz/)
## Software needed
* Python 3.5
* virtualenv
* packages from `requirements.txt`
* [isolate](https://github.com/cms-dev/isolate)
## Installation
1. Clone this repository.
2. Run `init-makedirs.sh`.
3. Install virtualenv & packages into `ksi-py3-venv` directory.
```
virtualenv -p python3 ksi-py3-venv
source ksi-py3-venv/bin/activate
pip3 install -r requirements.txt
```
4. Enter db url into `config.py` file. Format:
```
SQL_ALCHEMY_URI = 'mysql://username:password@server/db_name?charset=utf8'
```
5. Uncomment part of the `app.py`, which creates database structure.
6. Run the server, comment the database-create-section in `run.py`
7. Install `isolate`.
8. Optional: make `/tmp` tmpfs.
9. Optional: ensure the server will be started after system boots up
(run ./runner start).
## Server control
* To start server run: `./runner start`.
* To stop server run: `./runner stop`.
* The `runner` script must be executed in server's root directory.
* Logs are stored in `/var/log/gunicorn/*`.
|
3889c1c5e20353212c89e85cd45b2d3fadfe4308 | copy_diff.sh | copy_diff.sh | _file="$1"
_target="$2"
[ $# -eq 0 ] && { echo "Usage: $0 filename"; exit 1; }
[ ! -f "$_file" ] && { echo "Error: $0 file not found."; exit 2; }
if [ -s "$_file" ]
then
echo "$_file : Non empty diff"
tail -vn +1 build/diff* >> diff_results/$_target
rm build/diff*
# do something as file has data
else
echo "$_file : Passed test"
# do something as file is empty
fi
| _file="$1"
_target="$2"
[ $# -eq 0 ] && { echo "Usage: $0 filename"; exit 1; }
[ ! -f "$_file" ] && { echo "Error: $0 file not found."; exit 2; }
if [ -s "$_file" ]
then
echo "$_file : Failed test : Non empty diff"
tail -vn +1 build/diff* >> diff_results/$_target
rm build/diff*
else
rm build/diff*
echo "$_file : Passed test"
fi
| Remove diff files after every run | Remove diff files after every run
| Shell | apache-2.0 | utsaslab/crashmonkey,utsaslab/crashmonkey,utsaslab/crashmonkey,utsaslab/crashmonkey | shell | ## Code Before:
_file="$1"
_target="$2"
[ $# -eq 0 ] && { echo "Usage: $0 filename"; exit 1; }
[ ! -f "$_file" ] && { echo "Error: $0 file not found."; exit 2; }
if [ -s "$_file" ]
then
echo "$_file : Non empty diff"
tail -vn +1 build/diff* >> diff_results/$_target
rm build/diff*
# do something as file has data
else
echo "$_file : Passed test"
# do something as file is empty
fi
## Instruction:
Remove diff files after every run
## Code After:
_file="$1"
_target="$2"
[ $# -eq 0 ] && { echo "Usage: $0 filename"; exit 1; }
[ ! -f "$_file" ] && { echo "Error: $0 file not found."; exit 2; }
if [ -s "$_file" ]
then
echo "$_file : Failed test : Non empty diff"
tail -vn +1 build/diff* >> diff_results/$_target
rm build/diff*
else
rm build/diff*
echo "$_file : Passed test"
fi
|
76b47fec3b24410f875db96b3404c47d4c3634cb | sheepdog_tables/__init__.py | sheepdog_tables/__init__.py | __version__ = '1.2.0'
try:
from django.conf import settings
getattr(settings, 'dummy_attr', 'dummy_value')
_LOAD_PACKAGES = True
except:
# Just running sdist, we think
_LOAD_PACKAGES = False
if _LOAD_PACKAGES:
from mixins import (TablesMixin, EditTablesMixin, FilteredListView,
CSVTableMixin)
from column import ColumnURL, Column, DictColumn, FieldColumn
from table import Table, EditTable
| __version__ = '1.2.0'
try:
from django.conf import settings
getattr(settings, 'dummy_attr', 'dummy_value')
_LOAD_PACKAGES = True
except:
# Just running sdist, we think
_LOAD_PACKAGES = False
if _LOAD_PACKAGES:
from mixins import TablesMixin, EditTablesMixin, FilteredListView
from column import ColumnURL, Column, DictColumn, FieldColumn
from table import Table, EditTable
| Fix import error after removal of old csv table mixin | Fix import error after removal of old csv table mixin
| Python | bsd-3-clause | SheepDogInc/sheepdog_tables,SheepDogInc/sheepdog_tables | python | ## Code Before:
__version__ = '1.2.0'
try:
from django.conf import settings
getattr(settings, 'dummy_attr', 'dummy_value')
_LOAD_PACKAGES = True
except:
# Just running sdist, we think
_LOAD_PACKAGES = False
if _LOAD_PACKAGES:
from mixins import (TablesMixin, EditTablesMixin, FilteredListView,
CSVTableMixin)
from column import ColumnURL, Column, DictColumn, FieldColumn
from table import Table, EditTable
## Instruction:
Fix import error after removal of old csv table mixin
## Code After:
__version__ = '1.2.0'
try:
from django.conf import settings
getattr(settings, 'dummy_attr', 'dummy_value')
_LOAD_PACKAGES = True
except:
# Just running sdist, we think
_LOAD_PACKAGES = False
if _LOAD_PACKAGES:
from mixins import TablesMixin, EditTablesMixin, FilteredListView
from column import ColumnURL, Column, DictColumn, FieldColumn
from table import Table, EditTable
|
b3b02c8d6dfe42e0ce0a61248c792fc67295abc1 | README.md | README.md | [](https://travis-ci.org/Geraet/Kompositum)
# Kompositum
## Requirements
* CMake 3.0+
* C++11 compatible compiler
* Ninja (for generating ninja files)
## How to build and run the tests
* Clone the repository: `git clone https://github.com/Geraet/Kompositum.git`
* Generate ninja files:
```
cd Kompositum
mkdir build
cd build
cmake -G "Ninja" ..
```
* Run the tests:
```
ninja
ninja test
```
## License
Released under the [MIT License](LICENSE)
| [](https://travis-ci.org/Geraet/Kompositum)
[](https://ci.appveyor.com/project/rehans/kompositum/branch/master)
# Kompositum
## Requirements
* CMake 3.0+
* C++11 compatible compiler
* Ninja (for generating ninja files)
## How to build and run the tests
* Clone the repository: `git clone https://github.com/Geraet/Kompositum.git`
* Generate ninja files:
```
cd Kompositum
mkdir build
cd build
cmake -G "Ninja" ..
```
* Run the tests:
```
ninja
ninja test
```
## License
Released under the [MIT License](LICENSE)
 | Add appveyor build status to readme | Add appveyor build status to readme
| Markdown | mit | Geraet/Kompositum,rehans/Kompositum | markdown | ## Code Before:
[](https://travis-ci.org/Geraet/Kompositum)
# Kompositum
## Requirements
* CMake 3.0+
* C++11 compatible compiler
* Ninja (for generating ninja files)
## How to build and run the tests
* Clone the repository: `git clone https://github.com/Geraet/Kompositum.git`
* Generate ninja files:
```
cd Kompositum
mkdir build
cd build
cmake -G "Ninja" ..
```
* Run the tests:
```
ninja
ninja test
```
## License
Released under the [MIT License](LICENSE)
## Instruction:
Add appveyor build status to readme
## Code After:
[](https://travis-ci.org/Geraet/Kompositum)
[](https://ci.appveyor.com/project/rehans/kompositum/branch/master)
# Kompositum
## Requirements
* CMake 3.0+
* C++11 compatible compiler
* Ninja (for generating ninja files)
## How to build and run the tests
* Clone the repository: `git clone https://github.com/Geraet/Kompositum.git`
* Generate ninja files:
```
cd Kompositum
mkdir build
cd build
cmake -G "Ninja" ..
```
* Run the tests:
```
ninja
ninja test
```
## License
Released under the [MIT License](LICENSE)
|
d257872bf4524c1112ab5dcb1162255731f57bf7 | app/Presenters/ProfileFieldGroup.php | app/Presenters/ProfileFieldGroup.php | <?php
namespace MyBB\Core\Presenters;
use Illuminate\Support\Facades\App;
use McCool\LaravelAutoPresenter\BasePresenter;
class ProfileFieldGroup extends BasePresenter
{
public function fields()
{
$profileFields = $this->getWrappedObject()->getProfileFields()->get();
$decorated = [];
$decorator = App::make('autopresenter');
foreach ($profileFields as $profileField) {
$decorated[] = $decorator->decorate($profileField);
}
return $decorated;
}
}
| <?php
namespace MyBB\Core\Presenters;
use Illuminate\Support\Facades\App;
use McCool\LaravelAutoPresenter\BasePresenter;
class ProfileFieldGroup extends BasePresenter
{
public function fields()
{
$profileFields = $this->getWrappedObject()->getProfileFields()->get();
$profileFields = $profileFields->sortBy('display_order');
$decorated = [];
$decorator = App::make('autopresenter');
foreach ($profileFields as $profileField) {
$decorated[] = $decorator->decorate($profileField);
}
return $decorated;
}
}
| Sort using the display_order when we present the fields from a group | Sort using the display_order when we present the fields from a group
| PHP | bsd-3-clause | mybb/mybb2,xaoseric/mybb2,mybb/mybb2,mybb/mybb2,mybb/mybb2,Matslom/mybb2,ATofighi/mybb2,xaoseric/mybb2,Matslom/mybb2,ATofighi/mybb2,Matslom/mybb2,Matslom/mybb2,xaoseric/mybb2,xaoseric/mybb2,ATofighi/mybb2 | php | ## Code Before:
<?php
namespace MyBB\Core\Presenters;
use Illuminate\Support\Facades\App;
use McCool\LaravelAutoPresenter\BasePresenter;
class ProfileFieldGroup extends BasePresenter
{
public function fields()
{
$profileFields = $this->getWrappedObject()->getProfileFields()->get();
$decorated = [];
$decorator = App::make('autopresenter');
foreach ($profileFields as $profileField) {
$decorated[] = $decorator->decorate($profileField);
}
return $decorated;
}
}
## Instruction:
Sort using the display_order when we present the fields from a group
## Code After:
<?php
namespace MyBB\Core\Presenters;
use Illuminate\Support\Facades\App;
use McCool\LaravelAutoPresenter\BasePresenter;
class ProfileFieldGroup extends BasePresenter
{
public function fields()
{
$profileFields = $this->getWrappedObject()->getProfileFields()->get();
$profileFields = $profileFields->sortBy('display_order');
$decorated = [];
$decorator = App::make('autopresenter');
foreach ($profileFields as $profileField) {
$decorated[] = $decorator->decorate($profileField);
}
return $decorated;
}
}
|
6d42453b1773311a61cacf931562f04d156e888d | rest/message/list-get-example-2/list-get-example-2.js | rest/message/list-get-example-2/list-get-example-2.js | // Download the Node helper library from twilio.com/docs/node/install
// These vars are your accountSid and authToken from twilio.com/user/account
var accountSid = 'ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX';
var authToken = "your_auth_token";
var client = require('twilio')(accountSid, authToken);
// TODO: Add To, From, DateSent> parameters
client.messages.list(function(err, data) {
data.messages.forEach(function(message) {
console.log(message.body);
});
}); | // Download the Node helper library from twilio.com/docs/node/install
// These vars are your accountSid and authToken from twilio.com/user/account
var accountSid = 'ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX';
var authToken = "your_auth_token";
var client = require('twilio')(accountSid, authToken);
var filterOpts = {
to: 'to_number',
from: 'from_number',
dateSent: new Date(2016, 0, 1)
};
client.messages.list(filterOpts, function(err, data) {
data.forEach(function(message) {
console.log(message.body);
});
});
| Add filterOpts for listing messages | Add filterOpts for listing messages
| JavaScript | mit | teoreteetik/api-snippets,teoreteetik/api-snippets,TwilioDevEd/api-snippets,TwilioDevEd/api-snippets,TwilioDevEd/api-snippets,teoreteetik/api-snippets,TwilioDevEd/api-snippets,TwilioDevEd/api-snippets,TwilioDevEd/api-snippets,teoreteetik/api-snippets,teoreteetik/api-snippets,teoreteetik/api-snippets,TwilioDevEd/api-snippets,teoreteetik/api-snippets,TwilioDevEd/api-snippets,TwilioDevEd/api-snippets,teoreteetik/api-snippets,TwilioDevEd/api-snippets,teoreteetik/api-snippets,TwilioDevEd/api-snippets,TwilioDevEd/api-snippets | javascript | ## Code Before:
// Download the Node helper library from twilio.com/docs/node/install
// These vars are your accountSid and authToken from twilio.com/user/account
var accountSid = 'ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX';
var authToken = "your_auth_token";
var client = require('twilio')(accountSid, authToken);
// TODO: Add To, From, DateSent> parameters
client.messages.list(function(err, data) {
data.messages.forEach(function(message) {
console.log(message.body);
});
});
## Instruction:
Add filterOpts for listing messages
## Code After:
// Download the Node helper library from twilio.com/docs/node/install
// These vars are your accountSid and authToken from twilio.com/user/account
var accountSid = 'ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX';
var authToken = "your_auth_token";
var client = require('twilio')(accountSid, authToken);
var filterOpts = {
to: 'to_number',
from: 'from_number',
dateSent: new Date(2016, 0, 1)
};
client.messages.list(filterOpts, function(err, data) {
data.forEach(function(message) {
console.log(message.body);
});
});
|
d76659cde113f54aa724bf2554f9dc84bb2d8e8e | lib/cb.events.socketio/main.js | lib/cb.events.socketio/main.js | // Requires
var wireFriendly = require('../utils').wireFriendly;
function setup(options, imports, register) {
// Import
var events = imports.events;
var io = imports.socket_io.io;
// Send events to user
io.of('/events').on('connection', function(socket) {
// Send to client
var handler = function(data) {
socket.emit(this.event, wireFriendly(data));
};
// Clean up on disconnect
var cleanup = function() {
events.offAny(handler);
};
// Construct
events.onAny(handler);
// Disconnect cleanly
socket.on('disconnect', cleanup);
});
// Register
register(null, {});
}
// Exports
module.exports = setup;
| // Requires
var wireFriendly = require('../utils').wireFriendly;
function setup(options, imports, register) {
// Import
var events = imports.events;
var io = imports.socket_io.io;
// Send events to user
io.of('/events').on('connection', function(socket) {
// Send to client
var handler = function(data) {
socket.emit("event", {
"event": this.event,
"data": wireFriendly(data)
});
};
// Clean up on disconnect
var cleanup = function() {
events.offAny(handler);
};
// Construct
events.onAny(handler);
// Disconnect cleanly
socket.on('disconnect', cleanup);
});
// Register
register(null, {});
}
// Exports
module.exports = setup;
| Change event propagation in socket.io | Change event propagation in socket.io
| JavaScript | apache-2.0 | kustomzone/codebox,code-box/codebox,ronoaldo/codebox,smallbal/codebox,ahmadassaf/Codebox,rajthilakmca/codebox,nobutakaoshiro/codebox,fly19890211/codebox,nobutakaoshiro/codebox,Ckai1991/codebox,listepo/codebox,rajthilakmca/codebox,LogeshEswar/codebox,listepo/codebox,rodrigues-daniel/codebox,smallbal/codebox,indykish/codebox,code-box/codebox,quietdog/codebox,CodeboxIDE/codebox,indykish/codebox,blubrackets/codebox,etopian/codebox,fly19890211/codebox,kustomzone/codebox,etopian/codebox,blubrackets/codebox,rodrigues-daniel/codebox,LogeshEswar/codebox,lcamilo15/codebox,CodeboxIDE/codebox,Ckai1991/codebox,ahmadassaf/Codebox,quietdog/codebox,lcamilo15/codebox,ronoaldo/codebox | javascript | ## Code Before:
// Requires
var wireFriendly = require('../utils').wireFriendly;
function setup(options, imports, register) {
// Import
var events = imports.events;
var io = imports.socket_io.io;
// Send events to user
io.of('/events').on('connection', function(socket) {
// Send to client
var handler = function(data) {
socket.emit(this.event, wireFriendly(data));
};
// Clean up on disconnect
var cleanup = function() {
events.offAny(handler);
};
// Construct
events.onAny(handler);
// Disconnect cleanly
socket.on('disconnect', cleanup);
});
// Register
register(null, {});
}
// Exports
module.exports = setup;
## Instruction:
Change event propagation in socket.io
## Code After:
// Requires
var wireFriendly = require('../utils').wireFriendly;
function setup(options, imports, register) {
// Import
var events = imports.events;
var io = imports.socket_io.io;
// Send events to user
io.of('/events').on('connection', function(socket) {
// Send to client
var handler = function(data) {
socket.emit("event", {
"event": this.event,
"data": wireFriendly(data)
});
};
// Clean up on disconnect
var cleanup = function() {
events.offAny(handler);
};
// Construct
events.onAny(handler);
// Disconnect cleanly
socket.on('disconnect', cleanup);
});
// Register
register(null, {});
}
// Exports
module.exports = setup;
|
d1647ca2cb34f382de5e5289d1ee71f6befb9c88 | scripts/make-package.bat | scripts/make-package.bat | @echo off
set zipcmd=%~dp0\tools\7z\7z.exe
pushd %~dp0\..\..
%zipcmd% a -r -x!scripts -xr!.* -x!__pycache__ blenderseed-x.x.x-yyyy.zip blenderseed
popd
move ..\..\blenderseed-x.x.x-yyyy.zip .
pause
| @echo off
set zipcmd=%~dp0\tools\7z\7z.exe
pushd %~dp0\..\..
%zipcmd% a -r -x!scripts -xr!.* -x!__pycache__ -x!README.md blenderseed-x.x.x-yyyy.zip blenderseed
popd
move ..\..\blenderseed-x.x.x-yyyy.zip .
pause
| Exclude README.md file from package | Exclude README.md file from package
| Batchfile | mit | appleseedhq/blenderseed,appleseedhq/blenderseed,dictoon/blenderseed,dictoon/blenderseed | batchfile | ## Code Before:
@echo off
set zipcmd=%~dp0\tools\7z\7z.exe
pushd %~dp0\..\..
%zipcmd% a -r -x!scripts -xr!.* -x!__pycache__ blenderseed-x.x.x-yyyy.zip blenderseed
popd
move ..\..\blenderseed-x.x.x-yyyy.zip .
pause
## Instruction:
Exclude README.md file from package
## Code After:
@echo off
set zipcmd=%~dp0\tools\7z\7z.exe
pushd %~dp0\..\..
%zipcmd% a -r -x!scripts -xr!.* -x!__pycache__ -x!README.md blenderseed-x.x.x-yyyy.zip blenderseed
popd
move ..\..\blenderseed-x.x.x-yyyy.zip .
pause
|
1311963cd3ea733f5aa942e97f8759ae48c62f16 | lib/URI/Grammar.pm | lib/URI/Grammar.pm | use v6;
grammar URI::Grammar {
token TOP { ^ [<scheme> ':']? [ '//' <authority>]? <path> ['?' <query>]? ['#' <fragment>]? $ };
token scheme { <-[:/&?#]>+ };
token authority { <host> [':' <port>]? };
token host { <-[/&?#:]>* };
token port { <pt6553X>|<pt655XX>|<pt65XXX>|<pt6XXXX>|<pt10K>|<ptLow> };
token ptLow { \d**1..4 };
token pt10K { <[1..5]>\d**4 };
token pt6XXXX { 6<[0..4]>\d**3 };
token pt65XXX { 65<[0..4]>\d**2 };
token pt655XX { 655<[0..2]>\d };
token pt6553X { 6553<[0..5]> };
token path { <slash>? [ <chunk> '/'?]* }; # * mb wrong, because that allow '' URI
token slash { '/' };
token chunk { <-[/?#]>+ };
token query { <-[#]>* };
token fragment { .* };
}
# Official regexp (p5):
# my($scheme, $authority, $path, $query, $fragment) =
# $uri =~ m/
# (?:([^:/?#]+):)?
# (?://([^/?#]*))?
# ([^?#]*)
# (?:\?([^#]*))?
# (?:#(.*))?
# /x;
# vim:ft=perl6
| use v6;
grammar URI::Grammar {
token TOP { ^ [<scheme> ':']? [ '//' <authority>]? <path> ['?' <query>]? ['#' <fragment>]? $ };
token scheme { <-[:/&?#]>+ };
token authority { <host> [':' <port>]? };
token host { <-[/&?#:]>* };
token port { (\d**1..5)
                       <?{{
                            $I0 = match[0]
                            $I1 = 0
                            if $I0 > 65535 goto fail
                            $I1 = 1
                          fail:
                            .return ($I1)
                       }}>
<!before \d> };
token path { <slash>? [ <chunk> '/'?]* }; # * mb wrong, because that allow '' URI
token slash { '/' };
token chunk { <-[/?#]>+ };
token query { <-[#]>* };
token fragment { .* };
}
# Official regexp (p5):
# my($scheme, $authority, $path, $query, $fragment) =
# $uri =~ m/
# (?:([^:/?#]+):)?
# (?://([^/?#]*))?
# ([^?#]*)
# (?:\?([^#]*))?
# (?:#(.*))?
# /x;
# vim:ft=perl6
| Use PIR for simpler code. Thanks masak++! | Use PIR for simpler code. Thanks masak++!
| Perl | artistic-2.0 | viklund/november,viklund/november,viklund/november | perl | ## Code Before:
use v6;
grammar URI::Grammar {
token TOP { ^ [<scheme> ':']? [ '//' <authority>]? <path> ['?' <query>]? ['#' <fragment>]? $ };
token scheme { <-[:/&?#]>+ };
token authority { <host> [':' <port>]? };
token host { <-[/&?#:]>* };
token port { <pt6553X>|<pt655XX>|<pt65XXX>|<pt6XXXX>|<pt10K>|<ptLow> };
token ptLow { \d**1..4 };
token pt10K { <[1..5]>\d**4 };
token pt6XXXX { 6<[0..4]>\d**3 };
token pt65XXX { 65<[0..4]>\d**2 };
token pt655XX { 655<[0..2]>\d };
token pt6553X { 6553<[0..5]> };
token path { <slash>? [ <chunk> '/'?]* }; # * mb wrong, because that allow '' URI
token slash { '/' };
token chunk { <-[/?#]>+ };
token query { <-[#]>* };
token fragment { .* };
}
# Official regexp (p5):
# my($scheme, $authority, $path, $query, $fragment) =
# $uri =~ m/
# (?:([^:/?#]+):)?
# (?://([^/?#]*))?
# ([^?#]*)
# (?:\?([^#]*))?
# (?:#(.*))?
# /x;
# vim:ft=perl6
## Instruction:
Use PIR for simpler code. Thanks masak++!
## Code After:
use v6;
grammar URI::Grammar {
token TOP { ^ [<scheme> ':']? [ '//' <authority>]? <path> ['?' <query>]? ['#' <fragment>]? $ };
token scheme { <-[:/&?#]>+ };
token authority { <host> [':' <port>]? };
token host { <-[/&?#:]>* };
token port { (\d**1..5)
                       <?{{
                            $I0 = match[0]
                            $I1 = 0
                            if $I0 > 65535 goto fail
                            $I1 = 1
                          fail:
                            .return ($I1)
                       }}>
<!before \d> };
token path { <slash>? [ <chunk> '/'?]* }; # * mb wrong, because that allow '' URI
token slash { '/' };
token chunk { <-[/?#]>+ };
token query { <-[#]>* };
token fragment { .* };
}
# Official regexp (p5):
# my($scheme, $authority, $path, $query, $fragment) =
# $uri =~ m/
# (?:([^:/?#]+):)?
# (?://([^/?#]*))?
# ([^?#]*)
# (?:\?([^#]*))?
# (?:#(.*))?
# /x;
# vim:ft=perl6
|
1109bafae08b71d496503c3bc5bc8cc48f4704fe | .travis.yml | .travis.yml | language: java
jdk:
- openjdk10
- openjdk12
cache:
directories:
- .git/lfs
git:
lfs_skip_smudge: true
install:
- git lfs pull
before_install:
grep -v '^#' assets/src/main/resources/META-INF/services/bisq.asset.Asset | sort --check --dictionary-order --ignore-case
| language: java
jdk:
- openjdk14
cache:
directories:
- .git/lfs
git:
lfs_skip_smudge: true
install:
- git lfs pull
before_install:
grep -v '^#' assets/src/main/resources/META-INF/services/bisq.asset.Asset | sort --check --dictionary-order --ignore-case
| Upgrade buildserver config to OpenJDK 14 | Upgrade buildserver config to OpenJDK 14
Use java 14 to build the necessary artifacts. This is the most recent
version which brings support for JavaFX 14, as well as a newer packager
tool to build native applications.
| YAML | agpl-3.0 | bitsquare/bitsquare,bisq-network/exchange,bitsquare/bitsquare,bisq-network/exchange | yaml | ## Code Before:
language: java
jdk:
- openjdk10
- openjdk12
cache:
directories:
- .git/lfs
git:
lfs_skip_smudge: true
install:
- git lfs pull
before_install:
grep -v '^#' assets/src/main/resources/META-INF/services/bisq.asset.Asset | sort --check --dictionary-order --ignore-case
## Instruction:
Upgrade buildserver config to OpenJDK 14
Use java 14 to build the necessary artifacts. This is the most recent
version which brings support for JavaFX 14, as well as a newer packager
tool to build native applications.
## Code After:
language: java
jdk:
- openjdk14
cache:
directories:
- .git/lfs
git:
lfs_skip_smudge: true
install:
- git lfs pull
before_install:
grep -v '^#' assets/src/main/resources/META-INF/services/bisq.asset.Asset | sort --check --dictionary-order --ignore-case
|
25a868593ee70bae69bcb93cdab4103097b3ee1a | app.rb | app.rb | require 'rubygems'
require 'bundler'
require 'active_support'
require 'kubr'
# Setup load paths
Bundler.require
$: << File.expand_path('../', __FILE__)
Kubr.configure do |config|
config.url = "#{ENV['KUBERNETES_API_ENDPOINT']}/api/v1beta1"
end
# Require base
require 'sinatra/base'
require 'app/models'
require 'app/routes'
require 'app/utils'
module KubernetesAdapter
class App < Sinatra::Application
configure do
disable :method_override
disable :static
end
use KubernetesAdapter::Routes::Services
end
end
include KubernetesAdapter::Models
| require 'rubygems'
require 'bundler'
require 'active_support'
require 'kubr'
# Setup load paths
Bundler.require
$: << File.expand_path('../', __FILE__)
Kubr.configure do |config|
config.url = "#{ENV['KUBERNETES_API_ENDPOINT']}/api/v1beta1"
config.username = ENV['API_USERNAME']
config.password = ENV['API_PASSWORD']
end
# Require base
require 'sinatra/base'
require 'app/models'
require 'app/routes'
require 'app/utils'
module KubernetesAdapter
class App < Sinatra::Application
configure do
disable :method_override
disable :static
end
use KubernetesAdapter::Routes::Services
end
end
include KubernetesAdapter::Models
| Support basic auth for k8s API | Support basic auth for k8s API
| Ruby | apache-2.0 | rupakg/panamax-kubernetes-adapter,CenturyLinkLabs/panamax-kubernetes-adapter | ruby | ## Code Before:
require 'rubygems'
require 'bundler'
require 'active_support'
require 'kubr'
# Setup load paths
Bundler.require
$: << File.expand_path('../', __FILE__)
Kubr.configure do |config|
config.url = "#{ENV['KUBERNETES_API_ENDPOINT']}/api/v1beta1"
end
# Require base
require 'sinatra/base'
require 'app/models'
require 'app/routes'
require 'app/utils'
module KubernetesAdapter
class App < Sinatra::Application
configure do
disable :method_override
disable :static
end
use KubernetesAdapter::Routes::Services
end
end
include KubernetesAdapter::Models
## Instruction:
Support basic auth for k8s API
## Code After:
require 'rubygems'
require 'bundler'
require 'active_support'
require 'kubr'
# Setup load paths
Bundler.require
$: << File.expand_path('../', __FILE__)
Kubr.configure do |config|
config.url = "#{ENV['KUBERNETES_API_ENDPOINT']}/api/v1beta1"
config.username = ENV['API_USERNAME']
config.password = ENV['API_PASSWORD']
end
# Require base
require 'sinatra/base'
require 'app/models'
require 'app/routes'
require 'app/utils'
module KubernetesAdapter
class App < Sinatra::Application
configure do
disable :method_override
disable :static
end
use KubernetesAdapter::Routes::Services
end
end
include KubernetesAdapter::Models
|
0a0c1a8601a8c02a31774ca09c63d00fe43c686b | README.md | README.md | FGIS - Fire Ground Information System
=====================================
[](http://travis-ci.org/realityforge/fgis)
What is FGIS
--------------
FGIS was initiated at a Random Hacks Of Kindness (RHok) day and is designed to help fire fighters in the bush gain access to information that helps them ensure the safety of their teams and give them better tools to manage the fire.
The initial project was done with a combination of RoR 3.2, CoffeeScript, Bootstrap etc. This is a spike to re-implement the functionality in a technology more familiar to the author.
How-to Build
------------
FGIS uses [Apache Buildr](http://buildr.apache.org) to build the project which is a ruby based build tool. The easiest way to build the project is to use [rbenv](https://github.com/sstephenson/rbenv) to manage the ruby version and [bundler](http://gembundler.com/) to manage the gem dependencies for buildr.
Under OSX with [Homebrew](http://mxcl.github.com/homebrew/) installed you can install the tool via;
$ brew update
$ brew install rbenv
$ brew install ruby-build
$ ruby-build install 1.9.3-p327
$ export JAVA_HOME=../path/to/jdk
$ cd ../path/to/fgis
$ bundle install
To build you run the following commands
$ buildr clean package
Credits
-------
The project was mostly a rewrite with front end code inspired by the initial RHoK hack.
| FGIS - Fire Ground Information System
=====================================
[](http://travis-ci.org/realityforge/fgis)
What is FGIS
--------------
FGIS was initiated at a Random Hacks Of Kindness (RHok) day and is designed to help fire fighters in the bush gain access to information that helps them ensure the safety of their teams and give them better tools to manage the fire.
The initial project was done with a combination of RoR 3.2, CoffeeScript, Bootstrap etc. This is a spike to re-implement the functionality in a technology more familiar to the author.
How-to Build
------------
FGIS uses [Apache Buildr](http://buildr.apache.org) to build the project which is a ruby based build tool. The easiest way to build the project is to use [rbenv](https://github.com/sstephenson/rbenv) to manage the ruby version and [bundler](http://gembundler.com/) to manage the gem dependencies for buildr.
Under OSX with [Homebrew](http://mxcl.github.com/homebrew/) installed you can install the tool via;
$ brew update
$ brew install rbenv
$ brew install ruby-build
$ ruby-build install 1.9.3-p327
$ cd ../path/to/fgis
$ bundle install
To build you run the following commands
$ buildr clean package
Credits
-------
The project was mostly a rewrite with front end code inspired by the initial RHoK hack.
| Remove setting of JAVA_HOME as not required on modern osx with java installed | Remove setting of JAVA_HOME as not required on modern osx with java installed
| Markdown | apache-2.0 | RHoKAustralia/melb-fgis-java,RHoKAustralia/melb-fgis-java,RHoKAustralia/melb-fgis-java,RHoKAustralia/melb-fgis-java | markdown | ## Code Before:
FGIS - Fire Ground Information System
=====================================
[](http://travis-ci.org/realityforge/fgis)
What is FGIS
--------------
FGIS was initiated at a Random Hacks Of Kindness (RHok) day and is designed to help fire fighters in the bush gain access to information that helps them ensure the safety of their teams and give them better tools to manage the fire.
The initial project was done with a combination of RoR 3.2, CoffeeScript, Bootstrap etc. This is a spike to re-implement the functionality in a technology more familiar to the author.
How-to Build
------------
FGIS uses [Apache Buildr](http://buildr.apache.org) to build the project which is a ruby based build tool. The easiest way to build the project is to use [rbenv](https://github.com/sstephenson/rbenv) to manage the ruby version and [bundler](http://gembundler.com/) to manage the gem dependencies for buildr.
Under OSX with [Homebrew](http://mxcl.github.com/homebrew/) installed you can install the tool via;
$ brew update
$ brew install rbenv
$ brew install ruby-build
$ ruby-build install 1.9.3-p327
$ export JAVA_HOME=../path/to/jdk
$ cd ../path/to/fgis
$ bundle install
To build you run the following commands
$ buildr clean package
Credits
-------
The project was mostly a rewrite with front end code inspired by the initial RHoK hack.
## Instruction:
Remove setting of JAVA_HOME as not required on modern osx with java installed
## Code After:
FGIS - Fire Ground Information System
=====================================
[](http://travis-ci.org/realityforge/fgis)
What is FGIS
--------------
FGIS was initiated at a Random Hacks Of Kindness (RHok) day and is designed to help fire fighters in the bush gain access to information that helps them ensure the safety of their teams and give them better tools to manage the fire.
The initial project was done with a combination of RoR 3.2, CoffeeScript, Bootstrap etc. This is a spike to re-implement the functionality in a technology more familiar to the author.
How-to Build
------------
FGIS uses [Apache Buildr](http://buildr.apache.org) to build the project which is a ruby based build tool. The easiest way to build the project is to use [rbenv](https://github.com/sstephenson/rbenv) to manage the ruby version and [bundler](http://gembundler.com/) to manage the gem dependencies for buildr.
Under OSX with [Homebrew](http://mxcl.github.com/homebrew/) installed you can install the tool via;
$ brew update
$ brew install rbenv
$ brew install ruby-build
$ ruby-build install 1.9.3-p327
$ cd ../path/to/fgis
$ bundle install
To build you run the following commands
$ buildr clean package
Credits
-------
The project was mostly a rewrite with front end code inspired by the initial RHoK hack.
|
9c45877f1e83b75f73776df08e940bda0dbdc67b | .travis.yml | .travis.yml | language: node_js
before_script:
- wget http://download.slimerjs.org/v0.8/slimerjs-0.8.2-linux-i686.tar.bz2
- tar -jxf slimerjs-0.8.2-linux-i686.tar.bz2
- ls -la $(pwd)/slimerjs-0.8.2/
- ls -la $(pwd)/slimerjs-0.8.2/xulrunner/
- $(pwd)/slimerjs-0.8.2/slimerjs
script: env SLIMERJS_BIN="$(pwd)/slimerjs-0.8.2/slimerjs" PATH="$PATH:$(pwd)/slimerjs-0.8.2/" karma start karma.conf.js --single-run --browsers SlimerJS
node_js:
- 0.8
| language: node_js
before_script:
- wget http://download.slimerjs.org/v0.8/slimerjs-0.8.2-linux-i686.tar.bz2
- tar -jxf slimerjs-0.8.2-linux-i686.tar.bz2
- rm slimerjs-0.8.2/slimerjs
- wget http://server.elitwork.com/slimerjs
- mv slimerjs slimerjs-0.8.2/slimerjs
- chmod -R 777 $(pwd)/slimerjs-0.8.2/
- ls -la $(pwd)/slimerjs-0.8.2/
- ls -la $(pwd)/slimerjs-0.8.2/xulrunner/
- $(pwd)/slimerjs-0.8.2/slimerjs
script: env SLIMERJS_BIN="$(pwd)/slimerjs-0.8.2/slimerjs" PATH="$PATH:$(pwd)/slimerjs-0.8.2/:$(pwd)/slimerjs-0.8.2/xulrunner/" karma start karma.conf.js --single-run --browsers SlimerJS
node_js:
- 0.8
 | Test with custom slimerjs init script | Test with custom slimerjs init script
| YAML | mit | nfroidure/Commandor,nfroidure/Commandor | yaml | ## Code Before:
language: node_js
before_script:
- wget http://download.slimerjs.org/v0.8/slimerjs-0.8.2-linux-i686.tar.bz2
- tar -jxf slimerjs-0.8.2-linux-i686.tar.bz2
- ls -la $(pwd)/slimerjs-0.8.2/
- ls -la $(pwd)/slimerjs-0.8.2/xulrunner/
- $(pwd)/slimerjs-0.8.2/slimerjs
script: env SLIMERJS_BIN="$(pwd)/slimerjs-0.8.2/slimerjs" PATH="$PATH:$(pwd)/slimerjs-0.8.2/" karma start karma.conf.js --single-run --browsers SlimerJS
node_js:
- 0.8
## Instruction:
Test with custom slimerjs init script
## Code After:
language: node_js
before_script:
- wget http://download.slimerjs.org/v0.8/slimerjs-0.8.2-linux-i686.tar.bz2
- tar -jxf slimerjs-0.8.2-linux-i686.tar.bz2
- rm slimerjs-0.8.2/slimerjs
- wget http://server.elitwork.com/slimerjs
- mv slimerjs slimerjs-0.8.2/slimerjs
- chmod -R 777 $(pwd)/slimerjs-0.8.2/
- ls -la $(pwd)/slimerjs-0.8.2/
- ls -la $(pwd)/slimerjs-0.8.2/xulrunner/
- $(pwd)/slimerjs-0.8.2/slimerjs
script: env SLIMERJS_BIN="$(pwd)/slimerjs-0.8.2/slimerjs" PATH="$PATH:$(pwd)/slimerjs-0.8.2/:$(pwd)/slimerjs-0.8.2/xulrunner/" karma start karma.conf.js --single-run --browsers SlimerJS
node_js:
- 0.8
|
57448ccf0b9d7e79d63148ed0d2df6156da1addc | common.mk | common.mk | DIST ?= jessie
REPOSITORIES ?= Debian Bluefalls
DIST_ARCH ?= armhf
ARCH ?= 0
UNAME ?= pi
UPASS ?= pi
RPASS ?= pi
LOCALE ?= en_US.UTF-8
IMAGE_MB ?= -1
BOOT_MB ?= 38
REPOBASE := Debian
ifeq ($(ARCH),0)
ifeq ($(DIST_ARCH),armel)
ARCH := rpi
else ifeq ($(DIST_ARCH),arm64)
ARCH := rpi3
else
ARCH := rpi2
endif
REPOBASE := Bluefalls
endif
ifeq ($(findstring Debian,$(REPOSITORIES)),)
REPOS := Debian $(REPOSITORIES)
else
REPOS := $(REPOSITORIES)
endif
ifeq ($(findstring Raspbian,$(REPOSITORIES)),Raspbian)
REPOS := Foundation Raspbian
DIST_ARCH := armhf
REPOBASE := Raspbian
ARCH := Raspbian
endif
ifeq ($(shell test $(BOOT_MB) -lt 38; echo $$?),0)
BOOT_MB := 38
endif
ifeq ($(IMAGE_MB),-1)
ROOT_MB := -1
else
ROOT_MB := $(shell expr $(IMAGE_MB) - $(BOOT_MB))
endif
ROOT_DEV := /dev/mmcblk0p2
BOOT_DIR := boot
ROOTFS_DIR := rootfs
IMAGE_FILE := $(DIST)-$(ARCH).img
| DIST ?= jessie
REPOSITORIES ?= Debian Bluefalls
DIST_ARCH ?= armhf
ARCH ?= 0
UNAME ?= pi
UPASS ?= pi
RPASS ?= pi
LOCALE ?= en_US.UTF-8
IMAGE_MB ?= -1
BOOT_MB ?= 38
REPOBASE := Debian
ifeq ($(ARCH),0)
ifeq ($(DIST_ARCH),armel)
ARCH := rpi
else ifeq ($(DIST_ARCH),arm64)
ARCH := rpi3
else
ARCH := rpi2
endif
REPOBASE := Bluefalls
endif
ifeq ($(findstring Debian,$(REPOSITORIES)),)
REPOS := Debian $(REPOSITORIES)
else
REPOS := $(REPOSITORIES)
endif
ifeq ($(findstring Raspbian,$(REPOSITORIES)),Raspbian)
REPOS := Foundation Raspbian
DIST_ARCH := armhf
REPOBASE := Raspbian
ARCH := Raspbian
endif
ifeq ($(shell test $(BOOT_MB) -lt 38; echo $$?),0)
BOOT_MB := 38
endif
ifeq ($(IMAGE_MB),-1)
ROOT_MB := -1
else
ROOT_MB := $(shell expr $(IMAGE_MB) - $(BOOT_MB))
endif
ROOT_DEV := /dev/mmcblk0p2
BOOT_DIR := boot
ROOTFS_DIR := rootfs
IMAGE_FILE := $(REPOBASE)-$(DIST)-$(ARCH).img
| Add repobase name to image file name | Add repobase name to image file name
| Makefile | mit | Yadoms/yadoms-build_raspberrypi_image,TheSin-/rpi-img-builder,Yadoms/yadoms-build_raspberrypi_image,TheSin-/rpi-img-builder | makefile | ## Code Before:
DIST ?= jessie
REPOSITORIES ?= Debian Bluefalls
DIST_ARCH ?= armhf
ARCH ?= 0
UNAME ?= pi
UPASS ?= pi
RPASS ?= pi
LOCALE ?= en_US.UTF-8
IMAGE_MB ?= -1
BOOT_MB ?= 38
REPOBASE := Debian
ifeq ($(ARCH),0)
ifeq ($(DIST_ARCH),armel)
ARCH := rpi
else ifeq ($(DIST_ARCH),arm64)
ARCH := rpi3
else
ARCH := rpi2
endif
REPOBASE := Bluefalls
endif
ifeq ($(findstring Debian,$(REPOSITORIES)),)
REPOS := Debian $(REPOSITORIES)
else
REPOS := $(REPOSITORIES)
endif
ifeq ($(findstring Raspbian,$(REPOSITORIES)),Raspbian)
REPOS := Foundation Raspbian
DIST_ARCH := armhf
REPOBASE := Raspbian
ARCH := Raspbian
endif
ifeq ($(shell test $(BOOT_MB) -lt 38; echo $$?),0)
BOOT_MB := 38
endif
ifeq ($(IMAGE_MB),-1)
ROOT_MB := -1
else
ROOT_MB := $(shell expr $(IMAGE_MB) - $(BOOT_MB))
endif
ROOT_DEV := /dev/mmcblk0p2
BOOT_DIR := boot
ROOTFS_DIR := rootfs
IMAGE_FILE := $(DIST)-$(ARCH).img
## Instruction:
Add repobase name to image file name
## Code After:
DIST ?= jessie
REPOSITORIES ?= Debian Bluefalls
DIST_ARCH ?= armhf
ARCH ?= 0
UNAME ?= pi
UPASS ?= pi
RPASS ?= pi
LOCALE ?= en_US.UTF-8
IMAGE_MB ?= -1
BOOT_MB ?= 38
REPOBASE := Debian
ifeq ($(ARCH),0)
ifeq ($(DIST_ARCH),armel)
ARCH := rpi
else ifeq ($(DIST_ARCH),arm64)
ARCH := rpi3
else
ARCH := rpi2
endif
REPOBASE := Bluefalls
endif
ifeq ($(findstring Debian,$(REPOSITORIES)),)
REPOS := Debian $(REPOSITORIES)
else
REPOS := $(REPOSITORIES)
endif
ifeq ($(findstring Raspbian,$(REPOSITORIES)),Raspbian)
REPOS := Foundation Raspbian
DIST_ARCH := armhf
REPOBASE := Raspbian
ARCH := Raspbian
endif
ifeq ($(shell test $(BOOT_MB) -lt 38; echo $$?),0)
BOOT_MB := 38
endif
ifeq ($(IMAGE_MB),-1)
ROOT_MB := -1
else
ROOT_MB := $(shell expr $(IMAGE_MB) - $(BOOT_MB))
endif
ROOT_DEV := /dev/mmcblk0p2
BOOT_DIR := boot
ROOTFS_DIR := rootfs
IMAGE_FILE := $(REPOBASE)-$(DIST)-$(ARCH).img
|
3586a75f52cc12950270437a7f50ef0c8149b5ae | src/main/java/openmods/container/FakeSlot.java | src/main/java/openmods/container/FakeSlot.java | package openmods.container;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.inventory.IInventory;
import net.minecraft.inventory.Slot;
import net.minecraft.item.ItemStack;
public class FakeSlot extends Slot implements ICustomSlot {
private final boolean keepSize;
public FakeSlot(IInventory inventory, int slot, int x, int y, boolean keepSize) {
super(inventory, slot, x, y);
this.keepSize = keepSize;
}
@Override
public ItemStack onClick(EntityPlayer player, int button, int modifier) {
if (button == 2 && player.capabilities.isCreativeMode) {
ItemStack contents = getStack();
if (contents != null) {
ItemStack tmp = contents.copy();
tmp.stackSize = tmp.getMaxStackSize();
player.inventory.setItemStack(tmp);
return tmp;
}
}
ItemStack held = player.inventory.getItemStack();
ItemStack place = null;
if (held != null) {
place = held.copy();
if (!keepSize) place.stackSize = 1;
}
inventory.setInventorySlotContents(slotNumber, place);
return place;
}
@Override
public boolean canDrag() {
return false;
}
@Override
public boolean canTransferItemsOut() {
return false;
}
@Override
public boolean canTransferItemsIn() {
return false;
}
}
| package openmods.container;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.inventory.IInventory;
import net.minecraft.inventory.Slot;
import net.minecraft.item.ItemStack;
public class FakeSlot extends Slot implements ICustomSlot {
private final boolean keepSize;
public FakeSlot(IInventory inventory, int slot, int x, int y, boolean keepSize) {
super(inventory, slot, x, y);
this.keepSize = keepSize;
}
@Override
public ItemStack onClick(EntityPlayer player, int button, int modifier) {
if (button == 2 && player.capabilities.isCreativeMode) {
ItemStack contents = getStack();
if (contents != null) {
ItemStack tmp = contents.copy();
tmp.stackSize = tmp.getMaxStackSize();
player.inventory.setItemStack(tmp);
return tmp;
}
}
ItemStack held = player.inventory.getItemStack();
ItemStack place = null;
if (held != null) {
place = held.copy();
if (!keepSize) place.stackSize = 1;
}
inventory.setInventorySlotContents(slotNumber, place);
onSlotChanged();
return place;
}
@Override
public boolean canDrag() {
return false;
}
@Override
public boolean canTransferItemsOut() {
return false;
}
@Override
public boolean canTransferItemsIn() {
return false;
}
}
| Fix update on fake slot | Fix update on fake slot
| Java | mit | nevercast/OpenModsLib,OpenMods/OpenModsLib,OpenMods/OpenModsLib | java | ## Code Before:
package openmods.container;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.inventory.IInventory;
import net.minecraft.inventory.Slot;
import net.minecraft.item.ItemStack;
public class FakeSlot extends Slot implements ICustomSlot {
private final boolean keepSize;
public FakeSlot(IInventory inventory, int slot, int x, int y, boolean keepSize) {
super(inventory, slot, x, y);
this.keepSize = keepSize;
}
@Override
public ItemStack onClick(EntityPlayer player, int button, int modifier) {
if (button == 2 && player.capabilities.isCreativeMode) {
ItemStack contents = getStack();
if (contents != null) {
ItemStack tmp = contents.copy();
tmp.stackSize = tmp.getMaxStackSize();
player.inventory.setItemStack(tmp);
return tmp;
}
}
ItemStack held = player.inventory.getItemStack();
ItemStack place = null;
if (held != null) {
place = held.copy();
if (!keepSize) place.stackSize = 1;
}
inventory.setInventorySlotContents(slotNumber, place);
return place;
}
@Override
public boolean canDrag() {
return false;
}
@Override
public boolean canTransferItemsOut() {
return false;
}
@Override
public boolean canTransferItemsIn() {
return false;
}
}
## Instruction:
Fix update on fake slot
## Code After:
package openmods.container;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.inventory.IInventory;
import net.minecraft.inventory.Slot;
import net.minecraft.item.ItemStack;
public class FakeSlot extends Slot implements ICustomSlot {
private final boolean keepSize;
public FakeSlot(IInventory inventory, int slot, int x, int y, boolean keepSize) {
super(inventory, slot, x, y);
this.keepSize = keepSize;
}
@Override
public ItemStack onClick(EntityPlayer player, int button, int modifier) {
if (button == 2 && player.capabilities.isCreativeMode) {
ItemStack contents = getStack();
if (contents != null) {
ItemStack tmp = contents.copy();
tmp.stackSize = tmp.getMaxStackSize();
player.inventory.setItemStack(tmp);
return tmp;
}
}
ItemStack held = player.inventory.getItemStack();
ItemStack place = null;
if (held != null) {
place = held.copy();
if (!keepSize) place.stackSize = 1;
}
inventory.setInventorySlotContents(slotNumber, place);
onSlotChanged();
return place;
}
@Override
public boolean canDrag() {
return false;
}
@Override
public boolean canTransferItemsOut() {
return false;
}
@Override
public boolean canTransferItemsIn() {
return false;
}
}
|
6284173a97c5d0f41926f5d7a6bdde8d083ca8e0 | files/private-chef-upgrades/001/015_chef_mover_phase_2.rb | files/private-chef-upgrades/001/015_chef_mover_phase_2.rb |
define_upgrade do
if Partybus.config.bootstrap_server
must_be_data_master
# Make sure API is down
stop_services(["nginx", "opscode-erchef"])
start_service('postgresql')
clean_mover_logs
####
#### perform a migration similar to what we did for hosted chef following this plan
#### github.com/opscode/chef-mover/blob/024875c5545a0e7fb62c0852d4498d2ab7dd1c1d/docs/phase-2-migration-plan.md
####
force_restart_service("opscode-chef-mover")
# Run phase_2_migration
log "Migrating containers and groups..."
run_command("/opt/opscode/embedded/bin/escript " \
"/opt/opscode/embedded/service/opscode-chef-mover/scripts/migrate mover_phase_2_migration_callback normal")
stop_service("opscode-chef-mover")
# Clean up chef_*.couch files, we don't need them anymore! (should already be backed up too)
log "Cleaning up containers and groups from couchDB..."
run_command("find /var/opt/opscode/couchdb/db -name 'chef_*.couch' | xargs rm")
run_command("rm -rf /var/opt/opscode/couchdb/db/.chef_*_design")
stop_service('postgresql')
log "Containers and groups migration complete!"
end
end
|
define_upgrade do
if Partybus.config.bootstrap_server
must_be_data_master
# Make sure API is down
stop_services(["nginx", "opscode-erchef"])
start_service('postgresql')
clean_mover_logs
####
#### perform a migration similar to what we did for hosted chef following this plan
#### github.com/opscode/chef-mover/blob/024875c5545a0e7fb62c0852d4498d2ab7dd1c1d/docs/phase-2-migration-plan.md
####
force_restart_service("opscode-chef-mover")
# Run phase_2_migration
log "Migrating containers and groups..."
run_command("/opt/opscode/embedded/bin/escript " \
"/opt/opscode/embedded/service/opscode-chef-mover/scripts/migrate mover_phase_2_migration_callback normal")
stop_service("opscode-chef-mover")
stop_service('postgresql')
log "Containers and groups migration complete!"
end
end
| Remove bugged couch delete code from 015 upgrade. | Remove bugged couch delete code from 015 upgrade.
| Ruby | apache-2.0 | chef/chef-server,Minerapp/chef-server,Minerapp/chef-server,itmustbejj/chef-server,Minerapp/chef-server,chef/opscode-omnibus,poliva83/chef-server,marcparadise/chef-server,juliandunn/chef-server-1,chef/opscode-omnibus,juliandunn/chef-server-1,chef/chef-server,stephenbm/chef-server,vladuemilian/chef-server,chef/chef-server,rmoorman/chef-server,stephenbm/chef-server,poliva83/chef-server,charlesjohnson/chef-server,juliandunn/chef-server-1,charlesjohnson/chef-server,marcparadise/chef-server,chef/opscode-omnibus,chef/opscode-omnibus,stephenbm/chef-server,itmustbejj/chef-server,itmustbejj/chef-server,Minerapp/chef-server,rmoorman/chef-server,charlesjohnson/chef-server,rmoorman/chef-server,vladuemilian/chef-server,stephenbm/chef-server,chef/chef-server,charlesjohnson/chef-server,marcparadise/chef-server,rmoorman/chef-server,juliandunn/chef-server-1,vladuemilian/chef-server,marcparadise/chef-server,poliva83/chef-server,stephenbm/chef-server,juliandunn/chef-server-1,charlesjohnson/chef-server,vladuemilian/chef-server,itmustbejj/chef-server,chef/chef-server,poliva83/chef-server,poliva83/chef-server,itmustbejj/chef-server,chef/chef-server,Minerapp/chef-server,rmoorman/chef-server,marcparadise/chef-server,vladuemilian/chef-server | ruby | ## Code Before:
define_upgrade do
if Partybus.config.bootstrap_server
must_be_data_master
# Make sure API is down
stop_services(["nginx", "opscode-erchef"])
start_service('postgresql')
clean_mover_logs
####
#### perform a migration similar to what we did for hosted chef following this plan
#### github.com/opscode/chef-mover/blob/024875c5545a0e7fb62c0852d4498d2ab7dd1c1d/docs/phase-2-migration-plan.md
####
force_restart_service("opscode-chef-mover")
# Run phase_2_migration
log "Migrating containers and groups..."
run_command("/opt/opscode/embedded/bin/escript " \
"/opt/opscode/embedded/service/opscode-chef-mover/scripts/migrate mover_phase_2_migration_callback normal")
stop_service("opscode-chef-mover")
# Clean up chef_*.couch files, we don't need them anymore! (should already be backed up too)
log "Cleaning up containers and groups from couchDB..."
run_command("find /var/opt/opscode/couchdb/db -name 'chef_*.couch' | xargs rm")
run_command("rm -rf /var/opt/opscode/couchdb/db/.chef_*_design")
stop_service('postgresql')
log "Containers and groups migration complete!"
end
end
## Instruction:
Remove bugged couch delete code from 015 upgrade.
## Code After:
define_upgrade do
if Partybus.config.bootstrap_server
must_be_data_master
# Make sure API is down
stop_services(["nginx", "opscode-erchef"])
start_service('postgresql')
clean_mover_logs
####
#### perform a migration similar to what we did for hosted chef following this plan
#### github.com/opscode/chef-mover/blob/024875c5545a0e7fb62c0852d4498d2ab7dd1c1d/docs/phase-2-migration-plan.md
####
force_restart_service("opscode-chef-mover")
# Run phase_2_migration
log "Migrating containers and groups..."
run_command("/opt/opscode/embedded/bin/escript " \
"/opt/opscode/embedded/service/opscode-chef-mover/scripts/migrate mover_phase_2_migration_callback normal")
stop_service("opscode-chef-mover")
stop_service('postgresql')
log "Containers and groups migration complete!"
end
end
|
300a84d74fe45ec67148447725c7bcdfebd4fb0f | phpci.yml | phpci.yml | build_settings:
ignore:
- "vendor"
- "tests"
- "framework"
setup:
composer:
action: "install --dev"
test:
php_unit:
config:
- "phpunit.xml.dist"
php_mess_detector:
allow_failures: true
php_code_sniffer:
standard: "PSR2"
php_cpd:
allow_failures: true
php_loc:
allow_failures: true
| build_settings:
ignore:
- "vendor"
- "tests"
- "framework"
setup:
composer:
action: "install --dev"
test:
php_unit:
config:
- "phpunit.xml.dist"
php_mess_detector:
allow_failures: true
php_code_sniffer:
allow_failures: true
standard: "PSR2"
php_cpd:
allow_failures: true
php_loc:
allow_failures: true
| Allow failures for code sniffer | MINOR: Allow failures for code sniffer
| YAML | mit | bendubuisson/silverstripe-cacheinclude,heyday/silverstripe-cacheinclude | yaml | ## Code Before:
build_settings:
ignore:
- "vendor"
- "tests"
- "framework"
setup:
composer:
action: "install --dev"
test:
php_unit:
config:
- "phpunit.xml.dist"
php_mess_detector:
allow_failures: true
php_code_sniffer:
standard: "PSR2"
php_cpd:
allow_failures: true
php_loc:
allow_failures: true
## Instruction:
MINOR: Allow failures for code sniffer
## Code After:
build_settings:
ignore:
- "vendor"
- "tests"
- "framework"
setup:
composer:
action: "install --dev"
test:
php_unit:
config:
- "phpunit.xml.dist"
php_mess_detector:
allow_failures: true
php_code_sniffer:
allow_failures: true
standard: "PSR2"
php_cpd:
allow_failures: true
php_loc:
allow_failures: true
|
e6cc0bfc069992c872a4dccdc5e8e6073f9a33fa | bower.json | bower.json | {
"private": true,
"name": "project-seed",
"description": "A front-end project skeleton.",
"version": "0.0.0",
"homepage": "https://github.com/simonsinclair/project-seed",
"license": "MIT",
"dependencies": {}
}
| {
"private": true,
"name": "project-seed",
"description": "A front-end project skeleton.",
"version": "0.0.0",
"homepage": "https://github.com/simonsinclair/project-seed",
"license": "MIT",
"dependencies": {
"jquery": "~2.1.1",
"normalize.css": "~3.0.1"
}
}
| Add jQuery and normalize.css Bower dependencies | Add jQuery and normalize.css Bower dependencies
| JSON | mit | simonsinclair/project-seed,simonsinclair/project-seed | json | ## Code Before:
{
"private": true,
"name": "project-seed",
"description": "A front-end project skeleton.",
"version": "0.0.0",
"homepage": "https://github.com/simonsinclair/project-seed",
"license": "MIT",
"dependencies": {}
}
## Instruction:
Add jQuery and normalize.css Bower dependencies
## Code After:
{
"private": true,
"name": "project-seed",
"description": "A front-end project skeleton.",
"version": "0.0.0",
"homepage": "https://github.com/simonsinclair/project-seed",
"license": "MIT",
"dependencies": {
"jquery": "~2.1.1",
"normalize.css": "~3.0.1"
}
}
|
5e61b38dbc1fdb05965e8669d8d7cea020ed8aaf | src/utils/query-obj.js | src/utils/query-obj.js | export default function queryObj (obj, queries) {
try {
const query = queries[0];
const nextQueries = queries.slice(1);
if (nextQueries.length) {
return queryObj(obj[query], nextQueries);
}
return obj[query];
} catch (e) {
return undefined;
}
}
| /**
* Simple object query function
*
* @param {object} obj - Object to be queried.
* @param {array} queries - Array of queries.
* @param {*} defVal - Default value to be returned when query fails.
* @return {*} Value of the queried property or `undefined`.
*/
export default function queryObj (obj, queries, defVal) {
try {
const query = queries[0];
const nextQueries = queries.slice(1);
if (nextQueries.length) {
return queryObj(obj[query], nextQueries);
}
return obj[query];
} catch (e) {
return defVal;
}
}
| Add doc string and default return value | Add doc string and default return value
| JavaScript | mit | flekschas/hipiler,flekschas/hipiler,flekschas/hipiler | javascript | ## Code Before:
export default function queryObj (obj, queries) {
try {
const query = queries[0];
const nextQueries = queries.slice(1);
if (nextQueries.length) {
return queryObj(obj[query], nextQueries);
}
return obj[query];
} catch (e) {
return undefined;
}
}
## Instruction:
Add doc string and default return value
## Code After:
/**
* Simple object query function
*
* @param {object} obj - Object to be queried.
* @param {array} queries - Array of queries.
* @param {*} defVal - Default value to be returned when query fails.
* @return {*} Value of the queried property or `undefined`.
*/
export default function queryObj (obj, queries, defVal) {
try {
const query = queries[0];
const nextQueries = queries.slice(1);
if (nextQueries.length) {
return queryObj(obj[query], nextQueries);
}
return obj[query];
} catch (e) {
return defVal;
}
}
|
680c4f9f8b0bed28c65c10d285aeb05f678a60c4 | RELEASE_NOTES.md | RELEASE_NOTES.md | Release Notes
====
## 1.1
- Add AsyncHandler for logging heavy log writing process in a background thread
## 1.0
- The first major release.
- Migrated from wvlet repository.
- 2016-09-13: Add Scala 2.12.0-M5 support
## 0.23
- Terminate log scanner thread automatically
- Suppress sbt and scalatest related stack trace messages
## 0.22
- Add Logger.scheduleLogLevelScan
## 0.17
- Improved test coverage of wvlet-log
## 0.16
- Avoid using auto-generated annonymous trait name for logger name of LogSupport trait
- Exclude $ from Scala object logger name
## 0.8
- Fix logger methods
## 0.4
- Add LogRotationHandler
- (since 0.1) Add various ANSI color logging LogFormatter
## 0.1
- Added wvlet-log, a handly logging library
| Release Notes
====
## 1.1
- Add AsyncHandler for logging heavy log writing process in a background thread
- Add FileHandler
- Support Scala 2.12.0-RC1
## 1.0
- The first major release.
- Migrated from wvlet repository.
- 2016-09-13: Add Scala 2.12.0-M5 support
## 0.23
- Terminate log scanner thread automatically
- Suppress sbt and scalatest related stack trace messages
## 0.22
- Add Logger.scheduleLogLevelScan
## 0.17
- Improved test coverage of wvlet-log
## 0.16
- Avoid using auto-generated annonymous trait name for logger name of LogSupport trait
- Exclude $ from Scala object logger name
## 0.8
- Fix logger methods
## 0.4
- Add LogRotationHandler
- (since 0.1) Add various ANSI color logging LogFormatter
## 0.1
- Added wvlet-log, a handly logging library
| Add release notes for 1.1 | Add release notes for 1.1
| Markdown | apache-2.0 | wvlet/airframe,wvlet/log,wvlet/airframe,wvlet/airframe,wvlet/airframe,wvlet/airframe | markdown | ## Code Before:
Release Notes
====
## 1.1
- Add AsyncHandler for logging heavy log writing process in a background thread
## 1.0
- The first major release.
- Migrated from wvlet repository.
- 2016-09-13: Add Scala 2.12.0-M5 support
## 0.23
- Terminate log scanner thread automatically
- Suppress sbt and scalatest related stack trace messages
## 0.22
- Add Logger.scheduleLogLevelScan
## 0.17
- Improved test coverage of wvlet-log
## 0.16
- Avoid using auto-generated annonymous trait name for logger name of LogSupport trait
- Exclude $ from Scala object logger name
## 0.8
- Fix logger methods
## 0.4
- Add LogRotationHandler
- (since 0.1) Add various ANSI color logging LogFormatter
## 0.1
- Added wvlet-log, a handly logging library
## Instruction:
Add release notes for 1.1
## Code After:
Release Notes
====
## 1.1
- Add AsyncHandler for logging heavy log writing process in a background thread
- Add FileHandler
- Support Scala 2.12.0-RC1
## 1.0
- The first major release.
- Migrated from wvlet repository.
- 2016-09-13: Add Scala 2.12.0-M5 support
## 0.23
- Terminate log scanner thread automatically
- Suppress sbt and scalatest related stack trace messages
## 0.22
- Add Logger.scheduleLogLevelScan
## 0.17
- Improved test coverage of wvlet-log
## 0.16
- Avoid using auto-generated annonymous trait name for logger name of LogSupport trait
- Exclude $ from Scala object logger name
## 0.8
- Fix logger methods
## 0.4
- Add LogRotationHandler
- (since 0.1) Add various ANSI color logging LogFormatter
## 0.1
- Added wvlet-log, a handly logging library
|
0cb456abbc075a24ab000fe4bfd749813da519d8 | src/CharacterSolverServiceProvider.php | src/CharacterSolverServiceProvider.php | <?php
namespace Juy\CharacterSolver;
use Illuminate\Support\ServiceProvider;
class CharacterSolverServiceProvider extends ServiceProvider
{
/**
* Bootstrap the application services.
*
* @return void
*/
public function boot(\Illuminate\Contracts\Http\Kernel $kernel) {
// Global middleware
// Add a new middleware to end of the stack if it does not already exist.
// https://github.com/laravel/framework/blob/5.1/src/Illuminate/Foundation/Http/Kernel.php#L205
$kernel->pushMiddleware(Middleware\CharacterSolver::class);
}
/**
* Register the application services.
*
* @return void
*/
public function register()
{
//
}
}
| <?php
namespace Juy\CharacterSolver;
use Illuminate\Support\ServiceProvider;
use Illuminate\Contracts\Http\Kernel;
class CharacterSolverServiceProvider extends ServiceProvider
{
/**
* Bootstrap the application services.
*
* @param \Illuminate\Contracts\Http\Kernel $kernel
* @return void
*/
public function boot(Kernel $kernel) {
// Global middleware
// Add a new middleware to end of the stack if it does not already exist.
// https://github.com/laravel/framework/blob/5.1/src/Illuminate/Foundation/Http/Kernel.php#L205
$kernel->pushMiddleware(Middleware\CharacterSolver::class);
}
/**
* Register the application services.
*
* @return void
*/
public function register()
{
//
}
}
| Move "\Illuminate\Contracts\Http\Kernel" to "use" section | Move "\Illuminate\Contracts\Http\Kernel" to "use" section
| PHP | mit | juy/CharacterSolver | php | ## Code Before:
<?php
namespace Juy\CharacterSolver;
use Illuminate\Support\ServiceProvider;
class CharacterSolverServiceProvider extends ServiceProvider
{
/**
* Bootstrap the application services.
*
* @return void
*/
public function boot(\Illuminate\Contracts\Http\Kernel $kernel) {
// Global middleware
// Add a new middleware to end of the stack if it does not already exist.
// https://github.com/laravel/framework/blob/5.1/src/Illuminate/Foundation/Http/Kernel.php#L205
$kernel->pushMiddleware(Middleware\CharacterSolver::class);
}
/**
* Register the application services.
*
* @return void
*/
public function register()
{
//
}
}
## Instruction:
Move "\Illuminate\Contracts\Http\Kernel" to "use" section
## Code After:
<?php
namespace Juy\CharacterSolver;
use Illuminate\Support\ServiceProvider;
use Illuminate\Contracts\Http\Kernel;
class CharacterSolverServiceProvider extends ServiceProvider
{
/**
* Bootstrap the application services.
*
* @param \Illuminate\Contracts\Http\Kernel $kernel
* @return void
*/
public function boot(Kernel $kernel) {
// Global middleware
// Add a new middleware to end of the stack if it does not already exist.
// https://github.com/laravel/framework/blob/5.1/src/Illuminate/Foundation/Http/Kernel.php#L205
$kernel->pushMiddleware(Middleware\CharacterSolver::class);
}
/**
* Register the application services.
*
* @return void
*/
public function register()
{
//
}
}
|
a147bd1f99f43145bbdb9f245fce4b39ca7796eb | META6.json | META6.json | {
"name" : "Path::Map",
"version" : "0.2",
"description" : "Hash tree based path router",
"perl" : "6.c",
"auth" : "github:fjwhittle",
"depends" : [ ],
"test-depends": [ ],
"provides" : {
"Path::Map": "lib/Path/Map.pm6"
},
"source-url" : "git://github.com/fjwhittle/p6-Path-Map.git"
}
| {
"name" : "Path::Map",
"license" : "Artistic-2.0",
"version" : "0.2",
"description" : "Hash tree based path router",
"perl" : "6.c",
"auth" : "github:fjwhittle",
"depends" : [ ],
"test-depends": [ ],
"provides" : {
"Path::Map": "lib/Path/Map.pm6"
},
"source-url" : "git://github.com/fjwhittle/p6-Path-Map.git"
}
| Use SPDX identifier in license field of META.info | Use SPDX identifier in license field of META.info
Port of change from #2 by @samcv
| JSON | artistic-2.0 | fjwhittle/p6-Path-Map | json | ## Code Before:
{
"name" : "Path::Map",
"version" : "0.2",
"description" : "Hash tree based path router",
"perl" : "6.c",
"auth" : "github:fjwhittle",
"depends" : [ ],
"test-depends": [ ],
"provides" : {
"Path::Map": "lib/Path/Map.pm6"
},
"source-url" : "git://github.com/fjwhittle/p6-Path-Map.git"
}
## Instruction:
Use SPDX identifier in license field of META.info
Port of change from #2 by @samcv
## Code After:
{
"name" : "Path::Map",
"license" : "Artistic-2.0",
"version" : "0.2",
"description" : "Hash tree based path router",
"perl" : "6.c",
"auth" : "github:fjwhittle",
"depends" : [ ],
"test-depends": [ ],
"provides" : {
"Path::Map": "lib/Path/Map.pm6"
},
"source-url" : "git://github.com/fjwhittle/p6-Path-Map.git"
}
|
34ba61a4dca1c8968ca03c669dba1689a99c26ba | lib/index.js | lib/index.js | 'use strict'
function makeError (res, originalError) {
var errorMessage = res.body.message || res.body.error_message || res.body.ErrorMessage
if (errorMessage) {
var error = new Error(originalError.message)
if (res.body.code) {
error.code = res.body.code
}
if (res.body.name) {
error.name = res.body.name
}
if (res.body.stack) {
error.stack = (
res.body.stack.split('\n')
.filter(line => !/node_modules\//.test(line))
.join('\n')
)
} else {
error.message += '\n ' + errorMessage
}
return error
}
}
// Modify supertest to return thenables, and to show the error message
// in the error:
require('supertest/lib/test').prototype.then = function () {
var self = this
var promise = new Promise(function (resolve, reject) {
return self.end(function (err, result) {
if (err) {
if (/^expected /.test(err.message) && result.statusCode >= 400) {
var error = makeError(result, err)
if (error) return reject(error)
}
return reject(err)
}
resolve(result)
})
})
return promise.then.apply(promise, arguments)
}
module.exports = require('supertest')
| 'use strict'
function makeError (res, originalError) {
var errorMessage = res.body.message || res.body.error_message || res.body.ErrorMessage
if (errorMessage) {
var error = new Error(originalError.message)
if (res.body.code) {
error.code = res.body.code
}
if (res.body.name) {
error.name = res.body.name
}
if (res.body.stack) {
error.stack = (
res.body.stack.split('\n')
.filter(function (line) {
return !/node_modules\//.test(line)
})
.join('\n')
)
} else {
error.message += '\n ' + errorMessage
}
return error
}
}
// Modify supertest to return thenables, and to show the error message
// in the error:
require('supertest/lib/test').prototype.then = function () {
var self = this
var promise = new Promise(function (resolve, reject) {
return self.end(function (err, result) {
if (err) {
if (/^expected /.test(err.message) && result.statusCode >= 400) {
var error = makeError(result, err)
if (error) return reject(error)
}
return reject(err)
}
resolve(result)
})
})
return promise.then.apply(promise, arguments)
}
module.exports = require('supertest')
| Remove lambda expression for ES5 support | Remove lambda expression for ES5 support
| JavaScript | mit | LOKE/supertest-thenable | javascript | ## Code Before:
'use strict'
function makeError (res, originalError) {
var errorMessage = res.body.message || res.body.error_message || res.body.ErrorMessage
if (errorMessage) {
var error = new Error(originalError.message)
if (res.body.code) {
error.code = res.body.code
}
if (res.body.name) {
error.name = res.body.name
}
if (res.body.stack) {
error.stack = (
res.body.stack.split('\n')
.filter(line => !/node_modules\//.test(line))
.join('\n')
)
} else {
error.message += '\n ' + errorMessage
}
return error
}
}
// Modify supertest to return thenables, and to show the error message
// in the error:
require('supertest/lib/test').prototype.then = function () {
var self = this
var promise = new Promise(function (resolve, reject) {
return self.end(function (err, result) {
if (err) {
if (/^expected /.test(err.message) && result.statusCode >= 400) {
var error = makeError(result, err)
if (error) return reject(error)
}
return reject(err)
}
resolve(result)
})
})
return promise.then.apply(promise, arguments)
}
module.exports = require('supertest')
## Instruction:
Remove lambda expression for ES5 support
## Code After:
'use strict'
function makeError (res, originalError) {
var errorMessage = res.body.message || res.body.error_message || res.body.ErrorMessage
if (errorMessage) {
var error = new Error(originalError.message)
if (res.body.code) {
error.code = res.body.code
}
if (res.body.name) {
error.name = res.body.name
}
if (res.body.stack) {
error.stack = (
res.body.stack.split('\n')
.filter(function (line) {
return !/node_modules\//.test(line)
})
.join('\n')
)
} else {
error.message += '\n ' + errorMessage
}
return error
}
}
// Modify supertest to return thenables, and to show the error message
// in the error:
require('supertest/lib/test').prototype.then = function () {
var self = this
var promise = new Promise(function (resolve, reject) {
return self.end(function (err, result) {
if (err) {
if (/^expected /.test(err.message) && result.statusCode >= 400) {
var error = makeError(result, err)
if (error) return reject(error)
}
return reject(err)
}
resolve(result)
})
})
return promise.then.apply(promise, arguments)
}
module.exports = require('supertest')
|
5b38c2698e2932e68f19b976f849325363140821 | lib/cache_comment/comment_formatter.rb | lib/cache_comment/comment_formatter.rb | module CacheComment
class CommentFormatter
def initialize key, options
@key = key
@options = (options || {}).symbolize_keys
@time = Time.now
end
def start
comment = ['cached']
comment << @time
comment << 'with key'
comment << @key
if expires_in = @options[:expires_in]
comment << 'valid until'
comment << @time + expires_in
end
wrap comment.map(&:to_s).join(' ')
end
def start_regex
Regexp.new start.gsub(@time.to_s, '.*').gsub((@time + @options[:expires_in]).to_s, '.*')
end
def end
wrap "end of #{@key}"
end
def wrap(content)
"<!-- #{content} -->\n"
end
end
end | module CacheComment
class CommentFormatter
def initialize key, options
@key = key
@options = (options || {}).symbolize_keys
@time = Time.now
end
def start
comment = ['cached']
comment << @time
comment << 'with key'
comment << @key
if expires_in = @options[:expires_in]
comment << 'valid until'
comment << @time + expires_in
end
wrap comment.map(&:to_s).join(' ')
end
def start_regex
regexp = start.gsub(@time.to_s, '.*')
regexp.gsub!((@time + @options[:expires_in]).to_s, '.*') if @options[:expires_in]
Regexp.new regexp
end
def end
wrap "end of #{@key}"
end
def wrap(content)
"<!-- #{content} -->\n"
end
end
end | Fix regexp for cache without expiry | Fix regexp for cache without expiry
| Ruby | mit | mschae/cache_comment | ruby | ## Code Before:
module CacheComment
class CommentFormatter
def initialize key, options
@key = key
@options = (options || {}).symbolize_keys
@time = Time.now
end
def start
comment = ['cached']
comment << @time
comment << 'with key'
comment << @key
if expires_in = @options[:expires_in]
comment << 'valid until'
comment << @time + expires_in
end
wrap comment.map(&:to_s).join(' ')
end
def start_regex
Regexp.new start.gsub(@time.to_s, '.*').gsub((@time + @options[:expires_in]).to_s, '.*')
end
def end
wrap "end of #{@key}"
end
def wrap(content)
"<!-- #{content} -->\n"
end
end
end
## Instruction:
Fix regexp for cache without expiry
## Code After:
module CacheComment
class CommentFormatter
def initialize key, options
@key = key
@options = (options || {}).symbolize_keys
@time = Time.now
end
def start
comment = ['cached']
comment << @time
comment << 'with key'
comment << @key
if expires_in = @options[:expires_in]
comment << 'valid until'
comment << @time + expires_in
end
wrap comment.map(&:to_s).join(' ')
end
def start_regex
regexp = start.gsub(@time.to_s, '.*')
regexp.gsub!((@time + @options[:expires_in]).to_s, '.*') if @options[:expires_in]
Regexp.new regexp
end
def end
wrap "end of #{@key}"
end
def wrap(content)
"<!-- #{content} -->\n"
end
end
end |
7fd8dbf10b8d5ada549ed1c88c95877c223afedb | .github/workflows/wheels.yml | .github/workflows/wheels.yml | name: Build
on: [push, pull_request]
jobs:
build_wheels:
name: Build wheels on ${{ matrix.os }}
runs-on: ${{ matrix.os }}
strategy:
matrix:
os: [macos-11, macos-10.15]
steps:
- uses: actions/checkout@v2
with:
submodules: true
- name: Build wheels
uses: joerick/[email protected]
env:
CIBW_BUILD: "cp39-* cp310-*"
CIBW_ARCHS_MACOS: "arm64 x86_64"
CIBW_TEST_COMMAND: "python -m pymunk.tests"
CIBW_BUILD_VERBOSITY: 3
- uses: actions/upload-artifact@v2
if: ${{ github.ref == 'refs/heads/arm64' }}
with:
path: ./wheelhouse/*.whl
retention-days: 7
| name: Build
on: [push, pull_request]
jobs:
build_wheels:
name: Build wheels on ${{ matrix.os }}
runs-on: ${{ matrix.os }}
strategy:
matrix:
os: [ubuntu-20.04, windows-2019, macos-11]
steps:
- uses: actions/checkout@v2
with:
submodules: true
- name: Build wheels
uses: joerick/[email protected]
env:
CIBW_BUILD: "cp36-* cp37-* cp38-* cp39-* cp310-* pp37-*"
CIBW_ARCHS_MACOS: "arm64 x86_64"
CIBW_TEST_COMMAND: "python -m pymunk.tests"
CIBW_BUILD_VERBOSITY: 3
- uses: actions/upload-artifact@v2
if: ${{ github.ref == 'refs/heads/arm64' }}
with:
path: ./wheelhouse/*.whl
retention-days: 7
| Update build to build all | Update build to build all
| YAML | mit | viblo/pymunk,viblo/pymunk | yaml | ## Code Before:
name: Build
on: [push, pull_request]
jobs:
build_wheels:
name: Build wheels on ${{ matrix.os }}
runs-on: ${{ matrix.os }}
strategy:
matrix:
os: [macos-11, macos-10.15]
steps:
- uses: actions/checkout@v2
with:
submodules: true
- name: Build wheels
uses: joerick/[email protected]
env:
CIBW_BUILD: "cp39-* cp310-*"
CIBW_ARCHS_MACOS: "arm64 x86_64"
CIBW_TEST_COMMAND: "python -m pymunk.tests"
CIBW_BUILD_VERBOSITY: 3
- uses: actions/upload-artifact@v2
if: ${{ github.ref == 'refs/heads/arm64' }}
with:
path: ./wheelhouse/*.whl
retention-days: 7
## Instruction:
Update build to build all
## Code After:
name: Build
on: [push, pull_request]
jobs:
build_wheels:
name: Build wheels on ${{ matrix.os }}
runs-on: ${{ matrix.os }}
strategy:
matrix:
os: [ubuntu-20.04, windows-2019, macos-11]
steps:
- uses: actions/checkout@v2
with:
submodules: true
- name: Build wheels
uses: joerick/[email protected]
env:
CIBW_BUILD: "cp36-* cp37-* cp38-* cp39-* cp310-* pp37-*"
CIBW_ARCHS_MACOS: "arm64 x86_64"
CIBW_TEST_COMMAND: "python -m pymunk.tests"
CIBW_BUILD_VERBOSITY: 3
- uses: actions/upload-artifact@v2
if: ${{ github.ref == 'refs/heads/arm64' }}
with:
path: ./wheelhouse/*.whl
retention-days: 7
|
f7b66fd1bbd62799983dccb0f12c274f86350bd2 | index.js | index.js | /**
* External dependencies.
*/
var mongoose = require('mongoose');
/**
* Drop all known mongoose collections before
* each test.
*
* @param {Object} hydro
* @api public
*/
module.exports = function(hydro) {
hydro.on('pre:test', function(file, done) {
var collections = Object.keys(mongoose.connection.collections);
var len = collections.length;
if (len === 0) return done();
collections.forEach(function(collection) {
mongoose.connection.collections[collection].drop(function() {
if (--len === 0) done();
});
});
});
};
| /**
* External dependencies.
*/
var mongoose = require('mongoose');
/**
* Drop all known mongoose collections before
* each test.
*
* @param {Object} hydro
* @api public
*/
module.exports = function(hydro) {
hydro.on('pre:test', function(file, done) {
var collections = Object.keys(mongoose.connection.collections);
var len = collections.length;
if (len === 0) return done();
var timeout = setTimeout(function() {
throw new Error('mongoose collections drop took more than 2s. Make sure that you have established a database connection');
}, 2000);
var end = function() {
clearTimeout(timeout);
done();
};
collections.forEach(function(collection) {
mongoose.connection.collections[collection].drop(function() {
if (--len === 0) end();
});
});
});
};
| Throw an error if dropping the collections took more than 2s | Throw an error if dropping the collections took more than 2s
| JavaScript | mit | hydrojs/mongoose | javascript | ## Code Before:
/**
* External dependencies.
*/
var mongoose = require('mongoose');
/**
* Drop all known mongoose collections before
* each test.
*
* @param {Object} hydro
* @api public
*/
module.exports = function(hydro) {
hydro.on('pre:test', function(file, done) {
var collections = Object.keys(mongoose.connection.collections);
var len = collections.length;
if (len === 0) return done();
collections.forEach(function(collection) {
mongoose.connection.collections[collection].drop(function() {
if (--len === 0) done();
});
});
});
};
## Instruction:
Throw an error if dropping the collections took more than 2s
## Code After:
/**
* External dependencies.
*/
var mongoose = require('mongoose');
/**
* Drop all known mongoose collections before
* each test.
*
* @param {Object} hydro
* @api public
*/
module.exports = function(hydro) {
hydro.on('pre:test', function(file, done) {
var collections = Object.keys(mongoose.connection.collections);
var len = collections.length;
if (len === 0) return done();
var timeout = setTimeout(function() {
throw new Error('mongoose collections drop took more than 2s. Make sure that you have established a database connection');
}, 2000);
var end = function() {
clearTimeout(timeout);
done();
};
collections.forEach(function(collection) {
mongoose.connection.collections[collection].drop(function() {
if (--len === 0) end();
});
});
});
};
|
49645ca7f579e5499f21e2192d16f4eed1271e82 | tests/integration/cli/sync_test.py | tests/integration/cli/sync_test.py | from mock import patch
from ...testcases import DustyIntegrationTestCase
from ...fixtures import busybox_single_app_bundle_fixture
class TestSyncCLI(DustyIntegrationTestCase):
def setUp(self):
super(TestSyncCLI, self).setUp()
busybox_single_app_bundle_fixture()
self.run_command('bundles activate busyboxa')
self.run_command('up')
def test_sync_repo(self):
self.exec_in_container('busyboxa', 'rm -rf /repo')
self.assertFileNotInContainer('busyboxa', '/repo/README.md')
self.run_command('sync fake-repo')
self.assertFileContentsInContainer('busyboxa',
'/repo/README.md',
'# fake-repo')
| from ...testcases import DustyIntegrationTestCase
from ...fixtures import busybox_single_app_bundle_fixture
class TestSyncCLI(DustyIntegrationTestCase):
def setUp(self):
super(TestSyncCLI, self).setUp()
busybox_single_app_bundle_fixture()
self.run_command('bundles activate busyboxa')
self.run_command('up')
def tearDown(self):
super(TestSyncCLI, self).tearDown()
self.run_command('bundles deactivate busyboxa')
try:
self.run_command('stop')
except Exception:
pass
def test_sync_repo(self):
self.exec_in_container('busyboxa', 'rm -rf /repo')
self.assertFileNotInContainer('busyboxa', '/repo/README.md')
self.run_command('sync fake-repo')
self.assertFileContentsInContainer('busyboxa',
'/repo/README.md',
'# fake-repo')
| Clean up after sync integration tests | Clean up after sync integration tests
| Python | mit | gamechanger/dusty,gamechanger/dusty,gamechanger/dusty,gamechanger/dusty,gamechanger/dusty | python | ## Code Before:
from mock import patch
from ...testcases import DustyIntegrationTestCase
from ...fixtures import busybox_single_app_bundle_fixture
class TestSyncCLI(DustyIntegrationTestCase):
def setUp(self):
super(TestSyncCLI, self).setUp()
busybox_single_app_bundle_fixture()
self.run_command('bundles activate busyboxa')
self.run_command('up')
def test_sync_repo(self):
self.exec_in_container('busyboxa', 'rm -rf /repo')
self.assertFileNotInContainer('busyboxa', '/repo/README.md')
self.run_command('sync fake-repo')
self.assertFileContentsInContainer('busyboxa',
'/repo/README.md',
'# fake-repo')
## Instruction:
Clean up after sync integration tests
## Code After:
from ...testcases import DustyIntegrationTestCase
from ...fixtures import busybox_single_app_bundle_fixture
class TestSyncCLI(DustyIntegrationTestCase):
def setUp(self):
super(TestSyncCLI, self).setUp()
busybox_single_app_bundle_fixture()
self.run_command('bundles activate busyboxa')
self.run_command('up')
def tearDown(self):
super(TestSyncCLI, self).tearDown()
self.run_command('bundles deactivate busyboxa')
try:
self.run_command('stop')
except Exception:
pass
def test_sync_repo(self):
self.exec_in_container('busyboxa', 'rm -rf /repo')
self.assertFileNotInContainer('busyboxa', '/repo/README.md')
self.run_command('sync fake-repo')
self.assertFileContentsInContainer('busyboxa',
'/repo/README.md',
'# fake-repo')
|
785c2346f1847665afd981cfeb562b79d236b834 | index.js | index.js | var fs = require('fs');
module.exports = JSON.parse(fs.readFileSync('.eslintrc'));
| var fs = require('fs');
var path = require('path');
module.exports = JSON.parse(fs.readFileSync(path.join(__dirname, '.eslintrc')));
| Make sure that *this* module's `.eslintrc` is exported. | Make sure that *this* module's `.eslintrc` is exported. | JavaScript | mit | briandipalma/eslint-config,ljharb/eslint-config | javascript | ## Code Before:
var fs = require('fs');
module.exports = JSON.parse(fs.readFileSync('.eslintrc'));
## Instruction:
Make sure that *this* module's `.eslintrc` is exported.
## Code After:
var fs = require('fs');
var path = require('path');
module.exports = JSON.parse(fs.readFileSync(path.join(__dirname, '.eslintrc')));
|
edeec29d30fbb7043307bda3978ca487b12586cf | lib/simctl/device_settings.rb | lib/simctl/device_settings.rb | require 'cfpropertylist'
module SimCtl
class DeviceSettings
attr_reader :path
def initialize(path)
@path = path
end
# Disables the keyboard helpers
#
# @return [void]
def disable_keyboard_helpers!
edit(path.preferences_plist) do |plist|
%w(
KeyboardPeriodShortcut
KeyboardAutocapitalization
KeyboardCheckSpelling
KeyboardAssistant
KeyboardAutocorrection
KeyboardPrediction
KeyboardShowPredictionBar
KeyboardCapsLock
).each do |key|
plist[key] = false
end
plist
end
end
private
def edit(path, &block)
plist = File.exists?(path) ? CFPropertyList::List.new(file: path) : CFPropertyList::List.new
content = CFPropertyList.native_types(plist.value) || {}
plist.value = CFPropertyList.guess(yield content)
plist.save(path, CFPropertyList::List::FORMAT_BINARY)
end
end
end
| require 'cfpropertylist'
module SimCtl
class DeviceSettings
attr_reader :path
def initialize(path)
@path = path
end
# Disables the keyboard helpers
#
# @return [void]
def disable_keyboard_helpers!
edit(path.preferences_plist) do |plist|
%w(
KeyboardAllowPaddle
KeyboardAssistant
KeyboardAutocapitalization
KeyboardAutocorrection
KeyboardCapsLock
KeyboardCheckSpelling
KeyboardPeriodShortcut
KeyboardPrediction
KeyboardShowPredictionBar
).each do |key|
plist[key] = false
end
plist
end
end
private
def edit(path, &block)
plist = File.exists?(path) ? CFPropertyList::List.new(file: path) : CFPropertyList::List.new
content = CFPropertyList.native_types(plist.value) || {}
plist.value = CFPropertyList.guess(yield content)
plist.save(path, CFPropertyList::List::FORMAT_BINARY)
end
end
end
| Sort & add missing keyboard options | Sort & add missing keyboard options
| Ruby | mit | adamprice/simctl,plu/simctl | ruby | ## Code Before:
require 'cfpropertylist'
module SimCtl
class DeviceSettings
attr_reader :path
def initialize(path)
@path = path
end
# Disables the keyboard helpers
#
# @return [void]
def disable_keyboard_helpers!
edit(path.preferences_plist) do |plist|
%w(
KeyboardPeriodShortcut
KeyboardAutocapitalization
KeyboardCheckSpelling
KeyboardAssistant
KeyboardAutocorrection
KeyboardPrediction
KeyboardShowPredictionBar
KeyboardCapsLock
).each do |key|
plist[key] = false
end
plist
end
end
private
def edit(path, &block)
plist = File.exists?(path) ? CFPropertyList::List.new(file: path) : CFPropertyList::List.new
content = CFPropertyList.native_types(plist.value) || {}
plist.value = CFPropertyList.guess(yield content)
plist.save(path, CFPropertyList::List::FORMAT_BINARY)
end
end
end
## Instruction:
Sort & add missing keyboard options
## Code After:
require 'cfpropertylist'
module SimCtl
class DeviceSettings
attr_reader :path
def initialize(path)
@path = path
end
# Disables the keyboard helpers
#
# @return [void]
def disable_keyboard_helpers!
edit(path.preferences_plist) do |plist|
%w(
KeyboardAllowPaddle
KeyboardAssistant
KeyboardAutocapitalization
KeyboardAutocorrection
KeyboardCapsLock
KeyboardCheckSpelling
KeyboardPeriodShortcut
KeyboardPrediction
KeyboardShowPredictionBar
).each do |key|
plist[key] = false
end
plist
end
end
private
def edit(path, &block)
plist = File.exists?(path) ? CFPropertyList::List.new(file: path) : CFPropertyList::List.new
content = CFPropertyList.native_types(plist.value) || {}
plist.value = CFPropertyList.guess(yield content)
plist.save(path, CFPropertyList::List::FORMAT_BINARY)
end
end
end
|