| field | dtype / lengths |
|---|---|
| commit | stringlengths 40-40 |
| old_file | stringlengths 4-237 |
| new_file | stringlengths 4-237 |
| old_contents | stringlengths 1-4.24k |
| new_contents | stringlengths 5-4.84k |
| subject | stringlengths 15-778 |
| message | stringlengths 16-6.86k |
| lang | stringlengths 1-30 |
| license | stringclasses 13 values |
| repos | stringlengths 5-116k |
| config | stringlengths 1-30 |
| content | stringlengths 105-8.72k |
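The table above summarizes the per-column string-length statistics of this commit-pair dataset; the flattened rows below follow that schema, with each field value separated by a `|` line. As a minimal sketch of how such data could be loaded and inspected with the Hugging Face `datasets` library: the local path `./commit-dataset` and the parquet layout are assumptions, not stated anywhere in this dump.

```python
# Minimal sketch, not the canonical loader for this dump: the path below is a
# placeholder and the parquet storage format is an assumption.
from datasets import load_dataset

ds = load_dataset("parquet", data_files="./commit-dataset/*.parquet", split="train")

# Each row pairs a commit's old/new file contents with its subject, message,
# language, license, repo list, and a pre-rendered "content" prompt string.
for row in ds.select(range(3)):
    print(row["commit"], row["lang"], row["subject"])
    print(row["content"][:200])
```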
ee93af3d9eacb5661191338edfb7d81ed685bcac
|
builds/truenas-9.10-releng/freenas.cfg
|
builds/truenas-9.10-releng/freenas.cfg
|
GITFNASURL="https://github.com/freenas/freenas-build.git"
export GITFNASURL
# Branch of the FreeNAS repo to build with
# Available (freenas10)
GITFNASBRANCH="TN-9.10.1-U2.1"
export GITFNASBRANCH
# Set location for release to be stored
TARGETREL="truenas-9.10-releng"
export TARGETREL
# This is a 9.10 build, Legacy middleware + new build
FREENASLEGACY="910"
export FREENASLEGACY
# Set which MILESTONE to set
BUILDOPTS="PRODUCTION=yes PRODUCT=TrueNAS"
export BUILDOPTS
|
GITFNASURL="https://github.com/freenas/freenas-build.git"
export GITFNASURL
# Branch of the FreeNAS repo to build with
# Available (freenas10)
GITFNASBRANCH="9.10-STABLE"
export GITFNASBRANCH
# Set location for release to be stored
TARGETREL="truenas-9.10-releng"
export TARGETREL
# This is a 9.10 build, Legacy middleware + new build
FREENASLEGACY="910"
export FREENASLEGACY
# Set which MILESTONE to set
BUILDOPTS="PRODUCTION=yes PRODUCT=TrueNAS"
export BUILDOPTS
|
Revert "Revert this change after releasing TrueNAS-9.10.1-U2.1"
|
Revert "Revert this change after releasing TrueNAS-9.10.1-U2.1"
This reverts commit c88efba7a5f05ee19d21bbba55383d7b331d2328.
|
INI
|
bsd-2-clause
|
iXsystems/ixbuild,iXsystems/ix-tests,iXsystems/ix-tests
|
ini
|
## Code Before:
GITFNASURL="https://github.com/freenas/freenas-build.git"
export GITFNASURL
# Branch of the FreeNAS repo to build with
# Available (freenas10)
GITFNASBRANCH="TN-9.10.1-U2.1"
export GITFNASBRANCH
# Set location for release to be stored
TARGETREL="truenas-9.10-releng"
export TARGETREL
# This is a 9.10 build, Legacy middleware + new build
FREENASLEGACY="910"
export FREENASLEGACY
# Set which MILESTONE to set
BUILDOPTS="PRODUCTION=yes PRODUCT=TrueNAS"
export BUILDOPTS
## Instruction:
Revert "Revert this change after releasing TrueNAS-9.10.1-U2.1"
This reverts commit c88efba7a5f05ee19d21bbba55383d7b331d2328.
## Code After:
GITFNASURL="https://github.com/freenas/freenas-build.git"
export GITFNASURL
# Branch of the FreeNAS repo to build with
# Available (freenas10)
GITFNASBRANCH="9.10-STABLE"
export GITFNASBRANCH
# Set location for release to be stored
TARGETREL="truenas-9.10-releng"
export TARGETREL
# This is a 9.10 build, Legacy middleware + new build
FREENASLEGACY="910"
export FREENASLEGACY
# Set which MILESTONE to set
BUILDOPTS="PRODUCTION=yes PRODUCT=TrueNAS"
export BUILDOPTS
|
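As the first row above shows, the `content` field stitches the old file contents, the commit message, and the new file contents together under the literal `## Code Before:`, `## Instruction:`, and `## Code After:` headers. A small sketch of splitting that field back into its parts follows; the helper name and regex are illustrative, not part of the dataset.

```python
import re

# Illustrative helper: split one "content" string on the literal section
# markers used in the rows ("## Code Before:", "## Instruction:", "## Code After:").
def split_content(content: str) -> dict:
    pattern = r"## Code Before:\n(.*?)## Instruction:\n(.*?)## Code After:\n(.*)"
    match = re.search(pattern, content, flags=re.DOTALL)
    if match is None:
        raise ValueError("content does not follow the expected marker layout")
    before, instruction, after = (part.strip("\n") for part in match.groups())
    return {"before": before, "instruction": instruction, "after": after}
```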
cb89d3e13f63f46ba25c3549ba7d9609f4f5c145
|
Grid/Column/ActionsColumn.php
|
Grid/Column/ActionsColumn.php
|
<?php
/*
* This file is part of the DataGridBundle.
*
* (c) Stanislav Turza <[email protected]>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
namespace Sorien\DataGridBundle\Grid\Column;
use Sorien\DataGridBundle\Grid\Action\RowAction;
class ActionsColumn extends Column
{
private $rowActions;
public function __construct($column, $title, array $rowActions = array())
{
$this->rowActions = $rowActions;
parent::__construct(array('id' => $column, 'title' => $title, 'sortable' => false, 'source' => false));
}
public function getRouteParameters($row, $action)
{
$actionParameters = $action->getRouteParameters();
if(!empty($actionParameters)){
$routeParameters = array();
foreach ($actionParameters as $name => $parameter) {
if(is_numeric($name)){
$routeParameters[$parameter] = $row->getField($parameter);
} else {
$routeParameters[$name] = $parameter;
}
}
return $routeParameters;
}
$routeParameters = array_merge(
array($row->getPrimaryField() => $row->getPrimaryFieldValue()),
$action->getRouteParameters()
);
return $routeParameters;
}
public function getRowActions()
{
return $this->rowActions;
}
public function setRowActions(array $rowActions) {
$this->rowActions = $rowActions;
}
public function getType()
{
return 'actions';
}
}
|
<?php
/*
* This file is part of the DataGridBundle.
*
* (c) Stanislav Turza <[email protected]>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
namespace Sorien\DataGridBundle\Grid\Column;
use Sorien\DataGridBundle\Grid\Action\RowAction;
class ActionsColumn extends Column
{
private $rowActions;
public function __construct($column, $title, array $rowActions = array())
{
$this->rowActions = $rowActions;
parent::__construct(array('id' => $column, 'title' => $title, 'sortable' => false, 'source' => false));
}
public function getRouteParameters($row, $action)
{
$actionParameters = $action->getRouteParameters();
if(!empty($actionParameters)){
$routeParameters = array();
foreach ($actionParameters as $name => $parameter) {
if(is_numeric($name)){
$routeParameters[$parameter] = $row->getField($parameter);
} else {
$routeParameters[$name] = $parameter;
}
}
}
return array_merge(
array($row->getPrimaryField() => $row->getPrimaryFieldValue()),
$routeParameters
);
}
public function getRowActions()
{
return $this->rowActions;
}
public function setRowActions(array $rowActions) {
$this->rowActions = $rowActions;
}
public function getType()
{
return 'actions';
}
}
|
Fix route parameters for a row action
|
Fix route parameters for a row action
|
PHP
|
mit
|
lashus/APYDataGridBundle,ibuildingsnl/APYDataGridBundle,matthieuauger/APYDataGridBundle,CiscoVE/APYDataGridBundle,rafacouto/APYDataGridBundle,rogamoore/APYDataGridBundle,Abhoryo/APYDataGridBundle,ustrugany/APYDataGridBundle,andreia/APYDataGridBundle,ibuildingsnl/APYDataGridBundle,APY/APYDataGridBundle,lukaszsobieraj/APYDataGridBundle,ufik/APYDataGridBundle,lukaszsobieraj/APYDataGridBundle,b-durand/APYDataGridBundle,tunght13488/APYDataGridBundle,unrealevil/GvnGridBundle,medinadato/APYDataGridBundle,LewisW/APYDataGridBundle,pierredup/APYDataGridBundle,maximechagnolleau/APYDataGridBundle,fastsupply/APYDataGridBundle,sygnisoft/APYDataGridBundle,tamago-db/APYDataGridBundle,qferr/APYDataGridBundle,sixty-nine/APYDataGridBundle,mdzzohrabi/APYDataGridBundle,dnacreative/APYDataGridBundle,darnel/APYDataGridBundle,mdzzohrabi/APYDataGridBundle,qferr/APYDataGridBundle,delasallejean/APYDataGridBundle,unrealevil/GvnGridBundle,fastsupply/APYDataGridBundle,tomcyr/APYDataGridBundle,Abhoryo/APYDataGridBundle,delasallejean/APYDataGridBundle,CiscoVE/APYDataGridBundle,dnacreative/APYDataGridBundle,ip512/APYDataGridBundle
|
php
|
## Code Before:
<?php
/*
* This file is part of the DataGridBundle.
*
* (c) Stanislav Turza <[email protected]>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
namespace Sorien\DataGridBundle\Grid\Column;
use Sorien\DataGridBundle\Grid\Action\RowAction;
class ActionsColumn extends Column
{
private $rowActions;
public function __construct($column, $title, array $rowActions = array())
{
$this->rowActions = $rowActions;
parent::__construct(array('id' => $column, 'title' => $title, 'sortable' => false, 'source' => false));
}
public function getRouteParameters($row, $action)
{
$actionParameters = $action->getRouteParameters();
if(!empty($actionParameters)){
$routeParameters = array();
foreach ($actionParameters as $name => $parameter) {
if(is_numeric($name)){
$routeParameters[$parameter] = $row->getField($parameter);
} else {
$routeParameters[$name] = $parameter;
}
}
return $routeParameters;
}
$routeParameters = array_merge(
array($row->getPrimaryField() => $row->getPrimaryFieldValue()),
$action->getRouteParameters()
);
return $routeParameters;
}
public function getRowActions()
{
return $this->rowActions;
}
public function setRowActions(array $rowActions) {
$this->rowActions = $rowActions;
}
public function getType()
{
return 'actions';
}
}
## Instruction:
Fix route parameters for a row action
## Code After:
<?php
/*
* This file is part of the DataGridBundle.
*
* (c) Stanislav Turza <[email protected]>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
namespace Sorien\DataGridBundle\Grid\Column;
use Sorien\DataGridBundle\Grid\Action\RowAction;
class ActionsColumn extends Column
{
private $rowActions;
public function __construct($column, $title, array $rowActions = array())
{
$this->rowActions = $rowActions;
parent::__construct(array('id' => $column, 'title' => $title, 'sortable' => false, 'source' => false));
}
public function getRouteParameters($row, $action)
{
$actionParameters = $action->getRouteParameters();
if(!empty($actionParameters)){
$routeParameters = array();
foreach ($actionParameters as $name => $parameter) {
if(is_numeric($name)){
$routeParameters[$parameter] = $row->getField($parameter);
} else {
$routeParameters[$name] = $parameter;
}
}
}
return array_merge(
array($row->getPrimaryField() => $row->getPrimaryFieldValue()),
$routeParameters
);
}
public function getRowActions()
{
return $this->rowActions;
}
public function setRowActions(array $rowActions) {
$this->rowActions = $rowActions;
}
public function getType()
{
return 'actions';
}
}
|
6ec8f078f11623db1215ea548374874191c5dc88
|
docs/html-jsx.md
|
docs/html-jsx.md
|
---
layout: default
title: HTML to JSX
id: html-jsx
---
<div class="jsxCompiler">
<h1>HTML to JSX Compiler</h1>
<div id="jsxCompiler"></div>
<script src="https://reactjs.github.io/react-magic/htmltojsx.min.js"></script>
<script src="js/html-jsx.js"></script>
</div>
|
---
layout: default
title: HTML to JSX
id: html-jsx
---
<div class="jsxCompiler">
<h1>HTML to JSX Compiler</h1>
<div id="jsxCompiler"></div>
<script src="https://reactcommunity.org/react-magic/htmltojsx.min.js"></script>
<script src="js/html-jsx.js"></script>
</div>
|
Change script src for htmltojsx.min.js to referral target to fix https error
|
Change script src for htmltojsx.min.js to referral target to fix https error
|
Markdown
|
bsd-3-clause
|
kaushik94/react,TheBlasfem/react,TaaKey/react,empyrical/react,mgmcdermott/react,mjackson/react,salzhrani/react,nhunzaker/react,chicoxyzzy/react,rlugojr/react,yangshun/react,mhhegazy/react,reactjs-vn/reactjs_vndev,yungsters/react,krasimir/react,quip/react,chippieTV/react,rricard/react,apaatsio/react,acdlite/react,billfeller/react,aickin/react,yungsters/react,bspaulding/react,nathanmarks/react,salzhrani/react,niubaba63/react,roth1002/react,ericyang321/react,claudiopro/react,terminatorheart/react,apaatsio/react,salzhrani/react,ameyms/react,richiethomas/react,mhhegazy/react,brigand/react,sekiyaeiji/react,with-git/react,acdlite/react,maxschmeling/react,kaushik94/react,rricard/react,quip/react,joecritch/react,cpojer/react,tomocchino/react,TaaKey/react,Simek/react,facebook/react,flarnie/react,sekiyaeiji/react,Simek/react,chicoxyzzy/react,reactjs-vn/reactjs_vndev,reactkr/react,krasimir/react,camsong/react,cpojer/react,facebook/react,pswai/react,theseyi/react,aickin/react,salzhrani/react,rohannair/react,ericyang321/react,VioletLife/react,pandoraui/react,leexiaosi/react,bspaulding/react,camsong/react,prometheansacrifice/react,salier/react,theseyi/react,yuhualingfeng/react,mgmcdermott/react,TaaKey/react,rickbeerendonk/react,jdlehman/react,aickin/react,with-git/react,ericyang321/react,apaatsio/react,dilidili/react,VioletLife/react,yangshun/react,flipactual/react,richiethomas/react,quip/react,sekiyaeiji/react,empyrical/react,chippieTV/react,VioletLife/react,bspaulding/react,iamchenxin/react,ArunTesco/react,staltz/react,cpojer/react,apaatsio/react,jontewks/react,usgoodus/react,syranide/react,flipactual/react,STRML/react,yungsters/react,yungsters/react,nathanmarks/react,reactjs-vn/reactjs_vndev,jordanpapaleo/react,theseyi/react,roth1002/react,roth1002/react,hejld/react,flarnie/react,ericyang321/react,rlugojr/react,acdlite/react,pswai/react,yungsters/react,flarnie/react,shergin/react,andrerpena/react,niubaba63/react,ericyang321/react,rlugojr/react,eoin/react,with-git/react,quip/react,empyrical/react,microlv/react,jimfb/react,trueadm/react,chicoxyzzy/react,jordanpapaleo/react,microlv/react,eoin/react,ameyms/react,rohannair/react,chippieTV/react,krasimir/react,andrerpena/react,syranide/react,ericyang321/react,kaushik94/react,empyrical/react,yuhualingfeng/react,yiminghe/react,jameszhan/react,billfeller/react,staltz/react,nathanmarks/react,rohannair/react,TaaKey/react,prometheansacrifice/react,jdlehman/react,mhhegazy/react,Simek/react,rlugojr/react,richiethomas/react,yangshun/react,edvinerikson/react,jameszhan/react,jordanpapaleo/react,reactkr/react,tom-wang/react,camsong/react,acdlite/react,jimfb/react,joecritch/react,VioletLife/react,jorrit/react,dmitriiabramov/react,jordanpapaleo/react,aickin/react,cpojer/react,jordanpapaleo/react,iamchenxin/react,syranide/react,eoin/react,brigand/react,jorrit/react,bspaulding/react,mosoft521/react,jimfb/react,jimfb/react,tom-wang/react,krasimir/react,theseyi/react,reactkr/react,rohannair/react,nhunzaker/react,yangshun/react,wmydz1/react,dmitriiabramov/react,billfeller/react,VioletLife/react,AlmeroSteyn/react,jameszhan/react,nhunzaker/react,jzmq/react,tom-wang/react,facebook/react,joecritch/react,jzmq/react,theseyi/react,TaaKey/react,niubaba63/react,ameyms/react,jdlehman/react,edvinerikson/react,shergin/react,jontewks/react,edvinerikson/react,tom-wang/react,eoin/react,terminatorheart/react,mhhegazy/react,usgoodus/react,bspaulding/react,dmitriiabramov/react,quip/react,apaatsio/react,brigand/react,aaron-goshine/react,aickin/react,ameyms/react,pswai/react,pyitphyoaung/react,ni
ubaba63/react,glenjamin/react,VioletLife/react,niubaba63/react,tom-wang/react,yiminghe/react,roth1002/react,anushreesubramani/react,cpojer/react,tomocchino/react,camsong/react,microlv/react,AlmeroSteyn/react,brigand/react,acdlite/react,jzmq/react,claudiopro/react,chicoxyzzy/react,dilidili/react,TaaKey/react,maxschmeling/react,maxschmeling/react,andrerpena/react,eoin/react,chenglou/react,yungsters/react,dmitriiabramov/react,andrerpena/react,STRML/react,anushreesubramani/react,jzmq/react,bspaulding/react,chenglou/react,mosoft521/react,aickin/react,AlmeroSteyn/react,silvestrijonathan/react,joecritch/react,billfeller/react,anushreesubramani/react,joecritch/react,reactkr/react,kaushik94/react,Simek/react,pswai/react,acdlite/react,dilidili/react,iamchenxin/react,rlugojr/react,yuhualingfeng/react,glenjamin/react,pandoraui/react,anushreesubramani/react,camsong/react,TheBlasfem/react,tomocchino/react,kaushik94/react,empyrical/react,joecritch/react,richiethomas/react,aickin/react,mhhegazy/react,jorrit/react,yuhualingfeng/react,rickbeerendonk/react,salier/react,empyrical/react,airondumael/react,terminatorheart/react,dmitriiabramov/react,edvinerikson/react,aaron-goshine/react,glenjamin/react,chenglou/react,jontewks/react,prometheansacrifice/react,trueadm/react,reactjs-vn/reactjs_vndev,STRML/react,rickbeerendonk/react,niubaba63/react,wmydz1/react,maxschmeling/react,chippieTV/react,prometheansacrifice/react,shergin/react,wmydz1/react,terminatorheart/react,billfeller/react,claudiopro/react,STRML/react,TheBlasfem/react,facebook/react,ericyang321/react,maxschmeling/react,VioletLife/react,silvestrijonathan/react,pyitphyoaung/react,theseyi/react,nathanmarks/react,mjackson/react,camsong/react,anushreesubramani/react,pyitphyoaung/react,jquense/react,rohannair/react,ameyms/react,aaron-goshine/react,AlmeroSteyn/react,flarnie/react,aaron-goshine/react,yiminghe/react,microlv/react,trueadm/react,kaushik94/react,rricard/react,microlv/react,chicoxyzzy/react,silvestrijonathan/react,staltz/react,edvinerikson/react,with-git/react,billfeller/react,bspaulding/react,AlmeroSteyn/react,mjackson/react,nhunzaker/react,jdlehman/react,claudiopro/react,andrerpena/react,roth1002/react,with-git/react,acdlite/react,rricard/react,maxschmeling/react,jdlehman/react,nathanmarks/react,usgoodus/react,STRML/react,iamchenxin/react,AlmeroSteyn/react,hejld/react,shergin/react,silvestrijonathan/react,chicoxyzzy/react,flarnie/react,facebook/react,wmydz1/react,jimfb/react,jontewks/react,maxschmeling/react,ameyms/react,glenjamin/react,Simek/react,rlugojr/react,facebook/react,mjackson/react,rickbeerendonk/react,mosoft521/react,tomocchino/react,pswai/react,richiethomas/react,jzmq/react,claudiopro/react,mhhegazy/react,jimfb/react,apaatsio/react,jameszhan/react,rickbeerendonk/react,pyitphyoaung/react,tomocchino/react,silvestrijonathan/react,chicoxyzzy/react,krasimir/react,cpojer/react,airondumael/react,wmydz1/react,dmitriiabramov/react,STRML/react,shergin/react,roth1002/react,pyitphyoaung/react,reactkr/react,prometheansacrifice/react,richiethomas/react,mjackson/react,claudiopro/react,chippieTV/react,chippieTV/react,silvestrijonathan/react,yungsters/react,salier/react,TheBlasfem/react,yiminghe/react,mjackson/react,TheBlasfem/react,krasimir/react,roth1002/react,glenjamin/react,terminatorheart/react,airondumael/react,AlmeroSteyn/react,shergin/react,cpojer/react,aaron-goshine/react,brigand/react,yuhualingfeng/react,jameszhan/react,silvestrijonathan/react,trueadm/react,pyitphyoaung/react,Simek/react,jorrit/react,hejld/react,pswai/react,hejld/react,reactjs-
vn/reactjs_vndev,mgmcdermott/react,jameszhan/react,TaaKey/react,jquense/react,jordanpapaleo/react,yangshun/react,camsong/react,salier/react,staltz/react,flarnie/react,mgmcdermott/react,prometheansacrifice/react,niubaba63/react,anushreesubramani/react,empyrical/react,aaron-goshine/react,jontewks/react,dilidili/react,jquense/react,glenjamin/react,jontewks/react,yiminghe/react,prometheansacrifice/react,ArunTesco/react,usgoodus/react,jorrit/react,anushreesubramani/react,mgmcdermott/react,brigand/react,hejld/react,jquense/react,jquense/react,jdlehman/react,rohannair/react,nhunzaker/react,trueadm/react,nhunzaker/react,rricard/react,jorrit/react,pandoraui/react,apaatsio/react,TaaKey/react,wmydz1/react,rricard/react,edvinerikson/react,mosoft521/react,hejld/react,yiminghe/react,mgmcdermott/react,trueadm/react,quip/react,jzmq/react,jquense/react,eoin/react,jameszhan/react,trueadm/react,leexiaosi/react,reactkr/react,airondumael/react,salzhrani/react,Simek/react,tomocchino/react,mosoft521/react,TaaKey/react,salier/react,wmydz1/react,chenglou/react,pyitphyoaung/react,jdlehman/react,rlugojr/react,leexiaosi/react,nathanmarks/react,reactjs-vn/reactjs_vndev,rickbeerendonk/react,tom-wang/react,chenglou/react,glenjamin/react,staltz/react,rickbeerendonk/react,iamchenxin/react,dilidili/react,edvinerikson/react,usgoodus/react,with-git/react,with-git/react,jordanpapaleo/react,pandoraui/react,quip/react,flipactual/react,TaaKey/react,microlv/react,terminatorheart/react,airondumael/react,mosoft521/react,yangshun/react,usgoodus/react,dilidili/react,chenglou/react,TaaKey/react,claudiopro/react,syranide/react,TheBlasfem/react,rohannair/react,chenglou/react,STRML/react,salzhrani/react,richiethomas/react,jorrit/react,jzmq/react,facebook/react,mhhegazy/react,yiminghe/react,krasimir/react,flarnie/react,shergin/react,jquense/react,billfeller/react,staltz/react,tomocchino/react,yangshun/react,mosoft521/react,mjackson/react,pandoraui/react,joecritch/react,ArunTesco/react,dilidili/react,brigand/react,nhunzaker/react,andrerpena/react,iamchenxin/react,salier/react,kaushik94/react
|
markdown
|
## Code Before:
---
layout: default
title: HTML to JSX
id: html-jsx
---
<div class="jsxCompiler">
<h1>HTML to JSX Compiler</h1>
<div id="jsxCompiler"></div>
<script src="https://reactjs.github.io/react-magic/htmltojsx.min.js"></script>
<script src="js/html-jsx.js"></script>
</div>
## Instruction:
Change script src for htmltojsx.min.js to referral target to fix https error
## Code After:
---
layout: default
title: HTML to JSX
id: html-jsx
---
<div class="jsxCompiler">
<h1>HTML to JSX Compiler</h1>
<div id="jsxCompiler"></div>
<script src="https://reactcommunity.org/react-magic/htmltojsx.min.js"></script>
<script src="js/html-jsx.js"></script>
</div>
|
fcb643f589b4168405bfe6d6b85ee9ad0aa29682
|
tests/cleanup.js
|
tests/cleanup.js
|
/**
* @author EmmanuelOlaojo
* @since 8/13/16
*/
module.exports = function(){
return Promise.all([
utils.dropCollection(TEST_COLLECTION_A)
, utils.dropCollection(TEST_COLLECTION_B)
]);
};
|
/**
* @author EmmanuelOlaojo
* @since 8/13/16
*/
module.exports = function(){
return Promise.all([
utils.dropCollection(TEST_COLLECTION_A)
, utils.dropCollection(TEST_COLLECTION_B)
, utils.dropCollection("fs.files")
, utils.dropCollection("fs.chunks")
]);
};
|
Drop files collection after tests
|
Drop files collection after tests
|
JavaScript
|
mit
|
e-oj/Fawn
|
javascript
|
## Code Before:
/**
* @author EmmanuelOlaojo
* @since 8/13/16
*/
module.exports = function(){
return Promise.all([
utils.dropCollection(TEST_COLLECTION_A)
, utils.dropCollection(TEST_COLLECTION_B)
]);
};
## Instruction:
Drop files collection after tests
## Code After:
/**
* @author EmmanuelOlaojo
* @since 8/13/16
*/
module.exports = function(){
return Promise.all([
utils.dropCollection(TEST_COLLECTION_A)
, utils.dropCollection(TEST_COLLECTION_B)
, utils.dropCollection("fs.files")
, utils.dropCollection("fs.chunks")
]);
};
|
9c41c296e48fb5f1d18725b822a740e3d4787143
|
lib/application_responder.rb
|
lib/application_responder.rb
|
class ApplicationResponder < ActionController::Responder
include Responders::FlashResponder
include Responders::HttpCacheResponder
# Redirects resources to the collection path (index action) instead
# of the resource path (show action) for POST/PUT/DELETE requests.
# include Responders::CollectionResponder
end
|
class ApplicationResponder < ActionController::Responder
include Responders::FlashResponder
include Responders::HttpCacheResponder
end
|
Remove default comments from ApplicationResponder
|
Remove default comments from ApplicationResponder
|
Ruby
|
agpl-3.0
|
Matt-Yorkley/openfoodnetwork,mkllnk/openfoodnetwork,lin-d-hop/openfoodnetwork,openfoodfoundation/openfoodnetwork,mkllnk/openfoodnetwork,mkllnk/openfoodnetwork,lin-d-hop/openfoodnetwork,openfoodfoundation/openfoodnetwork,lin-d-hop/openfoodnetwork,Matt-Yorkley/openfoodnetwork,Matt-Yorkley/openfoodnetwork,Matt-Yorkley/openfoodnetwork,lin-d-hop/openfoodnetwork,openfoodfoundation/openfoodnetwork,openfoodfoundation/openfoodnetwork,mkllnk/openfoodnetwork
|
ruby
|
## Code Before:
class ApplicationResponder < ActionController::Responder
include Responders::FlashResponder
include Responders::HttpCacheResponder
# Redirects resources to the collection path (index action) instead
# of the resource path (show action) for POST/PUT/DELETE requests.
# include Responders::CollectionResponder
end
## Instruction:
Remove default comments from ApplicationResponder
## Code After:
class ApplicationResponder < ActionController::Responder
include Responders::FlashResponder
include Responders::HttpCacheResponder
end
|
8c047e20ac4993e31e355a8c969caa06a4a04503
|
.travis.yml
|
.travis.yml
|
language: node_js
node_js:
- 0.10
git:
depth: 10
|
os: linux
dist: focal
group: edge
arch: arm64-graviton2
virm: vm
services: xvfb
env:
- CHROMIUM=true
cache:
directories:
- $HOME/.m2/repository
before_install:
- lscpu
- sudo apt-get install -y chromium-browser fonts-ipafont-gothic fonts-wqy-zenhei fonts-thai-tlwg fonts-kacst fonts-freefont-ttf gconf-service libasound2 libatk1.0-0 libatk-bridge2.0-0 libc6 libcairo2 libcups2 libdbus-1-3 libexpat1 libfontconfig1 libgcc1 libgconf-2-4 libgdk-pixbuf2.0-0 libglib2.0-0 libgtk-3-0 libnspr4 libpango-1.0-0 libpangocairo-1.0-0 libstdc++6 libx11-6 libx11-xcb1 libxcb1 libxcomposite1 libxcursor1 libxdamage1 libxext6 libxfixes3 libxi6 libxrandr2 libxrender1 libxss1 libxtst6 ca-certificates fonts-liberation libappindicator1 libnss3 lsb-release xdg-utils wget libappindicator3-1 libgbm1
- export JAVA_HOME="/usr/lib/jvm/java-11-openjdk-${TRAVIS_CPU_ARCH}"
- export PATH="$JAVA_HOME/bin:$PATH"
- java -version
- mvn -version
script:
- mvn clean verify -Pjs-test
|
Use TravisCI to run Wicket's Java & JS tests on ARM64 architecture
|
Use TravisCI to run Wicket's Java & JS tests on ARM64 architecture
|
YAML
|
apache-2.0
|
apache/wicket,apache/wicket,mosoft521/wicket,mosoft521/wicket,mosoft521/wicket,apache/wicket,mosoft521/wicket,apache/wicket,mosoft521/wicket,apache/wicket
|
yaml
|
## Code Before:
language: node_js
node_js:
- 0.10
git:
depth: 10
## Instruction:
Use TravisCI to run Wicket's Java & JS tests on ARM64 architecture
## Code After:
os: linux
dist: focal
group: edge
arch: arm64-graviton2
virm: vm
services: xvfb
env:
- CHROMIUM=true
cache:
directories:
- $HOME/.m2/repository
before_install:
- lscpu
- sudo apt-get install -y chromium-browser fonts-ipafont-gothic fonts-wqy-zenhei fonts-thai-tlwg fonts-kacst fonts-freefont-ttf gconf-service libasound2 libatk1.0-0 libatk-bridge2.0-0 libc6 libcairo2 libcups2 libdbus-1-3 libexpat1 libfontconfig1 libgcc1 libgconf-2-4 libgdk-pixbuf2.0-0 libglib2.0-0 libgtk-3-0 libnspr4 libpango-1.0-0 libpangocairo-1.0-0 libstdc++6 libx11-6 libx11-xcb1 libxcb1 libxcomposite1 libxcursor1 libxdamage1 libxext6 libxfixes3 libxi6 libxrandr2 libxrender1 libxss1 libxtst6 ca-certificates fonts-liberation libappindicator1 libnss3 lsb-release xdg-utils wget libappindicator3-1 libgbm1
- export JAVA_HOME="/usr/lib/jvm/java-11-openjdk-${TRAVIS_CPU_ARCH}"
- export PATH="$JAVA_HOME/bin:$PATH"
- java -version
- mvn -version
script:
- mvn clean verify -Pjs-test
|
6749f3be0cd8a8a087918a9e78811b0fbac90787
|
src/test/scala/IndexResourceSpec.scala
|
src/test/scala/IndexResourceSpec.scala
|
package com.bryanjswift.web.resources
import org.scalatest.WordSpec
import org.scalatest.matchers.ShouldMatchers
import scala.collection.mutable.Stack
class IndexSpec extends WordSpec with ShouldMatchers {
"Index" should {
val index = new IndexResource
"render Hello from Jersey" in {
assert(index.message === "<h1>Hello Bryan, From Jersey</h1>")
}
"render Hello from Velocity" in {
assert(index.velocity.indexOf("<h1>Hello Bryan, from Velocity</h1>") != 1)
}
}
}
|
package com.bryanjswift.web.resources
import org.scalatest.WordSpec
import org.scalatest.matchers.ShouldMatchers
class IndexResourceSpec extends WordSpec with ShouldMatchers {
"Index" should {
val index = new IndexResource
"render Hello from Jersey" in {
assert(index.message == "<h1>Hello Bryan, From Jersey</h1>")
}
"render Hello from Velocity" in {
assert(index.velocity.indexOf("<h1>Hello Bryan, from Velocity</h1>") != 1)
}
}
}
|
Remove stack, rename class, and clarify test
|
Remove stack, rename class, and clarify test
|
Scala
|
mit
|
bryanjswift/bryanjswift.com
|
scala
|
## Code Before:
package com.bryanjswift.web.resources
import org.scalatest.WordSpec
import org.scalatest.matchers.ShouldMatchers
import scala.collection.mutable.Stack
class IndexSpec extends WordSpec with ShouldMatchers {
"Index" should {
val index = new IndexResource
"render Hello from Jersey" in {
assert(index.message === "<h1>Hello Bryan, From Jersey</h1>")
}
"render Hello from Velocity" in {
assert(index.velocity.indexOf("<h1>Hello Bryan, from Velocity</h1>") != 1)
}
}
}
## Instruction:
Remove stack, rename class, and clarify test
## Code After:
package com.bryanjswift.web.resources
import org.scalatest.WordSpec
import org.scalatest.matchers.ShouldMatchers
class IndexResourceSpec extends WordSpec with ShouldMatchers {
"Index" should {
val index = new IndexResource
"render Hello from Jersey" in {
assert(index.message == "<h1>Hello Bryan, From Jersey</h1>")
}
"render Hello from Velocity" in {
assert(index.velocity.indexOf("<h1>Hello Bryan, from Velocity</h1>") != 1)
}
}
}
|
f5b64b1e8609efd4b53ec84d5d57bf2da7ee332c
|
Modules/Bridge/VtkGlue/itk-module-init.cmake
|
Modules/Bridge/VtkGlue/itk-module-init.cmake
|
find_package(VTK REQUIRED)
set(VERSION_MIN "5.9.20110419")
if (${VTK_VERSION} VERSION_LESS ${VERSION_MIN})
message(ERROR " VtkGlue requires VTK version ${VERSION_MIN} or newer but the current version is ${VTK_VERSION}")
endif()
# The VTK DICOMParser and vtkmetaio includes conflict with the ITK
# versions. Here we remove them from the include directories.
#
string(REGEX REPLACE "[^;]*MetaIO;"
"" VTK_INCLUDE_DIRS "${VTK_INCLUDE_DIRS}")
string(REGEX REPLACE "[^;]*vtkmetaio;"
"" VTK_INCLUDE_DIRS "${VTK_INCLUDE_DIRS}")
string(REGEX REPLACE "[^;]*DICOMParser;"
"" VTK_INCLUDE_DIRS "${VTK_INCLUDE_DIRS}")
include(${VTK_USE_FILE})
|
set(VERSION_MIN "5.9.20110419")
# Look for VTK
find_package(VTK REQUIRED)
# Older versions of VTK (VTK 5.5 for example) do not have VTK_VERSION, in this
# case it needs to be defined manually
if(NOT VTK_VERSION)
set(VTK_VERSION "${VTK_MAJOR_VERSION}.${VTK_MINOR_VERSION}.${VTK_BUILD_VERSION}")
endif()
if (${VTK_VERSION} VERSION_LESS ${VERSION_MIN})
message(ERROR " VtkGlue requires VTK version ${VERSION_MIN} or newer but the current version is ${VTK_VERSION}")
endif()
# The VTK DICOMParser and vtkmetaio includes conflict with the ITK
# versions. Here we remove them from the include directories.
#
string(REGEX REPLACE "[^;]*MetaIO;"
"" VTK_INCLUDE_DIRS "${VTK_INCLUDE_DIRS}")
string(REGEX REPLACE "[^;]*vtkmetaio;"
"" VTK_INCLUDE_DIRS "${VTK_INCLUDE_DIRS}")
string(REGEX REPLACE "[^;]*DICOMParser;"
"" VTK_INCLUDE_DIRS "${VTK_INCLUDE_DIRS}")
include(${VTK_USE_FILE})
|
Add VTK_VERSION for older VTK versions
|
BUG: Add VTK_VERSION for older VTK versions
For older VTK's, VTK_VERSION is not defined. It needs to be setup manually.
See ITK-3304
Change-Id: I5efec656190e7417c7a2cbfa1b27b8b545261ca0
|
CMake
|
apache-2.0
|
richardbeare/ITK,fbudin69500/ITK,LucHermitte/ITK,spinicist/ITK,blowekamp/ITK,ajjl/ITK,jmerkow/ITK,richardbeare/ITK,hjmjohnson/ITK,LucasGandel/ITK,spinicist/ITK,LucHermitte/ITK,jmerkow/ITK,hjmjohnson/ITK,thewtex/ITK,jmerkow/ITK,biotrump/ITK,blowekamp/ITK,BRAINSia/ITK,malaterre/ITK,PlutoniumHeart/ITK,PlutoniumHeart/ITK,hjmjohnson/ITK,heimdali/ITK,LucHermitte/ITK,vfonov/ITK,eile/ITK,BlueBrain/ITK,fbudin69500/ITK,fedral/ITK,heimdali/ITK,jcfr/ITK,hendradarwin/ITK,spinicist/ITK,hendradarwin/ITK,jcfr/ITK,LucasGandel/ITK,ajjl/ITK,atsnyder/ITK,jcfr/ITK,InsightSoftwareConsortium/ITK,BlueBrain/ITK,stnava/ITK,biotrump/ITK,stnava/ITK,jmerkow/ITK,blowekamp/ITK,atsnyder/ITK,LucasGandel/ITK,heimdali/ITK,vfonov/ITK,richardbeare/ITK,zachary-williamson/ITK,LucasGandel/ITK,fedral/ITK,PlutoniumHeart/ITK,atsnyder/ITK,Kitware/ITK,heimdali/ITK,hendradarwin/ITK,jcfr/ITK,malaterre/ITK,vfonov/ITK,thewtex/ITK,PlutoniumHeart/ITK,atsnyder/ITK,PlutoniumHeart/ITK,jmerkow/ITK,BRAINSia/ITK,LucHermitte/ITK,stnava/ITK,BRAINSia/ITK,Kitware/ITK,thewtex/ITK,hendradarwin/ITK,heimdali/ITK,fbudin69500/ITK,zachary-williamson/ITK,richardbeare/ITK,malaterre/ITK,jmerkow/ITK,vfonov/ITK,malaterre/ITK,Kitware/ITK,zachary-williamson/ITK,blowekamp/ITK,malaterre/ITK,heimdali/ITK,fedral/ITK,malaterre/ITK,LucasGandel/ITK,InsightSoftwareConsortium/ITK,zachary-williamson/ITK,ajjl/ITK,BlueBrain/ITK,LucHermitte/ITK,biotrump/ITK,stnava/ITK,fedral/ITK,ajjl/ITK,zachary-williamson/ITK,jcfr/ITK,spinicist/ITK,stnava/ITK,malaterre/ITK,eile/ITK,LucasGandel/ITK,Kitware/ITK,fbudin69500/ITK,ajjl/ITK,atsnyder/ITK,LucHermitte/ITK,msmolens/ITK,fedral/ITK,hjmjohnson/ITK,stnava/ITK,atsnyder/ITK,BRAINSia/ITK,thewtex/ITK,fedral/ITK,InsightSoftwareConsortium/ITK,eile/ITK,Kitware/ITK,hendradarwin/ITK,Kitware/ITK,ajjl/ITK,malaterre/ITK,fbudin69500/ITK,hendradarwin/ITK,BlueBrain/ITK,malaterre/ITK,hendradarwin/ITK,stnava/ITK,heimdali/ITK,hendradarwin/ITK,stnava/ITK,heimdali/ITK,jcfr/ITK,blowekamp/ITK,blowekamp/ITK,fbudin69500/ITK,fbudin69500/ITK,richardbeare/ITK,jcfr/ITK,atsnyder/ITK,spinicist/ITK,InsightSoftwareConsortium/ITK,atsnyder/ITK,stnava/ITK,spinicist/ITK,BRAINSia/ITK,biotrump/ITK,eile/ITK,Kitware/ITK,InsightSoftwareConsortium/ITK,thewtex/ITK,fedral/ITK,jmerkow/ITK,BlueBrain/ITK,blowekamp/ITK,BlueBrain/ITK,BlueBrain/ITK,ajjl/ITK,msmolens/ITK,eile/ITK,hjmjohnson/ITK,msmolens/ITK,fedral/ITK,richardbeare/ITK,fbudin69500/ITK,eile/ITK,thewtex/ITK,jmerkow/ITK,thewtex/ITK,msmolens/ITK,biotrump/ITK,eile/ITK,zachary-williamson/ITK,PlutoniumHeart/ITK,LucHermitte/ITK,InsightSoftwareConsortium/ITK,vfonov/ITK,spinicist/ITK,hjmjohnson/ITK,spinicist/ITK,LucasGandel/ITK,BRAINSia/ITK,biotrump/ITK,hjmjohnson/ITK,vfonov/ITK,eile/ITK,LucHermitte/ITK,atsnyder/ITK,zachary-williamson/ITK,PlutoniumHeart/ITK,vfonov/ITK,vfonov/ITK,blowekamp/ITK,zachary-williamson/ITK,jcfr/ITK,LucasGandel/ITK,InsightSoftwareConsortium/ITK,msmolens/ITK,richardbeare/ITK,vfonov/ITK,ajjl/ITK,zachary-williamson/ITK,eile/ITK,BRAINSia/ITK,spinicist/ITK,biotrump/ITK,msmolens/ITK,BlueBrain/ITK,PlutoniumHeart/ITK,msmolens/ITK,msmolens/ITK,biotrump/ITK
|
cmake
|
## Code Before:
find_package(VTK REQUIRED)
set(VERSION_MIN "5.9.20110419")
if (${VTK_VERSION} VERSION_LESS ${VERSION_MIN})
message(ERROR " VtkGlue requires VTK version ${VERSION_MIN} or newer but the current version is ${VTK_VERSION}")
endif()
# The VTK DICOMParser and vtkmetaio includes conflict with the ITK
# versions. Here we remove them from the include directories.
#
string(REGEX REPLACE "[^;]*MetaIO;"
"" VTK_INCLUDE_DIRS "${VTK_INCLUDE_DIRS}")
string(REGEX REPLACE "[^;]*vtkmetaio;"
"" VTK_INCLUDE_DIRS "${VTK_INCLUDE_DIRS}")
string(REGEX REPLACE "[^;]*DICOMParser;"
"" VTK_INCLUDE_DIRS "${VTK_INCLUDE_DIRS}")
include(${VTK_USE_FILE})
## Instruction:
BUG: Add VTK_VERSION for older VTK versions
For older VTK's, VTK_VERSION is not defined. It needs to be setup manually.
See ITK-3304
Change-Id: I5efec656190e7417c7a2cbfa1b27b8b545261ca0
## Code After:
set(VERSION_MIN "5.9.20110419")
# Look for VTK
find_package(VTK REQUIRED)
# Older versions of VTK (VTK 5.5 for example) do not have VTK_VERSION, in this
# case it needs to be defined manually
if(NOT VTK_VERSION)
set(VTK_VERSION "${VTK_MAJOR_VERSION}.${VTK_MINOR_VERSION}.${VTK_BUILD_VERSION}")
endif()
if (${VTK_VERSION} VERSION_LESS ${VERSION_MIN})
message(ERROR " VtkGlue requires VTK version ${VERSION_MIN} or newer but the current version is ${VTK_VERSION}")
endif()
# The VTK DICOMParser and vtkmetaio includes conflict with the ITK
# versions. Here we remove them from the include directories.
#
string(REGEX REPLACE "[^;]*MetaIO;"
"" VTK_INCLUDE_DIRS "${VTK_INCLUDE_DIRS}")
string(REGEX REPLACE "[^;]*vtkmetaio;"
"" VTK_INCLUDE_DIRS "${VTK_INCLUDE_DIRS}")
string(REGEX REPLACE "[^;]*DICOMParser;"
"" VTK_INCLUDE_DIRS "${VTK_INCLUDE_DIRS}")
include(${VTK_USE_FILE})
|
93fea72f1cef8c3b4c83af358f05a162833d7f62
|
client/views/blog/show.html
|
client/views/blog/show.html
|
<template name="blogShow">
<div id="meteor-blog" class="blog-single">
<div class="container">
<div class="row">
<div class="col-md-12">
{{> blogShowBody}}
</div>
</div>
</div>
</div>
</template>
<template name="blogShowBody">
<article class="post">
<h2>{{title}}</h2>
<div class="post-meta">
By {{authorName}} on {{blogFormatDate publishedAt}}
</div>
<div class="the-content">
{{{html}}}
</div>
</article>
<div class="blog-back-link pull-left">
<a href="{{pathFor 'blogIndex'}}"><i class="icon-caret-left"></i>Back to the Blog</a>
</div>
<div class="share-buttons pull-right">
<a target="_blank" class="btn-facebook fb-share" href="#"><i class="icon-facebook"></i>Share on Facebook </a>
<a class="btn-twitter tw-share" href="#"><i class="icon-twitter"></i>Share on Twitter </a>
</div>
</template>
|
<template name="blogShow">
<div id="meteor-blog" class="blog-single">
<div class="container">
<div class="row">
<div class="col-md-8 col-md-offset-2">
{{> blogShowBody}}
</div>
</div>
</div>
</div>
</template>
<template name="blogShowBody">
<article class="post">
<h2>{{title}}</h2>
<div class="post-meta">
By {{authorName}} on {{blogFormatDate publishedAt}}
</div>
<div class="the-content">
{{{html}}}
</div>
</article>
<div class="blog-back-link pull-left">
<a href="{{pathFor 'blogIndex'}}"><i class="icon-caret-left"></i>Back to the Blog</a>
</div>
<div class="share-buttons pull-right">
<a target="_blank" class="btn-facebook fb-share" href="#"><i class="icon-facebook"></i>Share on Facebook </a>
<a class="btn-twitter tw-share" href="#"><i class="icon-twitter"></i>Share on Twitter </a>
</div>
</template>
|
Make default theme more narrow
|
Make default theme more narrow
|
HTML
|
mit
|
NIKKTTO/meteor-blog,dubvfan87/meteor-blog,DavidSichau/meteor-blog,waitingkuo/meteor-blog,tranc99/meteor-blog-1,hwillson/meteor-blog,SierraGolf/meteor-blog,jmwenda/meteor-blog,Differential/meteor-blog,dj0nes/meteor-blog,hwillson/meteor-blog,NIKKTTO/meteor-blog,johnschult/meteor-blog,discdiver/meteor-blog,dubvfan87/meteor-blog,mcknightg/meteor-blog,waitingkuo/meteor-blog,dj0nes/meteor-blog,parkeasz/meteor-blog,spencercarli/meteor-blog,dandv/meteor-blog,DavidSichau/meteor-blog,Differential/meteor-blog,IQ2022/meteor-blog,johnschult/meteor-blog,meteor-blog/meteor-blog,bhirsch42/meteor-blog,meteor-blog/meteor-blog,samcorcos/meteor-blog,IQ2022/meteor-blog,bhirsch42/meteor-blog,issacting93/avenue,mcknightg/meteor-blog,jmwenda/meteor-blog,discdiver/meteor-blog,yalexx/meteor-blog,yalexx/meteor-blog,parkeasz/meteor-blog,dandv/meteor-blog,tranc99/meteor-blog-1,SierraGolf/meteor-blog,bubuzzz/meteor-blog
|
html
|
## Code Before:
<template name="blogShow">
<div id="meteor-blog" class="blog-single">
<div class="container">
<div class="row">
<div class="col-md-12">
{{> blogShowBody}}
</div>
</div>
</div>
</div>
</template>
<template name="blogShowBody">
<article class="post">
<h2>{{title}}</h2>
<div class="post-meta">
By {{authorName}} on {{blogFormatDate publishedAt}}
</div>
<div class="the-content">
{{{html}}}
</div>
</article>
<div class="blog-back-link pull-left">
<a href="{{pathFor 'blogIndex'}}"><i class="icon-caret-left"></i>Back to the Blog</a>
</div>
<div class="share-buttons pull-right">
<a target="_blank" class="btn-facebook fb-share" href="#"><i class="icon-facebook"></i>Share on Facebook </a>
<a class="btn-twitter tw-share" href="#"><i class="icon-twitter"></i>Share on Twitter </a>
</div>
</template>
## Instruction:
Make default theme more narrow
## Code After:
<template name="blogShow">
<div id="meteor-blog" class="blog-single">
<div class="container">
<div class="row">
<div class="col-md-8 col-md-offset-2">
{{> blogShowBody}}
</div>
</div>
</div>
</div>
</template>
<template name="blogShowBody">
<article class="post">
<h2>{{title}}</h2>
<div class="post-meta">
By {{authorName}} on {{blogFormatDate publishedAt}}
</div>
<div class="the-content">
{{{html}}}
</div>
</article>
<div class="blog-back-link pull-left">
<a href="{{pathFor 'blogIndex'}}"><i class="icon-caret-left"></i>Back to the Blog</a>
</div>
<div class="share-buttons pull-right">
<a target="_blank" class="btn-facebook fb-share" href="#"><i class="icon-facebook"></i>Share on Facebook </a>
<a class="btn-twitter tw-share" href="#"><i class="icon-twitter"></i>Share on Twitter </a>
</div>
</template>
|
e625831fa28cc3be4dcec07284f88907bf83cc30
|
requirements.txt
|
requirements.txt
|
Flask==0.10.1
Flask-Cors==2.1.2
pymongo==3.2.2
gevent==1.1.1
# Uncomment below , when 2.0.13 is released See https://github.com/gliderlabs/docker-alpine/issues/158#issuecomment-205401343
# uWSGI==2.0.13
https://github.com/unbit/uwsgi/archive/uwsgi-2.0.zip#egg=uwsgi
python-etcd==0.3.2
future==0.15.2
pytz==2016.3
requests[security]==2.9.1
urllib3==1.15
celery[mongodb]==3.1.23
https://github.com/totem/fleet-py/archive/0.1.6.tar.gz
https://github.com/totem/yoda-py/archive/v0.1.8b2.tar.gz
https://github.com/totem/flask-hyperschema/archive/0.2.0.tar.gz
https://github.com/totem/totem-encrypt/archive/master.tar.gz
https://github.com/dlitz/pycrypto/archive/v2.7a1.tar.gz
|
Flask==0.10.1
Flask-Cors==2.1.2
pymongo==3.2.2
gevent==1.1.1
uWSGI==2.0.13.1
python-etcd==0.3.2
future==0.15.2
pytz==2016.3
requests[security]==2.9.1
urllib3==1.15
celery[mongodb]==3.1.23
https://github.com/totem/fleet-py/archive/0.1.6.tar.gz
https://github.com/totem/yoda-py/archive/v0.1.8b2.tar.gz
https://github.com/totem/flask-hyperschema/archive/0.2.0.tar.gz
https://github.com/totem/totem-encrypt/archive/master.tar.gz
https://github.com/dlitz/pycrypto/archive/v2.7a1.tar.gz
|
Fix CoreOS 1010.5 upgrade (uwsgi kernel issue)
|
Fix CoreOS 1010.5 upgrade (uwsgi kernel issue)
Fix CoreOS 1010.5 upgrade (uwsgi kernel issue)
|
Text
|
mit
|
totem/cluster-deployer,totem/cluster-deployer,totem/cluster-deployer
|
text
|
## Code Before:
Flask==0.10.1
Flask-Cors==2.1.2
pymongo==3.2.2
gevent==1.1.1
# Uncomment below , when 2.0.13 is released See https://github.com/gliderlabs/docker-alpine/issues/158#issuecomment-205401343
# uWSGI==2.0.13
https://github.com/unbit/uwsgi/archive/uwsgi-2.0.zip#egg=uwsgi
python-etcd==0.3.2
future==0.15.2
pytz==2016.3
requests[security]==2.9.1
urllib3==1.15
celery[mongodb]==3.1.23
https://github.com/totem/fleet-py/archive/0.1.6.tar.gz
https://github.com/totem/yoda-py/archive/v0.1.8b2.tar.gz
https://github.com/totem/flask-hyperschema/archive/0.2.0.tar.gz
https://github.com/totem/totem-encrypt/archive/master.tar.gz
https://github.com/dlitz/pycrypto/archive/v2.7a1.tar.gz
## Instruction:
Fix CoreOS 1010.5 upgrade (uwsgi kernel issue)
Fix CoreOS 1010.5 upgrade (uwsgi kernel issue)
## Code After:
Flask==0.10.1
Flask-Cors==2.1.2
pymongo==3.2.2
gevent==1.1.1
uWSGI==2.0.13.1
python-etcd==0.3.2
future==0.15.2
pytz==2016.3
requests[security]==2.9.1
urllib3==1.15
celery[mongodb]==3.1.23
https://github.com/totem/fleet-py/archive/0.1.6.tar.gz
https://github.com/totem/yoda-py/archive/v0.1.8b2.tar.gz
https://github.com/totem/flask-hyperschema/archive/0.2.0.tar.gz
https://github.com/totem/totem-encrypt/archive/master.tar.gz
https://github.com/dlitz/pycrypto/archive/v2.7a1.tar.gz
|
a6b4678e5a2ef3b85b9b42349669565b49c10136
|
example_app_generator/no_active_record/app/models/in_memory/model.rb
|
example_app_generator/no_active_record/app/models/in_memory/model.rb
|
require 'active_model'
raise "ActiveRecord is defined but should not be!" if defined?(::ActiveRecord)
module InMemory
module Persistence
def all
@all_records ||= []
end
def count
all.length
end
alias_method :size, :count
alias_method :length, :count
def create!(attributes = {})
with_id = { :id => next_id, :persisted => true }
all << record = new(with_id.merge(attributes))
record
end
def next_id
@id_count ||= 0
@id_count += 1
end
end
class Model
extend Persistence
if defined?(::ActiveModel::Model)
include ::ActiveModel::Model
else
extend ::ActiveModel::Naming
include ::ActiveModel::Conversion
include ::ActiveModel::Validations
def initialize(attributes = {})
attributes.each do |name, value|
send("#{name}=", value)
end
end
end
attr_accessor :id, :persisted
alias_method :persisted?, :persisted
def new_record?
!persisted?
end
def to_param
id.to_s
end
end
end
|
require 'active_model'
raise "ActiveRecord is defined but should not be!" if defined?(::ActiveRecord)
module InMemory
module Persistence
def all
@all_records ||= []
end
def count
all.length
end
alias_method :size, :count
alias_method :length, :count
def last
all.last
end
def find(id)
id = id.to_i
all.find { |record| record.id == id } || raise
end
def create!(attributes = {})
record = new(attributes)
record.save
record
end
def next_id
@id_count ||= 0
@id_count += 1
end
end
class Model
extend Persistence
if defined?(::ActiveModel::Model)
include ::ActiveModel::Model
else
extend ::ActiveModel::Naming
include ::ActiveModel::Conversion
include ::ActiveModel::Validations
def initialize(attributes = {})
assign_attributes(attributes)
end
end
attr_accessor :id, :persisted
alias_method :persisted?, :persisted
def update(attributes)
assign_attributes(attributes)
save
end
alias_method :update_attributes, :update
def assign_attributes(attributes)
attributes.each do |name, value|
__send__("#{name}=", value)
end
end
def save(*)
self.id = self.class.next_id
self.class.all << self
true
end
def destroy
self.class.all.delete(self)
true
end
def reload(*)
self
end
def ==(other)
other.is_a?(self.class) && id == other.id
end
def persisted?
!id.nil?
end
def new_record?
!persisted?
end
def to_param
id.to_s
end
end
end
|
Enhance InMemory::Model in the no AR example app
|
Enhance InMemory::Model in the no AR example app
|
Ruby
|
mit
|
rspec/rspec-rails,rspec/rspec-rails,rspec/rspec-rails
|
ruby
|
## Code Before:
require 'active_model'
raise "ActiveRecord is defined but should not be!" if defined?(::ActiveRecord)
module InMemory
module Persistence
def all
@all_records ||= []
end
def count
all.length
end
alias_method :size, :count
alias_method :length, :count
def create!(attributes = {})
with_id = { :id => next_id, :persisted => true }
all << record = new(with_id.merge(attributes))
record
end
def next_id
@id_count ||= 0
@id_count += 1
end
end
class Model
extend Persistence
if defined?(::ActiveModel::Model)
include ::ActiveModel::Model
else
extend ::ActiveModel::Naming
include ::ActiveModel::Conversion
include ::ActiveModel::Validations
def initialize(attributes = {})
attributes.each do |name, value|
send("#{name}=", value)
end
end
end
attr_accessor :id, :persisted
alias_method :persisted?, :persisted
def new_record?
!persisted?
end
def to_param
id.to_s
end
end
end
## Instruction:
Enhance InMemory::Model in the no AR example app
## Code After:
require 'active_model'
raise "ActiveRecord is defined but should not be!" if defined?(::ActiveRecord)
module InMemory
module Persistence
def all
@all_records ||= []
end
def count
all.length
end
alias_method :size, :count
alias_method :length, :count
def last
all.last
end
def find(id)
id = id.to_i
all.find { |record| record.id == id } || raise
end
def create!(attributes = {})
record = new(attributes)
record.save
record
end
def next_id
@id_count ||= 0
@id_count += 1
end
end
class Model
extend Persistence
if defined?(::ActiveModel::Model)
include ::ActiveModel::Model
else
extend ::ActiveModel::Naming
include ::ActiveModel::Conversion
include ::ActiveModel::Validations
def initialize(attributes = {})
assign_attributes(attributes)
end
end
attr_accessor :id, :persisted
alias_method :persisted?, :persisted
def update(attributes)
assign_attributes(attributes)
save
end
alias_method :update_attributes, :update
def assign_attributes(attributes)
attributes.each do |name, value|
__send__("#{name}=", value)
end
end
def save(*)
self.id = self.class.next_id
self.class.all << self
true
end
def destroy
self.class.all.delete(self)
true
end
def reload(*)
self
end
def ==(other)
other.is_a?(self.class) && id == other.id
end
def persisted?
!id.nil?
end
def new_record?
!persisted?
end
def to_param
id.to_s
end
end
end
|
058b32fb65f9c5dbdbba35d3b554be4f4078a335
|
README.md
|
README.md
|
Reactive Streams: AMQP
====
Experimental implementation of [Reactive Streams](http://www.reactive-streams.org) for AMQP based on [RabbitMQ](https://www.rabbitmq.com/) library.
Available at Maven Central:
libraryDependencies += "io.scalac" % "reactive-rabbit_2.11" % "0.2.0"
Examples
----
#### Akka Streams (0.9)
```Scala
import akka.actor.ActorSystem
import akka.stream.scaladsl2.{FlowMaterializer, Sink, Source}
import io.scalac.amqp.Connection
// streaming invoices to Accounting Department
val connection = Connection()
val queue = connection.consume(queue = "invoices")
val exchange = connection.publish(exchange = "accounting_department",
routingKey = "invoices")
implicit val system = ActorSystem()
implicit val materializer = FlowMaterializer()
Source(queue).map(_.message).connect(Sink(exchange)).run()
```
|
Reactive Streams: AMQP
====
Experimental implementation of [Reactive Streams](http://www.reactive-streams.org) for AMQP based on [RabbitMQ](https://www.rabbitmq.com/) library.
Available at Maven Central:
libraryDependencies += "io.scalac" % "reactive-rabbit_2.11" % "0.2.0"
Examples
----
#### Akka Streams (0.10)
```Scala
import akka.actor.ActorSystem
import akka.stream.FlowMaterializer
import akka.stream.scaladsl.{Sink, Source}
import io.scalac.amqp.Connection
// streaming invoices to Accounting Department
val connection = Connection()
val queue = connection.consume(queue = "invoices")
val exchange = connection.publish(exchange = "accounting_department",
routingKey = "invoices")
implicit val system = ActorSystem()
implicit val materializer = FlowMaterializer()
Source(queue).map(_.message).to(Sink(exchange)).run()
```
|
Update Akka Streams example to version 0.10
|
Update Akka Streams example to version 0.10
|
Markdown
|
apache-2.0
|
davidwrpayne/reactive-rabbit,ScalaConsultants/reactive-rabbit
|
markdown
|
## Code Before:
Reactive Streams: AMQP
====
Experimental implementation of [Reactive Streams](http://www.reactive-streams.org) for AMQP based on [RabbitMQ](https://www.rabbitmq.com/) library.
Available at Maven Central:
libraryDependencies += "io.scalac" % "reactive-rabbit_2.11" % "0.2.0"
Examples
----
#### Akka Streams (0.9)
```Scala
import akka.actor.ActorSystem
import akka.stream.scaladsl2.{FlowMaterializer, Sink, Source}
import io.scalac.amqp.Connection
// streaming invoices to Accounting Department
val connection = Connection()
val queue = connection.consume(queue = "invoices")
val exchange = connection.publish(exchange = "accounting_department",
routingKey = "invoices")
implicit val system = ActorSystem()
implicit val materializer = FlowMaterializer()
Source(queue).map(_.message).connect(Sink(exchange)).run()
```
## Instruction:
Update Akka Streams example to version 0.10
## Code After:
Reactive Streams: AMQP
====
Experimental implementation of [Reactive Streams](http://www.reactive-streams.org) for AMQP based on [RabbitMQ](https://www.rabbitmq.com/) library.
Available at Maven Central:
libraryDependencies += "io.scalac" % "reactive-rabbit_2.11" % "0.2.0"
Examples
----
#### Akka Streams (0.10)
```Scala
import akka.actor.ActorSystem
import akka.stream.FlowMaterializer
import akka.stream.scaladsl.{Sink, Source}
import io.scalac.amqp.Connection
// streaming invoices to Accounting Department
val connection = Connection()
val queue = connection.consume(queue = "invoices")
val exchange = connection.publish(exchange = "accounting_department",
routingKey = "invoices")
implicit val system = ActorSystem()
implicit val materializer = FlowMaterializer()
Source(queue).map(_.message).to(Sink(exchange)).run()
```
|
8a653894f9ebb2601f13df43c5a6cf41300a512b
|
lib/electric_sheep/commands/database/mysql_dump.rb
|
lib/electric_sheep/commands/database/mysql_dump.rb
|
module ElectricSheep
module Commands
module Database
class MySQLDump
include Command
include Helpers::ShellSafe
register as: "mysql_dump"
option :user
option :password, secret: true
def perform!
logger.info "Creating a dump of the \"#{input.basename}\" MySQL database"
file_resource(host, extension: '.sql').tap do |dump|
shell.exec cmd(input.name, option(:user), option(:password), dump)
end
end
def stat_database(input)
cmd=database_size_cmd(input.name, option(:user), option(:password))
shell.exec(cmd)[:out].chomp.to_i
end
private
def cmd(db, user, password, dump)
"mysqldump" +
" #{credentials(user, password)}" +
" #{shell_safe(db)} > #{shell.expand_path(dump.path)}"
end
def database_size_cmd(db, user, password)
"echo \"#{database_size_query(db)}\" | " +
"mysql --skip-column-names #{credentials(user, password)}"
end
def database_size_query(db)
"SELECT sum(data_length+index_length) FROM information_schema.tables" +
" WHERE table_schema='#{shell_safe(db)}'" +
" GROUP BY table_schema"
end
def credentials(user, password)
user.nil? && "" ||
"--user=#{shell_safe(user)} --password=#{shell_safe(password)}"
end
end
end
end
end
|
module ElectricSheep
module Commands
module Database
class MySQLDump
include Command
include Helpers::ShellSafe
register as: "mysql_dump"
option :user
option :password, secret: true
def perform!
logger.info "Creating a dump of the \"#{input.basename}\" MySQL database"
file_resource(host, extension: '.sql').tap do |dump|
shell.exec cmd(dump)
end
end
def stat_database(input)
cmd=database_size_cmd(input)
shell.exec(cmd)[:out].chomp.to_i
end
private
def cmd(dump)
"mysqldump" +
" #{credentials}" +
" #{shell_safe(input.name)} > #{shell.expand_path(dump.path)}"
end
def database_size_cmd(input)
"echo \"#{database_size_query(input.name)}\" | " +
"mysql --skip-column-names #{credentials}"
end
def database_size_query(db)
"SELECT sum(data_length+index_length) FROM information_schema.tables" +
" WHERE table_schema='#{shell_safe(db)}'" +
" GROUP BY table_schema"
end
def credentials
option(:user).nil? && "" ||
"--user=#{shell_safe(option(:user))} " +
"--password=#{shell_safe(option(:password))}"
end
end
end
end
end
|
Use options instead of arguments
|
Use options instead of arguments
|
Ruby
|
mit
|
ehartmann/electric_sheep,servebox/electric_sheep,benitoDeLaCasita/electric_sheep,ehartmann/electric_sheep,benitoDeLaCasita/electric_sheep,ehartmann/electric_sheep,servebox/electric_sheep,servebox/electric_sheep
|
ruby
|
## Code Before:
module ElectricSheep
module Commands
module Database
class MySQLDump
include Command
include Helpers::ShellSafe
register as: "mysql_dump"
option :user
option :password, secret: true
def perform!
logger.info "Creating a dump of the \"#{input.basename}\" MySQL database"
file_resource(host, extension: '.sql').tap do |dump|
shell.exec cmd(input.name, option(:user), option(:password), dump)
end
end
def stat_database(input)
cmd=database_size_cmd(input.name, option(:user), option(:password))
shell.exec(cmd)[:out].chomp.to_i
end
private
def cmd(db, user, password, dump)
"mysqldump" +
" #{credentials(user, password)}" +
" #{shell_safe(db)} > #{shell.expand_path(dump.path)}"
end
def database_size_cmd(db, user, password)
"echo \"#{database_size_query(db)}\" | " +
"mysql --skip-column-names #{credentials(user, password)}"
end
def database_size_query(db)
"SELECT sum(data_length+index_length) FROM information_schema.tables" +
" WHERE table_schema='#{shell_safe(db)}'" +
" GROUP BY table_schema"
end
def credentials(user, password)
user.nil? && "" ||
"--user=#{shell_safe(user)} --password=#{shell_safe(password)}"
end
end
end
end
end
## Instruction:
Use options instead of arguments
## Code After:
module ElectricSheep
module Commands
module Database
class MySQLDump
include Command
include Helpers::ShellSafe
register as: "mysql_dump"
option :user
option :password, secret: true
def perform!
logger.info "Creating a dump of the \"#{input.basename}\" MySQL database"
file_resource(host, extension: '.sql').tap do |dump|
shell.exec cmd(dump)
end
end
def stat_database(input)
cmd=database_size_cmd(input)
shell.exec(cmd)[:out].chomp.to_i
end
private
def cmd(dump)
"mysqldump" +
" #{credentials}" +
" #{shell_safe(input.name)} > #{shell.expand_path(dump.path)}"
end
def database_size_cmd(input)
"echo \"#{database_size_query(input.name)}\" | " +
"mysql --skip-column-names #{credentials}"
end
def database_size_query(db)
"SELECT sum(data_length+index_length) FROM information_schema.tables" +
" WHERE table_schema='#{shell_safe(db)}'" +
" GROUP BY table_schema"
end
def credentials
option(:user).nil? && "" ||
"--user=#{shell_safe(option(:user))} " +
"--password=#{shell_safe(option(:password))}"
end
end
end
end
end
|
a10624147621e9646589d820010140f0b5ea409b
|
openssl-musl/plan.sh
|
openssl-musl/plan.sh
|
source ../openssl/plan.sh
pkg_name=openssl-musl
pkg_maintainer="The Habitat Maintainers <[email protected]>"
pkg_deps=(core/musl core/zlib-musl core/cacerts)
do_prepare() {
PLAN_CONTEXT=$(abspath $PLAN_CONTEXT/../openssl) _common_prepare
dynamic_linker="$(pkg_path_for musl)/lib/ld-musl-x86_64.so.1"
LDFLAGS="$LDFLAGS -Wl,--dynamic-linker=$dynamic_linker"
export BUILD_CC=musl-gcc
build_line "Setting BUILD_CC=$BUILD_CC"
}
|
source ../openssl/plan.sh
pkg_name=openssl-musl
pkg_origin=core
pkg_maintainer="The Habitat Maintainers <[email protected]>"
pkg_description="\
OpenSSL is an open source project that provides a robust, commercial-grade, \
and full-featured toolkit for the Transport Layer Security (TLS) and Secure \
Sockets Layer (SSL) protocols. It is also a general-purpose cryptography \
library.\
"
pkg_upstream_url="https://www.openssl.org"
pkg_license=('OpenSSL')
pkg_deps=(
core/musl
core/zlib-musl
core/cacerts
)
do_prepare() {
PLAN_CONTEXT="$(abspath "$PLAN_CONTEXT/../openssl")" _common_prepare
dynamic_linker="$(pkg_path_for musl)/lib/ld-musl-x86_64.so.1"
LDFLAGS="$LDFLAGS -Wl,--dynamic-linker=$dynamic_linker"
export BUILD_CC=musl-gcc
build_line "Setting BUILD_CC=$BUILD_CC"
}
|
Update & modernize Plan style.
|
[openssl-musl] Update & modernize Plan style.
Signed-off-by: Fletcher Nichol <[email protected]>
|
Shell
|
apache-2.0
|
be-plans/be,be-plans/be,be-plans/be,be-plans/be
|
shell
|
## Code Before:
source ../openssl/plan.sh
pkg_name=openssl-musl
pkg_maintainer="The Habitat Maintainers <[email protected]>"
pkg_deps=(core/musl core/zlib-musl core/cacerts)
do_prepare() {
PLAN_CONTEXT=$(abspath $PLAN_CONTEXT/../openssl) _common_prepare
dynamic_linker="$(pkg_path_for musl)/lib/ld-musl-x86_64.so.1"
LDFLAGS="$LDFLAGS -Wl,--dynamic-linker=$dynamic_linker"
export BUILD_CC=musl-gcc
build_line "Setting BUILD_CC=$BUILD_CC"
}
## Instruction:
[openssl-musl] Update & modernize Plan style.
Signed-off-by: Fletcher Nichol <[email protected]>
## Code After:
source ../openssl/plan.sh
pkg_name=openssl-musl
pkg_origin=core
pkg_maintainer="The Habitat Maintainers <[email protected]>"
pkg_description="\
OpenSSL is an open source project that provides a robust, commercial-grade, \
and full-featured toolkit for the Transport Layer Security (TLS) and Secure \
Sockets Layer (SSL) protocols. It is also a general-purpose cryptography \
library.\
"
pkg_upstream_url="https://www.openssl.org"
pkg_license=('OpenSSL')
pkg_deps=(
core/musl
core/zlib-musl
core/cacerts
)
do_prepare() {
PLAN_CONTEXT="$(abspath "$PLAN_CONTEXT/../openssl")" _common_prepare
dynamic_linker="$(pkg_path_for musl)/lib/ld-musl-x86_64.so.1"
LDFLAGS="$LDFLAGS -Wl,--dynamic-linker=$dynamic_linker"
export BUILD_CC=musl-gcc
build_line "Setting BUILD_CC=$BUILD_CC"
}
|
3c6afaa29e1723246355e3646b7c93e5de31a204
|
app/models/attachment.rb
|
app/models/attachment.rb
|
require 'attachable'
class Attachment
include Mongoid::Document
include Mongoid::Timestamps
include Attachable
field :title
field :filename
attaches :file
def snippet
"[InlineAttachment:#{filename}]"
end
end
|
require 'attachable'
class Attachment
include Mongoid::Document
include Mongoid::Timestamps
include Attachable
field :title
field :filename
attaches :file
validates_with SafeHtml
def snippet
"[InlineAttachment:#{filename}]"
end
end
|
Validate Attachments with SafeHtml validator
|
Validate Attachments with SafeHtml validator
|
Ruby
|
mit
|
dinuksha/Test,alphagov/govuk_content_models,dinuksha/Test
|
ruby
|
## Code Before:
require 'attachable'
class Attachment
include Mongoid::Document
include Mongoid::Timestamps
include Attachable
field :title
field :filename
attaches :file
def snippet
"[InlineAttachment:#{filename}]"
end
end
## Instruction:
Validate Attachments with SafeHtml validator
## Code After:
require 'attachable'
class Attachment
include Mongoid::Document
include Mongoid::Timestamps
include Attachable
field :title
field :filename
attaches :file
validates_with SafeHtml
def snippet
"[InlineAttachment:#{filename}]"
end
end
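`SafeHtml` is a validator defined elsewhere in this codebase, so its behaviour is not visible in this record. Purely as a generic illustration of the usual idea (an allow-list of tags), here is a small Python sketch using the standard-library HTML parser; the allow-list and class name are invented.

```python
from html.parser import HTMLParser

ALLOWED_TAGS = {"p", "em", "strong", "a", "ul", "li"}

class TagAllowListCheck(HTMLParser):
    """Records every start tag that falls outside a small allow-list."""

    def __init__(self):
        super().__init__()
        self.violations = []

    def handle_starttag(self, tag, attrs):
        if tag not in ALLOWED_TAGS:
            self.violations.append(tag)

checker = TagAllowListCheck()
checker.feed("<p>fine</p><script>alert(1)</script>")
print(checker.violations)  # ['script']
```

A real validator would also look at attributes (for example `href` values and `on*` handlers), which this sketch deliberately skips.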
|
60df4f04aab2f20959f7e74204d59db33133e220
|
compiler_source/6_actions_perl/template-runtime-code-for-action-get-count-of-words.txt
|
compiler_source/6_actions_perl/template-runtime-code-for-action-get-count-of-words.txt
|
template-runtime-code-for-every-action-begin
$global_number_of_operands = number-of-operands ; <new_line>
runtime-code-check-operand-initialize
check-operand-one-is-phrase-name
if ( $global_yes_or_no_operand_error == $global_no ) { <new_line>
$global_source_text = code-get-or-put-phrase-definition-begin runtime-code-for-operand-number-one code-get-or-put-phrase-definition-end ; <new_line>
&function__get_count_of_words( ) ; <new_line>
runtime-code-storage-item-result = $global_action_result ; <new_line>
} else { <new_line>
template-runtime-code-store-result-for-case-error
} <new_line>
template-runtime-code-for-every-action-end
|
template-runtime-code-for-every-action-begin
$global_number_of_operands = number-of-operands ; <new_line>
runtime-code-check-operand-initialize
check-operand-one-is-phrase-name
if ( $global_yes_or_no_operand_error == $global_no ) { <new_line>
$global_source_text = code-get-or-put-phrase-definition-begin runtime-code-for-operand-number-one code-get-or-put-phrase-definition-end ; <new_line>
&function__get_count_of_words_old_version( ) ; <new_line>
runtime-code-storage-item-result = $global_action_result ; <new_line>
} else { <new_line>
template-runtime-code-store-result-for-case-error
} <new_line>
template-runtime-code-for-every-action-end
|
Switch to "old version" of runtime function
|
Switch to "old version" of runtime function
|
Text
|
artistic-2.0
|
cpsolver/Dashrep-language,cpsolver/Dashrep-language,cpsolver/Dashrep-language
|
text
|
## Code Before:
template-runtime-code-for-every-action-begin
$global_number_of_operands = number-of-operands ; <new_line>
runtime-code-check-operand-initialize
check-operand-one-is-phrase-name
if ( $global_yes_or_no_operand_error == $global_no ) { <new_line>
$global_source_text = code-get-or-put-phrase-definition-begin runtime-code-for-operand-number-one code-get-or-put-phrase-definition-end ; <new_line>
&function__get_count_of_words( ) ; <new_line>
runtime-code-storage-item-result = $global_action_result ; <new_line>
} else { <new_line>
template-runtime-code-store-result-for-case-error
} <new_line>
template-runtime-code-for-every-action-end
## Instruction:
Switch to "old version" of runtime function
## Code After:
template-runtime-code-for-every-action-begin
$global_number_of_operands = number-of-operands ; <new_line>
runtime-code-check-operand-initialize
check-operand-one-is-phrase-name
if ( $global_yes_or_no_operand_error == $global_no ) { <new_line>
$global_source_text = code-get-or-put-phrase-definition-begin runtime-code-for-operand-number-one code-get-or-put-phrase-definition-end ; <new_line>
&function__get_count_of_words_old_version( ) ; <new_line>
runtime-code-storage-item-result = $global_action_result ; <new_line>
} else { <new_line>
template-runtime-code-store-result-for-case-error
} <new_line>
template-runtime-code-for-every-action-end
|
1987b85ee8a4d7c29fe2d334c8585339568a5b96
|
gremlin-console/src/main/resources/com/tinkerpop/gremlin/console/plugin/UtilitiesGremlinPluginScript.groovy
|
gremlin-console/src/main/resources/com/tinkerpop/gremlin/console/plugin/UtilitiesGremlinPluginScript.groovy
|
/**
* @author Daniel Kuppitz (http://thinkaurelius.com)
* @author Marko A. Rodriguez (http://markorodriguez.com)
*/
clock = { int loops = 100, Closure closure ->
closure.call() // warmup
(1..loops).collect {
t = System.nanoTime()
closure.call()
((System.nanoTime() - t) * 0.000001)
}.mean()
}
|
/**
* @author Daniel Kuppitz (http://thinkaurelius.com)
* @author Marko A. Rodriguez (http://markorodriguez.com)
* @author Stephen Mallette (http://stephen.genoprime.com)
*/
clock = { int loops = 100, Closure closure ->
closure.call() // warmup
(1..loops).collect {
t = System.nanoTime()
closure.call()
((System.nanoTime() - t) * 0.000001)
}.mean()
}
describeGraph = { Class<? extends com.tinkerpop.gremlin.structure.Graph> c ->
def lf = System.getProperty("line.separator")
def optIns = c.getAnnotationsByType(com.tinkerpop.gremlin.structure.Graph.OptIn)
def optOuts = c.getAnnotationsByType(com.tinkerpop.gremlin.structure.Graph.OptOut)
def optInCount = optIns != null ? optIns.size() : 0
def optOutCount = optOuts != null ? optOuts.size() : 0
def suitesSupported = optIns != null && optIns.size() > 0 ? optIns.collect{"> " + it.value()}.join(lf) : "> none"
def testsOptedOut = optOuts != null && optOuts.size() > 0 ? optOuts.collect{"> " + it.test() + "#" + it.method() + " [" + it.reason() + "]"}.join(lf) : "> none"
"""
IMPLEMENTATION - ${c.getCanonicalName()}
TINKERPOP TEST SUITE
- Compliant with ($optInCount of 4 suites)
$suitesSupported
- Opts out of $optOutCount individual tests
$testsOptedOut
$lf
- NOTE -
The describeGraph() function shows information about a Graph implementation.
It uses information found in Java Annotations on the implementation itself to
determine this output and does not assess the actual code of the test cases of
the implementation itself. Compliant implementations will faithfully and
honestly supply these Annotations to provide the most accurate depiction of
their support."""
}
|
Add describeGraph utility function to plugin.
|
Add describeGraph utility function to plugin.
|
Groovy
|
apache-2.0
|
rmagen/incubator-tinkerpop,dalaro/incubator-tinkerpop,Lab41/tinkerpop3,velo/incubator-tinkerpop,n-tran/incubator-tinkerpop,BrynCooke/incubator-tinkerpop,RussellSpitzer/incubator-tinkerpop,gdelafosse/incubator-tinkerpop,artem-aliev/tinkerpop,jorgebay/tinkerpop,jorgebay/tinkerpop,apache/incubator-tinkerpop,krlohnes/tinkerpop,n-tran/incubator-tinkerpop,RedSeal-co/incubator-tinkerpop,apache/tinkerpop,gdelafosse/incubator-tinkerpop,newkek/incubator-tinkerpop,artem-aliev/tinkerpop,mpollmeier/tinkerpop3,rmagen/incubator-tinkerpop,pluradj/incubator-tinkerpop,dalaro/incubator-tinkerpop,pluradj/incubator-tinkerpop,PommeVerte/incubator-tinkerpop,apache/tinkerpop,apache/incubator-tinkerpop,apache/tinkerpop,apache/tinkerpop,robertdale/tinkerpop,RedSeal-co/incubator-tinkerpop,apache/tinkerpop,robertdale/tinkerpop,edgarRd/incubator-tinkerpop,jorgebay/tinkerpop,artem-aliev/tinkerpop,krlohnes/tinkerpop,RedSeal-co/incubator-tinkerpop,dalaro/incubator-tinkerpop,RussellSpitzer/incubator-tinkerpop,robertdale/tinkerpop,newkek/incubator-tinkerpop,BrynCooke/incubator-tinkerpop,gdelafosse/incubator-tinkerpop,vtslab/incubator-tinkerpop,apache/incubator-tinkerpop,PommeVerte/incubator-tinkerpop,artem-aliev/tinkerpop,edgarRd/incubator-tinkerpop,vtslab/incubator-tinkerpop,newkek/incubator-tinkerpop,velo/incubator-tinkerpop,samiunn/incubator-tinkerpop,apache/tinkerpop,RussellSpitzer/incubator-tinkerpop,pluradj/incubator-tinkerpop,mike-tr-adamson/incubator-tinkerpop,PommeVerte/incubator-tinkerpop,apache/tinkerpop,artem-aliev/tinkerpop,samiunn/incubator-tinkerpop,samiunn/incubator-tinkerpop,krlohnes/tinkerpop,BrynCooke/incubator-tinkerpop,mike-tr-adamson/incubator-tinkerpop,vtslab/incubator-tinkerpop,krlohnes/tinkerpop,robertdale/tinkerpop,velo/incubator-tinkerpop,robertdale/tinkerpop,rmagen/incubator-tinkerpop,krlohnes/tinkerpop,mike-tr-adamson/incubator-tinkerpop,Lab41/tinkerpop3,n-tran/incubator-tinkerpop,mpollmeier/tinkerpop3,jorgebay/tinkerpop,edgarRd/incubator-tinkerpop
|
groovy
|
## Code Before:
/**
* @author Daniel Kuppitz (http://thinkaurelius.com)
* @author Marko A. Rodriguez (http://markorodriguez.com)
*/
clock = { int loops = 100, Closure closure ->
closure.call() // warmup
(1..loops).collect {
t = System.nanoTime()
closure.call()
((System.nanoTime() - t) * 0.000001)
}.mean()
}
## Instruction:
Add describeGraph utility function to plugin.
## Code After:
/**
* @author Daniel Kuppitz (http://thinkaurelius.com)
* @author Marko A. Rodriguez (http://markorodriguez.com)
* @author Stephen Mallette (http://stephen.genoprime.com)
*/
clock = { int loops = 100, Closure closure ->
closure.call() // warmup
(1..loops).collect {
t = System.nanoTime()
closure.call()
((System.nanoTime() - t) * 0.000001)
}.mean()
}
describeGraph = { Class<? extends com.tinkerpop.gremlin.structure.Graph> c ->
def lf = System.getProperty("line.separator")
def optIns = c.getAnnotationsByType(com.tinkerpop.gremlin.structure.Graph.OptIn)
def optOuts = c.getAnnotationsByType(com.tinkerpop.gremlin.structure.Graph.OptOut)
def optInCount = optIns != null ? optIns.size() : 0
def optOutCount = optOuts != null ? optOuts.size() : 0
def suitesSupported = optIns != null && optIns.size() > 0 ? optIns.collect{"> " + it.value()}.join(lf) : "> none"
def testsOptedOut = optOuts != null && optOuts.size() > 0 ? optOuts.collect{"> " + it.test() + "#" + it.method() + " [" + it.reason() + "]"}.join(lf) : "> none"
"""
IMPLEMENTATION - ${c.getCanonicalName()}
TINKERPOP TEST SUITE
- Compliant with ($optInCount of 4 suites)
$suitesSupported
- Opts out of $optOutCount individual tests
$testsOptedOut
$lf
- NOTE -
The describeGraph() function shows information about a Graph implementation.
It uses information found in Java Annotations on the implementation itself to
determine this output and does not assess the actual code of the test cases of
the implementation itself. Compliant implementations will faithfully and
honestly supply these Annotations to provide the most accurate depiction of
their support."""
}
|
bc9d6c12e4d8732facdb7198adc0359705612773
|
my-swot/eng.md
|
my-swot/eng.md
|
<permalink>eng</permalink>
<month>7</month>
<year>2021</year>
# My personal SWOT
in one of the assignatures by professor Daniel Holt (@DanielTHolt)
SWOT, or Strength, Weaknesses, Opportunities and Threats is a tool generally used by companies to asses themselves and their competitors.
A SWOT helps potential creditors take important investing decisions. But this analysis can be applied to multiple entities, not only companies, but also individuals. In this article I publish my personal SWOT, an analysis of my own strengths, weaknesses, opportunities and threats. After all each of us is also a company itself.
We are the CEO of our own lives!
> You are the CEO of your own life, you are the captain of your ship, you are the commander. - Extract from [Source](https://www.awakenthegreatnesswithin.com/)
| | Helpful | Harmful |
| --------------- | ------- | ------- |
| Internal origin | | Title |
| External origin | | |

<hidden>fast drafting</hidden>
<hidden>themes & ideas</hidden>
|
<permalink>eng</permalink>
<month>7</month>
<year>2021</year>
# My personal SWOT
I started the Innovation MBA about one month ago at the University Of Louisville in Kentucky. The innovation [MBA is the ranked #25 in the U.S. by the Princeton Review](https://business.louisville.edu/academics-programs/graduate-programs/imba/#:~:text=Ranked%20%2325%20in%20the%20U.S.,only%2030%20students%20per%20cohort.), admission to the program is competitive, with only 30 students per cohort.
---
SWOT, or Strength, Weaknesses, Opportunities and Threats is a tool generally used by companies to asses themselves and their competitors.
A SWOT helps potential creditors take important investing decisions. But this analysis can be applied to multiple entities, not only companies, but also individuals. In this article I publish my personal SWOT, an analysis of my own strengths, weaknesses, opportunities and threats. After all each of us is also a company itself.
We are the CEO of our own lives!
> You are the CEO of your own life, you are the captain of your ship, you are the commander. - From [this blog](https://www.awakenthegreatnesswithin.com/)
### SWOT
| | Helpful | Harmful |
| --------------- | ------- | ------- |
| Internal origin | | Title |
| External origin | | |
Further actions:
- Follow @DanielTHolt, @ATGWSuccess, or myself at @adelriosantiago.

<hidden>fast drafting</hidden>
<hidden>themes & ideas</hidden>
|
Add MBA info and add quote
|
Add MBA info and add quote
|
Markdown
|
mit
|
adelriosantiago/adelriosantiago-blog
|
markdown
|
## Code Before:
<permalink>eng</permalink>
<month>7</month>
<year>2021</year>
# My personal SWOT
in one of the assignatures by professor Daniel Holt (@DanielTHolt)
SWOT, or Strength, Weaknesses, Opportunities and Threats is a tool generally used by companies to asses themselves and their competitors.
A SWOT helps potential creditors take important investing decisions. But this analysis can be applied to multiple entities, not only companies, but also individuals. In this article I publish my personal SWOT, an analysis of my own strengths, weaknesses, opportunities and threats. After all each of us is also a company itself.
We are the CEO of our own lives!
> You are the CEO of your own life, you are the captain of your ship, you are the commander. - Extract from [Source](https://www.awakenthegreatnesswithin.com/)
| | Helpful | Harmful |
| --------------- | ------- | ------- |
| Internal origin | | Title |
| External origin | | |

<hidden>fast drafting</hidden>
<hidden>themes & ideas</hidden>
## Instruction:
Add MBA info and add quote
## Code After:
<permalink>eng</permalink>
<month>7</month>
<year>2021</year>
# My personal SWOT
I started the Innovation MBA about one month ago at the University Of Louisville in Kentucky. The innovation [MBA is the ranked #25 in the U.S. by the Princeton Review](https://business.louisville.edu/academics-programs/graduate-programs/imba/#:~:text=Ranked%20%2325%20in%20the%20U.S.,only%2030%20students%20per%20cohort.), admission to the program is competitive, with only 30 students per cohort.
---
SWOT, or Strength, Weaknesses, Opportunities and Threats is a tool generally used by companies to asses themselves and their competitors.
A SWOT helps potential creditors take important investing decisions. But this analysis can be applied to multiple entities, not only companies, but also individuals. In this article I publish my personal SWOT, an analysis of my own strengths, weaknesses, opportunities and threats. After all each of us is also a company itself.
We are the CEO of our own lives!
> You are the CEO of your own life, you are the captain of your ship, you are the commander. - From [this blog](https://www.awakenthegreatnesswithin.com/)
### SWOT
| | Helpful | Harmful |
| --------------- | ------- | ------- |
| Internal origin | | Title |
| External origin | | |
Further actions:
- Follow @DanielTHolt, @ATGWSuccess, or myself at @adelriosantiago.

<hidden>fast drafting</hidden>
<hidden>themes & ideas</hidden>
|
64d8427a7b970ec3243179044870ddcf23dc2f09
|
README.md
|
README.md
|
<img src="https://github.com/Larpon/QtFirebase/blob/master/logo.png" align="right"/>
# QtFirebaseExample
Example Qt app for the QtFirebase project
# Quick start
1. Clone the example app and the [QtFirebase](https://github.com/Larpon/QtFirebase) project
* **Clone example project**
```
cd /path/to/projects
git clone [email protected]:Larpon/QtFirebaseExample.git
```
* **Clone the QtFirebase project**
Clone into the "extensions" folder or into other folder of your choice
```
cd /path/to/projects/QtFirebaseExample/extensions
git clone [email protected]:Larpon/QtFirebase.git
```
2. Follow the instructions in [SETUP.md](https://github.com/Larpon/QtFirebase/blob/master/SETUP.md) on how to setup QtFirebase
|
<img src="https://github.com/Larpon/QtFirebase/blob/master/docs/img/logo.png" align="right"/>
# QtFirebaseExample
Example Qt app for the QtFirebase project
# Quick start
1. Clone the example app and the [QtFirebase](https://github.com/Larpon/QtFirebase) project
* **Clone example project**
```
cd /path/to/projects
git clone [email protected]:Larpon/QtFirebaseExample.git
```
* **Clone the QtFirebase project**
Clone into the "extensions" folder or into other folder of your choice
```
cd /path/to/projects/QtFirebaseExample/extensions
git clone [email protected]:Larpon/QtFirebase.git
```
2. Follow the instructions in [SETUP.md](https://github.com/Larpon/QtFirebase/blob/master/docs/SETUP.md) on how to setup QtFirebase
|
Update references to moved files in QtFirebase project
|
Update references to moved files in QtFirebase project
|
Markdown
|
mit
|
Larpon/QtFirebaseExample,Larpon/QtFirebaseExample
|
markdown
|
## Code Before:
<img src="https://github.com/Larpon/QtFirebase/blob/master/logo.png" align="right"/>
# QtFirebaseExample
Example Qt app for the QtFirebase project
# Quick start
1. Clone the example app and the [QtFirebase](https://github.com/Larpon/QtFirebase) project
* **Clone example project**
```
cd /path/to/projects
git clone [email protected]:Larpon/QtFirebaseExample.git
```
* **Clone the QtFirebase project**
Clone into the "extensions" folder or into other folder of your choice
```
cd /path/to/projects/QtFirebaseExample/extensions
git clone [email protected]:Larpon/QtFirebase.git
```
2. Follow the instructions in [SETUP.md](https://github.com/Larpon/QtFirebase/blob/master/SETUP.md) on how to setup QtFirebase
## Instruction:
Update references to moved files in QtFirebase project
## Code After:
<img src="https://github.com/Larpon/QtFirebase/blob/master/docs/img/logo.png" align="right"/>
# QtFirebaseExample
Example Qt app for the QtFirebase project
# Quick start
1. Clone the example app and the [QtFirebase](https://github.com/Larpon/QtFirebase) project
* **Clone example project**
```
cd /path/to/projects
git clone [email protected]:Larpon/QtFirebaseExample.git
```
* **Clone the QtFirebase project**
Clone into the "extensions" folder or into other folder of your choice
```
cd /path/to/projects/QtFirebaseExample/extensions
git clone [email protected]:Larpon/QtFirebase.git
```
2. Follow the instructions in [SETUP.md](https://github.com/Larpon/QtFirebase/blob/master/docs/SETUP.md) on how to setup QtFirebase
|
0714442631fe28747a0a3de606cf1d290e4bc305
|
spec.hs
|
spec.hs
|
-- | Unit test for PassphraseGenerator functions.
import Test.Hspec
import qualified Data.Map.Strict as Map
import PassphraseGenerator
filetext :: String
filetext = "aaa\t1900\t2\naaa\t1950\t3\nbbb\t1950\t5\nbbb_VERB\t1980\t9"
y :: Int
y = 1950
main :: IO()
main = hspec $ do
describe "countDescending" $ do
it "returns LT when the first tuple should come before the second tuple" $
(countDescending ("aaa", 9) ("bbb", 6)) `shouldBe` LT
it "returns GT when first tuple comes after second tuple" $
(countDescending ("aaa", 3) ("bbb", 6)) `shouldBe` GT
it "returns EQ when no order can be determined between tuples" $
(countDescending ("aaa", 6) ("bbb", 6)) `shouldBe` EQ
describe "wordCounts" $ do
it "aaa should occur 3 times" $ do
(Map.lookup "aaa" (wordCounts y (lines filetext) Map.empty)) `shouldBe` (Just 3)
it "bbb should occur 14 times" $ do
(Map.lookup "bbb" (wordCounts y (lines filetext) Map.empty)) `shouldBe` (Just 14)
|
-- | Unit test for PassphraseGenerator functions.
import Test.Hspec
import qualified Data.Map.Strict as Map
import PassphraseGenerator
filetext :: String
filetext = "aaa\t1900\t2\naaa\t1950\t3\nAAA\t1951\t7\nbbb\t1950\t5\nbbb_VERB\t1980\t9"
y :: Int
y = 1950
main :: IO()
main = hspec $ do
describe "countDescending" $ do
it "returns LT when the first tuple should come before the second tuple" $
(countDescending ("aaa", 9) ("bbb", 6)) `shouldBe` LT
it "returns GT when first tuple comes after second tuple" $
(countDescending ("aaa", 3) ("bbb", 6)) `shouldBe` GT
it "returns EQ when no order can be determined between tuples" $
(countDescending ("aaa", 6) ("bbb", 6)) `shouldBe` EQ
describe "wordCounts" $ do
it "aaa should occur 10 times" $ do
(Map.lookup "aaa" (wordCounts y (lines filetext) Map.empty)) `shouldBe` (Just 10)
it "bbb should occur 14 times" $ do
(Map.lookup "bbb" (wordCounts y (lines filetext) Map.empty)) `shouldBe` (Just 14)
|
Test recently-added case-folding behavior of mostFrequent.hs
|
Test recently-added case-folding behavior of mostFrequent.hs
|
Haskell
|
mit
|
JohnL4/PassphraseGenerator
|
haskell
|
## Code Before:
-- | Unit test for PassphraseGenerator functions.
import Test.Hspec
import qualified Data.Map.Strict as Map
import PassphraseGenerator
filetext :: String
filetext = "aaa\t1900\t2\naaa\t1950\t3\nbbb\t1950\t5\nbbb_VERB\t1980\t9"
y :: Int
y = 1950
main :: IO()
main = hspec $ do
describe "countDescending" $ do
it "returns LT when the first tuple should come before the second tuple" $
(countDescending ("aaa", 9) ("bbb", 6)) `shouldBe` LT
it "returns GT when first tuple comes after second tuple" $
(countDescending ("aaa", 3) ("bbb", 6)) `shouldBe` GT
it "returns EQ when no order can be determined between tuples" $
(countDescending ("aaa", 6) ("bbb", 6)) `shouldBe` EQ
describe "wordCounts" $ do
it "aaa should occur 3 times" $ do
(Map.lookup "aaa" (wordCounts y (lines filetext) Map.empty)) `shouldBe` (Just 3)
it "bbb should occur 14 times" $ do
(Map.lookup "bbb" (wordCounts y (lines filetext) Map.empty)) `shouldBe` (Just 14)
## Instruction:
Test recently-added case-folding behavior of mostFrequent.hs
## Code After:
-- | Unit test for PassphraseGenerator functions.
import Test.Hspec
import qualified Data.Map.Strict as Map
import PassphraseGenerator
filetext :: String
filetext = "aaa\t1900\t2\naaa\t1950\t3\nAAA\t1951\t7\nbbb\t1950\t5\nbbb_VERB\t1980\t9"
y :: Int
y = 1950
main :: IO()
main = hspec $ do
describe "countDescending" $ do
it "returns LT when the first tuple should come before the second tuple" $
(countDescending ("aaa", 9) ("bbb", 6)) `shouldBe` LT
it "returns GT when first tuple comes after second tuple" $
(countDescending ("aaa", 3) ("bbb", 6)) `shouldBe` GT
it "returns EQ when no order can be determined between tuples" $
(countDescending ("aaa", 6) ("bbb", 6)) `shouldBe` EQ
describe "wordCounts" $ do
it "aaa should occur 10 times" $ do
(Map.lookup "aaa" (wordCounts y (lines filetext) Map.empty)) `shouldBe` (Just 10)
it "bbb should occur 14 times" $ do
(Map.lookup "bbb" (wordCounts y (lines filetext) Map.empty)) `shouldBe` (Just 14)
|
0d6a8f3978188f3e343c364806e0bb6e6ac1e643
|
tests/qtcore/qmetaobject_test.py
|
tests/qtcore/qmetaobject_test.py
|
'''Tests for static method conflicts with class methods'''
import unittest
from PySide.QtCore import *
class Foo(QFile):
pass
class qmetaobject_test(unittest.TestCase):
def test_QMetaObject(self):
qobj = QObject()
qobj_metaobj = qobj.metaObject()
self.assertEqual(qobj_metaobj.className(), "QObject")
obj = QFile()
m = obj.metaObject()
self.assertEqual(m.className(), "QFile")
self.assertNotEqual(m.methodCount(), qobj_metaobj.methodCount())
obj = Foo()
m = obj.metaObject()
self.assertEqual(m.className(), "Foo")
self.assertEqual(m.methodCount(), QFile().metaObject().methodCount())
if __name__ == '__main__':
unittest.main()
|
'''Tests for static method conflicts with class methods'''
import unittest
from PySide.QtCore import *
class Foo(QFile):
pass
class qmetaobject_test(unittest.TestCase):
def test_QMetaObject(self):
qobj = QObject()
qobj_metaobj = qobj.metaObject()
self.assertEqual(qobj_metaobj.className(), "QObject")
obj = QFile()
m = obj.metaObject()
self.assertEqual(m.className(), "QFile")
self.assertNotEqual(m.methodCount(), qobj_metaobj.methodCount())
obj = Foo()
m = obj.metaObject()
self.assertEqual(m.className(), "Foo")
f = QFile()
fm = f.metaObject()
self.assertEqual(m.methodCount(), fm.methodCount())
if __name__ == '__main__':
unittest.main()
|
Fix qmetaobject test to work with dynamic metaobject.
|
Fix qmetaobject test to work with dynamic metaobject.
|
Python
|
lgpl-2.1
|
M4rtinK/pyside-android,pankajp/pyside,enthought/pyside,PySide/PySide,qtproject/pyside-pyside,PySide/PySide,gbaty/pyside2,enthought/pyside,RobinD42/pyside,enthought/pyside,PySide/PySide,BadSingleton/pyside2,M4rtinK/pyside-android,BadSingleton/pyside2,qtproject/pyside-pyside,BadSingleton/pyside2,RobinD42/pyside,pankajp/pyside,gbaty/pyside2,pankajp/pyside,RobinD42/pyside,M4rtinK/pyside-bb10,qtproject/pyside-pyside,enthought/pyside,M4rtinK/pyside-bb10,enthought/pyside,gbaty/pyside2,qtproject/pyside-pyside,M4rtinK/pyside-bb10,M4rtinK/pyside-android,BadSingleton/pyside2,RobinD42/pyside,qtproject/pyside-pyside,IronManMark20/pyside2,M4rtinK/pyside-bb10,IronManMark20/pyside2,BadSingleton/pyside2,M4rtinK/pyside-bb10,pankajp/pyside,IronManMark20/pyside2,RobinD42/pyside,IronManMark20/pyside2,RobinD42/pyside,PySide/PySide,IronManMark20/pyside2,gbaty/pyside2,pankajp/pyside,enthought/pyside,RobinD42/pyside,PySide/PySide,M4rtinK/pyside-android,M4rtinK/pyside-bb10,M4rtinK/pyside-android,gbaty/pyside2,enthought/pyside,M4rtinK/pyside-android
|
python
|
## Code Before:
'''Tests for static method conflicts with class methods'''
import unittest
from PySide.QtCore import *
class Foo(QFile):
pass
class qmetaobject_test(unittest.TestCase):
def test_QMetaObject(self):
qobj = QObject()
qobj_metaobj = qobj.metaObject()
self.assertEqual(qobj_metaobj.className(), "QObject")
obj = QFile()
m = obj.metaObject()
self.assertEqual(m.className(), "QFile")
self.assertNotEqual(m.methodCount(), qobj_metaobj.methodCount())
obj = Foo()
m = obj.metaObject()
self.assertEqual(m.className(), "Foo")
self.assertEqual(m.methodCount(), QFile().metaObject().methodCount())
if __name__ == '__main__':
unittest.main()
## Instruction:
Fix qmetaobject test to work with dynamic metaobject.
## Code After:
'''Tests for static method conflicts with class methods'''
import unittest
from PySide.QtCore import *
class Foo(QFile):
pass
class qmetaobject_test(unittest.TestCase):
def test_QMetaObject(self):
qobj = QObject()
qobj_metaobj = qobj.metaObject()
self.assertEqual(qobj_metaobj.className(), "QObject")
obj = QFile()
m = obj.metaObject()
self.assertEqual(m.className(), "QFile")
self.assertNotEqual(m.methodCount(), qobj_metaobj.methodCount())
obj = Foo()
m = obj.metaObject()
self.assertEqual(m.className(), "Foo")
f = QFile()
fm = f.metaObject()
self.assertEqual(m.methodCount(), fm.methodCount())
if __name__ == '__main__':
unittest.main()
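One plausible reading of this change (the commit message does not spell it out) is object lifetime: `QFile().metaObject()` asks a temporary object for its meta object, and with dynamic metaobjects the temporary may be gone before the comparison runs, so the test now binds the `QFile` to a name first. The underlying CPython behaviour can be shown without Qt at all:

```python
import weakref

class Probe(object):
    pass

# A temporary referenced only inside an expression is freed by CPython's
# reference counting as soon as the expression finishes.
gone = weakref.ref(Probe())
print(gone())           # None: the temporary Probe() no longer exists

kept = Probe()          # binding the instance to a name keeps it alive
alive = weakref.ref(kept)
print(alive() is kept)  # True
```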
|
770206d1ace546b13a3fc4bac949486f860e4197
|
lib/node/request.js
|
lib/node/request.js
|
'use strict';
var request = require('request'), undef;
module.exports = function(options) {
// Prepare options
options.method = options.method.toUpperCase();
options.uri = options.uri.toString();
// Do we have any callback, or is this a fire-and-forget request?
var hasCallback = !!options.onComplete;
// Run the request
return request(options, hasCallback ? function(err, res, body) {
if (err) {
return options.onComplete(err, res, body);
} else if (res.statusCode >= 400) {
return options.onComplete(res.statusCode, res, body);
}
return options.onComplete(err, res, body);
} : undef);
};
|
'use strict';
var request = require('request');
module.exports = function(options) {
// Prepare options
options.method = options.method.toUpperCase();
options.uri = options.uri.toString();
// Do we have any callback, or is this a fire-and-forget request?
var hasCallback = !!options.onComplete;
// Run the request
return request(options, hasCallback ? function(err, res, body) {
if (err) {
return options.onComplete(err, res, body);
} else if (res.statusCode >= 400) {
return options.onComplete(res.statusCode, res, body);
}
return options.onComplete(err, res, body);
} : undefined);
};
|
Use 'undefined' instead of setting an undefined variable
|
Use 'undefined' instead of setting an undefined variable
|
JavaScript
|
mit
|
rexxars/imboclient-js,imbo/imboclient-js,rexxars/imboclient-js,rexxars/imboclient-js,imbo/imboclient-js,imbo/imboclient-js
|
javascript
|
## Code Before:
'use strict';
var request = require('request'), undef;
module.exports = function(options) {
// Prepare options
options.method = options.method.toUpperCase();
options.uri = options.uri.toString();
// Do we have any callback, or is this a fire-and-forget request?
var hasCallback = !!options.onComplete;
// Run the request
return request(options, hasCallback ? function(err, res, body) {
if (err) {
return options.onComplete(err, res, body);
} else if (res.statusCode >= 400) {
return options.onComplete(res.statusCode, res, body);
}
return options.onComplete(err, res, body);
} : undef);
};
## Instruction:
Use 'undefined' instead of setting an undefined variable
## Code After:
'use strict';
var request = require('request');
module.exports = function(options) {
// Prepare options
options.method = options.method.toUpperCase();
options.uri = options.uri.toString();
// Do we have any callback, or is this a fire-and-forgot request?
var hasCallback = !!options.onComplete;
// Run the request
return request(options, hasCallback ? function(err, res, body) {
if (err) {
return options.onComplete(err, res, body);
} else if (res.statusCode >= 400) {
return options.onComplete(res.statusCode, res, body);
}
return options.onComplete(err, res, body);
} : undefined);
};
|
affaf334028599fe2db06de986976d1dca8f0e8a
|
app/assets/javascripts/user_form.js
|
app/assets/javascripts/user_form.js
|
$(document).ready(function() {
const $form = $('.edit_user');
$form.on('submit', function(event) {
let url = $form.attr('action');
let method = $form.attr('method');
let data = $form.serialize();
$.ajax({
url: url,
method: method,
data: data,
dataType: "json"
})
.success(function(response) {
console.log(response);
})
.fail(function(error) {
console.log(error);
})
});
$("#profile").on("click", ".edit-profile", function() {
const $modal = $('#userEditModal')[0];
console.log($modal);
$modal.style.display = "block";
$('#closeEditModal').on("click", function() {
$modal.style.display = "none";
});
});
});
|
$(document).ready(function() {
$("#profile").on('submit', '.edit-user', function(event) {
const $form = $('.edit-user');
let url = $form.attr('action');
let method = $form.attr('method');
let data = $form.serialize();
$.ajax({
url: url,
method: method,
data: data,
dataType: "json"
})
.success(function(response) {
console.log(response);
})
.fail(function(error) {
console.log(error);
})
});
$("#profile").on("click", ".edit-profile", function() {
const $modal = $('#userEditModal')[0];
console.log($modal);
$modal.style.display = "block";
$('#closeEditModal').on("click", function() {
$modal.style.display = "none";
});
});
});
|
Fix ajax for user update modal
|
Fix ajax for user update modal
|
JavaScript
|
mit
|
AliasHendrickson/progress,AliasHendrickson/progress,AliasHendrickson/progress
|
javascript
|
## Code Before:
$(document).ready(function() {
const $form = $('.edit_user');
$form.on('submit', function(event) {
let url = $form.attr('action');
let method = $form.attr('method');
let data = $form.serialize();
$.ajax({
url: url,
method: method,
data: data,
dataType: "json"
})
.success(function(response) {
console.log(response);
})
.fail(function(error) {
console.log(error);
})
});
$("#profile").on("click", ".edit-profile", function() {
const $modal = $('#userEditModal')[0];
console.log($modal);
$modal.style.display = "block";
$('#closeEditModal').on("click", function() {
$modal.style.display = "none";
});
});
});
## Instruction:
Fix ajax for user update modal
## Code After:
$(document).ready(function() {
$("#profile").on('submit', '.edit-user', function(event) {
const $form = $('.edit-user');
let url = $form.attr('action');
let method = $form.attr('method');
let data = $form.serialize();
$.ajax({
url: url,
method: method,
data: data,
dataType: "json"
})
.success(function(response) {
console.log(response);
})
.fail(function(error) {
console.log(error);
})
});
$("#profile").on("click", ".edit-profile", function() {
const $modal = $('#userEditModal')[0];
console.log($modal);
$modal.style.display = "block";
$('#closeEditModal').on("click", function() {
$modal.style.display = "none";
});
});
});
|
f9465e5c7c3f4c77e3eabbee88e76585a22cc0eb
|
roles/common/tasks/setup.yml
|
roles/common/tasks/setup.yml
|
---
- name: create user(s)
action: user name=ansible
- name: setup authorized key(s)
action: authorized_key user=ansible key='$FILE(roles/common/files/id_rsa.pub)'
- name: create admin group
action: group name=admin
- name: write the admin sudoers file
action: template src=templates/sudoers/admin dest=/etc/sudoers.d/admin
- name: write the ansible sudoers file
action: template src=templates/sudoers/ansible dest=/etc/sudoers.d/ansible
|
---
- name: create user(s)
action: user name=ansible
- name: setup authorized key(s)
action: authorized_key user=ansible key='$FILE(roles/common/files/id_rsa.pub)'
- name: create admin group
action: group name=admin
- name: write the admin sudoers file
action: template src=templates/sudoers/admin dest=/etc/sudoers.d/admin owner=root group=root mode=0440
- name: write the ansible sudoers file
action: template src=templates/sudoers/ansible dest=/etc/sudoers.d/ansible owner=root group=root mode=0440
|
Extend the case for sudoers
|
Extend the case for sudoers
|
YAML
|
mit
|
BrianAker/server-install,BrianAker/server-install
|
yaml
|
## Code Before:
---
- name: create user(s)
action: user name=ansible
- name: setup authorized key(s)
action: authorized_key user=ansible key='$FILE(roles/common/files/id_rsa.pub)'
- name: create admin group
action: group name=admin
- name: write the admin sudoers file
action: template src=templates/sudoers/admin dest=/etc/sudoers.d/admin
- name: write the ansible sudoers file
action: template src=templates/sudoers/ansible dest=/etc/sudoers.d/ansible
## Instruction:
Extend the case for sudoers
## Code After:
---
- name: create user(s)
action: user name=ansible
- name: setup authorized key(s)
action: authorized_key user=ansible key='$FILE(roles/common/files/id_rsa.pub)'
- name: create admin group
action: group name=admin
- name: write the admin sudoers file
action: template src=templates/sudoers/admin dest=/etc/sudoers.d/admin owner=root group=root mode=0440
- name: write the ansible sudoers file
action: template src=templates/sudoers/ansible dest=/etc/sudoers.d/ansible owner=root group=root mode=0440
|
98ca748996fe462cedf284ad91a74bdd30eb81f3
|
mopidy/__init__.py
|
mopidy/__init__.py
|
from __future__ import absolute_import, unicode_literals
import platform
import sys
import textwrap
import warnings
if not (2, 7) <= sys.version_info < (3,):
sys.exit(
'ERROR: Mopidy requires Python 2.7, but found %s.' %
platform.python_version())
try:
import gobject # noqa
except ImportError:
print(textwrap.dedent("""
ERROR: The gobject Python package was not found.
Mopidy requires GStreamer (and GObject) to work. These are C libraries
with a number of dependencies themselves, and cannot be installed with
the regular Python tools like pip.
Please see http://docs.mopidy.com/en/latest/installation/ for
instructions on how to install the required dependencies.
"""))
raise
warnings.filterwarnings('ignore', 'could not open display')
__version__ = '0.19.4'
|
from __future__ import absolute_import, print_function, unicode_literals
import platform
import sys
import textwrap
import warnings
if not (2, 7) <= sys.version_info < (3,):
sys.exit(
'ERROR: Mopidy requires Python 2.7, but found %s.' %
platform.python_version())
try:
import gobject # noqa
except ImportError:
print(textwrap.dedent("""
ERROR: The gobject Python package was not found.
Mopidy requires GStreamer (and GObject) to work. These are C libraries
with a number of dependencies themselves, and cannot be installed with
the regular Python tools like pip.
Please see http://docs.mopidy.com/en/latest/installation/ for
instructions on how to install the required dependencies.
"""))
raise
warnings.filterwarnings('ignore', 'could not open display')
__version__ = '0.19.4'
|
Use print function instead of print statement
|
py3: Use print function instead of print statement
|
Python
|
apache-2.0
|
jcass77/mopidy,ZenithDK/mopidy,SuperStarPL/mopidy,vrs01/mopidy,jcass77/mopidy,diandiankan/mopidy,dbrgn/mopidy,SuperStarPL/mopidy,mokieyue/mopidy,rawdlite/mopidy,jcass77/mopidy,jmarsik/mopidy,mopidy/mopidy,bencevans/mopidy,mopidy/mopidy,diandiankan/mopidy,jmarsik/mopidy,vrs01/mopidy,mokieyue/mopidy,kingosticks/mopidy,SuperStarPL/mopidy,ali/mopidy,ali/mopidy,tkem/mopidy,hkariti/mopidy,glogiotatidis/mopidy,quartz55/mopidy,kingosticks/mopidy,rawdlite/mopidy,bencevans/mopidy,quartz55/mopidy,bencevans/mopidy,swak/mopidy,rawdlite/mopidy,dbrgn/mopidy,bacontext/mopidy,jodal/mopidy,ZenithDK/mopidy,diandiankan/mopidy,priestd09/mopidy,hkariti/mopidy,kingosticks/mopidy,adamcik/mopidy,jodal/mopidy,pacificIT/mopidy,quartz55/mopidy,mopidy/mopidy,swak/mopidy,priestd09/mopidy,ali/mopidy,pacificIT/mopidy,adamcik/mopidy,glogiotatidis/mopidy,mokieyue/mopidy,bacontext/mopidy,pacificIT/mopidy,pacificIT/mopidy,bacontext/mopidy,tkem/mopidy,hkariti/mopidy,swak/mopidy,mokieyue/mopidy,ZenithDK/mopidy,bacontext/mopidy,rawdlite/mopidy,ZenithDK/mopidy,glogiotatidis/mopidy,dbrgn/mopidy,jmarsik/mopidy,swak/mopidy,diandiankan/mopidy,priestd09/mopidy,SuperStarPL/mopidy,vrs01/mopidy,quartz55/mopidy,adamcik/mopidy,glogiotatidis/mopidy,jodal/mopidy,tkem/mopidy,jmarsik/mopidy,dbrgn/mopidy,hkariti/mopidy,vrs01/mopidy,bencevans/mopidy,tkem/mopidy,ali/mopidy
|
python
|
## Code Before:
from __future__ import absolute_import, unicode_literals
import platform
import sys
import textwrap
import warnings
if not (2, 7) <= sys.version_info < (3,):
sys.exit(
'ERROR: Mopidy requires Python 2.7, but found %s.' %
platform.python_version())
try:
import gobject # noqa
except ImportError:
print(textwrap.dedent("""
ERROR: The gobject Python package was not found.
Mopidy requires GStreamer (and GObject) to work. These are C libraries
with a number of dependencies themselves, and cannot be installed with
the regular Python tools like pip.
Please see http://docs.mopidy.com/en/latest/installation/ for
instructions on how to install the required dependencies.
"""))
raise
warnings.filterwarnings('ignore', 'could not open display')
__version__ = '0.19.4'
## Instruction:
py3: Use print function instead of print statement
## Code After:
from __future__ import absolute_import, print_function, unicode_literals
import platform
import sys
import textwrap
import warnings
if not (2, 7) <= sys.version_info < (3,):
sys.exit(
'ERROR: Mopidy requires Python 2.7, but found %s.' %
platform.python_version())
try:
import gobject # noqa
except ImportError:
print(textwrap.dedent("""
ERROR: The gobject Python package was not found.
Mopidy requires GStreamer (and GObject) to work. These are C libraries
with a number of dependencies themselves, and cannot be installed with
the regular Python tools like pip.
Please see http://docs.mopidy.com/en/latest/installation/ for
instructions on how to install the required dependencies.
"""))
raise
warnings.filterwarnings('ignore', 'could not open display')
__version__ = '0.19.4'
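As a standalone reminder of what the added import buys: on Python 2 it turns `print` into an ordinary function, and on Python 3 the import is accepted as a no-op, so the file stays importable on both.

```python
from __future__ import print_function

# With the future import, keyword arguments such as sep/end work, and print
# can be passed around like any other function (a statement could not be).
print("gstreamer", "found", sep=": ", end="\n")
report = print
report("done")
```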
|
319291236024d61bba36ccf648ca72c8e3ab0b52
|
Doc/texinputs/boilerplate.tex
|
Doc/texinputs/boilerplate.tex
|
\author{Guido van Rossum\\
Fred L. Drake, Jr., editor}
\authoraddress{
BeOpen PythonLabs\\
E-mail: \email{[email protected]}
}
\date{September 26, 2000} % XXX update before release!
\release{2.0b2} % software release, not documentation
\setshortversion{2.0} % major.minor only for Python
|
\author{Guido van Rossum\\
Fred L. Drake, Jr., editor}
\authoraddress{
BeOpen PythonLabs\\
E-mail: \email{[email protected]}
}
\date{October 9, 2000} % XXX update before release!
\release{2.0c1} % software release, not documentation
\setshortversion{2.0} % major.minor only for software
|
Update for next planned release.
|
Update for next planned release.
|
TeX
|
mit
|
sk-/python2.7-type-annotator,sk-/python2.7-type-annotator,sk-/python2.7-type-annotator
|
tex
|
## Code Before:
\author{Guido van Rossum\\
Fred L. Drake, Jr., editor}
\authoraddress{
BeOpen PythonLabs\\
E-mail: \email{[email protected]}
}
\date{September 26, 2000} % XXX update before release!
\release{2.0b2} % software release, not documentation
\setshortversion{2.0} % major.minor only for Python
## Instruction:
Update for next planned release.
## Code After:
\author{Guido van Rossum\\
Fred L. Drake, Jr., editor}
\authoraddress{
BeOpen PythonLabs\\
E-mail: \email{[email protected]}
}
\date{October 9, 2000} % XXX update before release!
\release{2.0c1} % software release, not documentation
\setshortversion{2.0} % major.minor only for software
|
248b9dca6782da9f2703031f672f88fb3c484a3a
|
README.md
|
README.md
|
Generates lightcurve JSON files and Ouroboros manifest for Planet Hunters.
Example usage:
```
docker run -it -v /data/:/data/ zooniverse/planet-hunters-importer ./generate.py /data/metadata.csv
docker run -it -v /data/:/data/ zooniverse/planet-hunters-importer ./generate-manifest.py
```
|
Generates lightcurve JSON files and Ouroboros manifest for Planet Hunters.
Example usage:
```
docker run -it --rm -v /data/:/data/ zooniverse/planet-hunters-importer ./generate.py /data/metadata.csv
docker run -it --rm -v /data/:/data/ zooniverse/planet-hunters-importer ./generate-manifest.py
```
|
Add --rm to example usage
|
Add --rm to example usage
|
Markdown
|
apache-2.0
|
zooniverse/planet-hunters-importer,zooniverse/planet-hunters-importer,zooniverse/planet-hunters-importer
|
markdown
|
## Code Before:
Generates lightcurve JSON files and Ouroboros manifest for Planet Hunters.
Example usage:
```
docker run -it -v /data/:/data/ zooniverse/planet-hunters-importer ./generate.py /data/metadata.csv
docker run -it -v /data/:/data/ zooniverse/planet-hunters-importer ./generate-manifest.py
```
## Instruction:
Add --rm to example usage
## Code After:
Generates lightcurve JSON files and Ouroboros manifest for Planet Hunters.
Example usage:
```
docker run -it --rm -v /data/:/data/ zooniverse/planet-hunters-importer ./generate.py /data/metadata.csv
docker run -it --rm -v /data/:/data/ zooniverse/planet-hunters-importer ./generate-manifest.py
```
|
e04af44c29e7f83965a1a1a266e60d9b5820c9e1
|
Search/Searchable.swift
|
Search/Searchable.swift
|
//
// Searchable.swift
// Search
//
// Created by David Haynes on 16/03/2016.
// Copyright © 2016 Ordnance Survey. All rights reserved.
//
import Fetch
import OSTransformation
/**
* Searchable protocol
*/
public protocol Searchable {
/**
Find the text specified
- parameter query: The text to find
- parameter completion: Completion block to call
*/
func find(query: String, completion: (Result<SearchResponse> -> Void))
/**
Find the nearest address to the provided grid point. Will find a result
within 100 metres.
- parameter location: The location to query
- parameter completion: Completion block to call
*/
func nearest(location: OSGridPoint, completion: (Result<SearchResponse> -> Void))
}
|
//
// Searchable.swift
// Search
//
// Created by David Haynes on 16/03/2016.
// Copyright © 2016 Ordnance Survey. All rights reserved.
//
import Fetch
import OSTransformation
/**
* Searchable protocol
*/
public protocol Searchable {
/**
Find the text specified
- parameter query: The text to find
- parameter completion: Completion block to call
*/
func find(query: String, completion: (Result<SearchResponse> -> Void))
/**
Find the nearest address to the provided grid point. Will find a result
within 100 metres.
- parameter location: The location to query
- parameter completion: Completion block to call
*/
func nearest(location: OSGridPoint, completion: (Result<SearchResponse> -> Void))
}
/**
* Extension to the searchable protocol that allows for a find query that
* prioritises results within the specified bounding box
*/
public protocol BoundingBoxSearchable: Searchable {
/**
Find the specified text, prioritising results within the bounding box
- parameter query: The text to find
- parameter boundingBox: The bounding box to search within
- parameter completion: Completion block to call
*/
func find(query: String, boundingBox: OSBoundingBox, completion: (Result<SearchResponse> -> Void))
}
|
Add bounding box search method to protocol
|
Add bounding box search method to protocol
|
Swift
|
apache-2.0
|
OrdnanceSurvey/search-swift,OrdnanceSurvey/search-swift
|
swift
|
## Code Before:
//
// Searchable.swift
// Search
//
// Created by David Haynes on 16/03/2016.
// Copyright © 2016 Ordnance Survey. All rights reserved.
//
import Fetch
import OSTransformation
/**
* Searchable protocol
*/
public protocol Searchable {
/**
Find the text specified
- parameter query: The text to find
- parameter completion: Completion block to call
*/
func find(query: String, completion: (Result<SearchResponse> -> Void))
/**
Find the nearest address to the provided grid point. Will find a result
within 100 metres.
- parameter location: The location to query
- parameter completion: Completion block to call
*/
func nearest(location: OSGridPoint, completion: (Result<SearchResponse> -> Void))
}
## Instruction:
Add bounding box search method to protocol
## Code After:
//
// Searchable.swift
// Search
//
// Created by David Haynes on 16/03/2016.
// Copyright © 2016 Ordnance Survey. All rights reserved.
//
import Fetch
import OSTransformation
/**
* Searchable protocol
*/
public protocol Searchable {
/**
Find the text specified
- parameter query: The text to find
- parameter completion: Completion block to call
*/
func find(query: String, completion: (Result<SearchResponse> -> Void))
/**
Find the nearest address to the provided grid point. Will find a result
within 100 metres.
- parameter location: The location to query
- parameter completion: Completion block to call
*/
func nearest(location: OSGridPoint, completion: (Result<SearchResponse> -> Void))
}
/**
* Extension to the searchable protocol that allows for a find query that
* prioritises results within the specified bounding box
*/
public protocol BoundingBoxSearchable: Searchable {
/**
Find the specified text, prioritising results within the bounding box
- parameter query: The text to find
- parameter boundingBox: The bounding box to search within
- parameter completion: Completion block to call
*/
func find(query: String, boundingBox: OSBoundingBox, completion: (Result<SearchResponse> -> Void))
}
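Swift protocol inheritance has no exact Python counterpart, but the shape of the change, a narrower protocol that adds one required method on top of `Searchable`, can be sketched with `abc`; every name below is invented for the sketch.

```python
from abc import ABC, abstractmethod

class Searchable(ABC):
    @abstractmethod
    def find(self, query, completion):
        ...

class BoundingBoxSearchable(Searchable):
    """Everything Searchable requires, plus a bounding-box-aware find."""

    @abstractmethod
    def find_in_box(self, query, bounding_box, completion):
        ...

class DemoSearcher(BoundingBoxSearchable):
    def find(self, query, completion):
        completion([query])

    def find_in_box(self, query, bounding_box, completion):
        completion([(query, bounding_box)])

DemoSearcher().find_in_box("OS HQ", (0, 0, 100, 100), print)
# prints [('OS HQ', (0, 0, 100, 100))]
```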
|
3abba3bd7a6cd1e1e27fe5603cd3351b563037ce
|
config/sidekiq.yml
|
config/sidekiq.yml
|
---
:logfile: ./log/sidekiq.log
:queues:
- [mailers, 3]
- [default, 2]
- [claims, 2]
- [stats_reports, 1]
- [convert_document, 1]
development:
:verbose: true
:concurrency: 5
production:
:verbose: true
:concurrency: 10
|
---
:logfile: ./log/sidekiq.log
:queues:
- [convert_document, 3]
- [mailers, 3]
- [default, 2]
- [claims, 2]
- [stats_reports, 1]
development:
:verbose: true
:concurrency: 5
production:
:verbose: true
:concurrency: 10
|
Increase priority of convert document job
|
Increase priority of convert document job
|
YAML
|
mit
|
ministryofjustice/advocate-defence-payments,ministryofjustice/advocate-defence-payments,ministryofjustice/advocate-defence-payments,ministryofjustice/advocate-defence-payments
|
yaml
|
## Code Before:
---
:logfile: ./log/sidekiq.log
:queues:
- [mailers, 3]
- [default, 2]
- [claims, 2]
- [stats_reports, 1]
- [convert_document, 1]
development:
:verbose: true
:concurrency: 5
production:
:verbose: true
:concurrency: 10
## Instruction:
Increase priority of convert document job
## Code After:
---
:logfile: ./log/sidekiq.log
:queues:
- [convert_document, 3]
- [mailers, 3]
- [default, 2]
- [claims, 2]
- [stats_reports, 1]
development:
:verbose: true
:concurrency: 5
production:
:verbose: true
:concurrency: 10
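For context, Sidekiq queue weights are polling priorities rather than a strict order: a queue with weight 3 is roughly three times as likely to be checked next as one with weight 1. A loose Python sketch of such a weighted pick (queue names copied from the config above; the selection logic only approximates Sidekiq's):

```python
import random
from collections import Counter

WEIGHTS = {"convert_document": 3, "mailers": 3, "default": 2,
           "claims": 2, "stats_reports": 1}

def pick_queue(weights):
    names = list(weights)
    return random.choices(names, weights=[weights[n] for n in names])[0]

# Over many picks, convert_document is now checked about three times as often
# as stats_reports, which is the point of giving it weight 3.
picks = Counter(pick_queue(WEIGHTS) for _ in range(10_000))
print(picks.most_common())
```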
|
6a158a110266c40428b485688275981be08eefe6
|
src/app/users/user-profile-short.component.html
|
src/app/users/user-profile-short.component.html
|
<header class="hero-background">
<h2 i18n class="internal-header" (click)="showId=!(showId)">Your Profile</h2>
<span *ngIf="showId">My ID is: {{(auth | async)?.uid}} </span>
</header>
<main class="main-content">
<md-card class="profile-card">
<p i18n *ngIf="user">What's your name?</p>
<form *ngIf="user" (submit)="next(user)" class="user-form" ngNoForm>
<div class="form-layout">
<md-input placeholder="Name" [(ngModel)]="user.name"></md-input>
</div>
<div class="options">
<button i18n md-raised-button color="primary" (click)="next(user)">Next</button>
</div>
</form>
<p i18n *ngIf="!user">We don't yet have any data for your user.</p>
</md-card>
</main>
|
<header class="hero-background">
<h2 i18n class="internal-header" (click)="showId=!(showId)">Your Profile</h2>
<span *ngIf="showId">My ID is: {{(auth | async)?.uid}} </span>
</header>
<main class="main-content">
<md-card class="profile-card">
<p i18n *ngIf="user">Let's get started setting up your profile. What's your full name?</p>
<form *ngIf="user" (submit)="next(user)" class="user-form" ngNoForm>
<div class="form-layout">
<md-input placeholder="Full Name" [(ngModel)]="user.name"></md-input>
</div>
<div class="options">
<button i18n md-raised-button color="primary" (click)="next(user)">Next</button>
</div>
</form>
<p i18n *ngIf="!user">We don't yet have any data for your user.</p>
</md-card>
</main>
|
Clarify new user welcome message
|
Clarify new user welcome message
|
HTML
|
mit
|
StephenFluin/ames,StephenFluin/ames,StephenFluin/ames
|
html
|
## Code Before:
<header class="hero-background">
<h2 i18n class="internal-header" (click)="showId=!(showId)">Your Profile</h2>
<span *ngIf="showId">My ID is: {{(auth | async)?.uid}} </span>
</header>
<main class="main-content">
<md-card class="profile-card">
<p i18n *ngIf="user">What's your name?</p>
<form *ngIf="user" (submit)="next(user)" class="user-form" ngNoForm>
<div class="form-layout">
<md-input placeholder="Name" [(ngModel)]="user.name"></md-input>
</div>
<div class="options">
<button i18n md-raised-button color="primary" (click)="next(user)">Next</button>
</div>
</form>
<p i18n *ngIf="!user">We don't yet have any data for your user.</p>
</md-card>
</main>
## Instruction:
Clarify new user welcome message
## Code After:
<header class="hero-background">
<h2 i18n class="internal-header" (click)="showId=!(showId)">Your Profile</h2>
<span *ngIf="showId">My ID is: {{(auth | async)?.uid}} </span>
</header>
<main class="main-content">
<md-card class="profile-card">
<p i18n *ngIf="user">Let's get started setting up your profile. What's your full name?</p>
<form *ngIf="user" (submit)="next(user)" class="user-form" ngNoForm>
<div class="form-layout">
<md-input placeholder="Full Name" [(ngModel)]="user.name"></md-input>
</div>
<div class="options">
<button i18n md-raised-button color="primary" (click)="next(user)">Next</button>
</div>
</form>
<p i18n *ngIf="!user">We don't yet have any data for your user.</p>
</md-card>
</main>
|
80fa0df141371e5b929726e9eabef959adcfed28
|
schemas/userSchema.js
|
schemas/userSchema.js
|
var mongoose = require('mongoose');
var Schema = mongoose.Schema;
var userSchema = new Schema({
githubId: Number,
name: String,
avatar_url: String,
projects: Array
});
module.exports = mongoose.model('User', userSchema);
|
var mongoose = require('mongoose');
var Schema = mongoose.Schema;
var userSchema = new Schema({
githubId: Number,
name: String,
avatar_url: String,
isDeveloper: Boolean,
projects: Array
});
module.exports = mongoose.model('User', userSchema);
|
Add developer flag to user schema
|
Add developer flag to user schema
|
JavaScript
|
apache-2.0
|
with-regard/regard-website-api
|
javascript
|
## Code Before:
var mongoose = require('mongoose');
var Schema = mongoose.Schema;
var userSchema = new Schema({
githubId: Number,
name: String,
avatar_url: String,
projects: Array
});
module.exports = mongoose.model('User', userSchema);
## Instruction:
Add developer flag to user schema
## Code After:
var mongoose = require('mongoose');
var Schema = mongoose.Schema;
var userSchema = new Schema({
githubId: Number,
name: String,
avatar_url: String,
isDeveloper: Boolean,
projects: Array
});
module.exports = mongoose.model('User', userSchema);
|
3dcb417477615e3fd34ec5d3a311541eead7371c
|
Resources/views/Conversation/list.html.twig
|
Resources/views/Conversation/list.html.twig
|
{% extends 'FDPrivateMessageBundle::layout.html.twig' %}
{% trans_default_domain 'FDPrivateMessageBundle' %}
{% block fd_private_message_content %}
<ul>
{% for conversation in conversations %}
<li>
<a href="{{ url('fdpm_show_conversation', {'conversation': conversation.id}) }}">{{ conversation.subject }}</a> {{ conversation.created | date('Y-m-d H:i:s') }}
</li>
{% else %}
<li>
You have no conversation yet.
</li>
{% endfor %}
</ul>
{% endblock %}
|
{% extends 'FDPrivateMessageBundle::layout.html.twig' %}
{% trans_default_domain 'FDPrivateMessageBundle' %}
{% block fd_private_message_content %}
<a href="{{ url('fdpm_new_conversation') }}">{{ 'conversation.create.new' | trans }}</a>
<ul>
{% for conversation in conversations %}
<li>
<a href="{{ url('fdpm_show_conversation', {'conversation': conversation.id}) }}">{{ conversation.subject }}</a> {{ conversation.created | date('Y-m-d H:i:s') }}
</li>
{% else %}
<li>
You have no conversation yet.
</li>
{% endfor %}
</ul>
{% endblock %}
|
Add link to start new conversation
|
Add link to start new conversation
|
Twig
|
mit
|
lobodol/FireDIY-PrivateMessageBundle,lobodol/FireDIY-PrivateMessageBundle
|
twig
|
## Code Before:
{% extends 'FDPrivateMessageBundle::layout.html.twig' %}
{% trans_default_domain 'FDPrivateMessageBundle' %}
{% block fd_private_message_content %}
<ul>
{% for conversation in conversations %}
<li>
<a href="{{ url('fdpm_show_conversation', {'conversation': conversation.id}) }}">{{ conversation.subject }}</a> {{ conversation.created | date('Y-m-d H:i:s') }}
</li>
{% else %}
<li>
You have no conversation yet.
</li>
{% endfor %}
</ul>
{% endblock %}
## Instruction:
Add link to start new conversation
## Code After:
{% extends 'FDPrivateMessageBundle::layout.html.twig' %}
{% trans_default_domain 'FDPrivateMessageBundle' %}
{% block fd_private_message_content %}
<a href="{{ url('fdpm_new_conversation') }}">{{ 'conversation.create.new' | trans }}</a>
<ul>
{% for conversation in conversations %}
<li>
<a href="{{ url('fdpm_show_conversation', {'conversation': conversation.id}) }}">{{ conversation.subject }}</a> {{ conversation.created | date('Y-m-d H:i:s') }}
</li>
{% else %}
<li>
You have no conversation yet.
</li>
{% endfor %}
</ul>
{% endblock %}
|
ee9097b481a075f2040d741ff91ba054dafdc166
|
app.js
|
app.js
|
/*
This file is generated and updated by Sencha Cmd. You can edit this file as
needed for your application, but these edits will have to be merged by
Sencha Cmd when upgrading.
*/
// DO NOT DELETE - this directive is required for Sencha Cmd packages to work.
//@require @packageOverrides
Ext.useShims = true;
// to disable caching, enable the line below
// Ext.Loader.setConfig('disableCaching', false);
Ext.application({
name: 'TrackAnnot',
extend: 'TrackAnnot.Application',
autoCreateViewport: true
});
|
/*
This file is generated and updated by Sencha Cmd. You can edit this file as
needed for your application, but these edits will have to be merged by
Sencha Cmd when upgrading.
*/
// DO NOT DELETE - this directive is required for Sencha Cmd packages to work.
//@require @packageOverrides
Ext.useShims = true;
// to disable caching, enable the line below
// Ext.Loader.setConfig('disableCaching', false);
Ext.application({
name: 'TrackAnnot',
extend: 'TrackAnnot.Application',
autoCreateViewport: true
});
// Uncomment to load static demo track
//Ext.onReady(function() {
// // overwrite web service urls to static files
// var ctrl = TrackAnnot.getApplication().getController('Main');
// ctrl.setupUrls('trackers.json', 'demo/tracker.json');
//});
|
Add comment how to use static demo track during development.
|
Add comment how to use static demo track during development.
|
JavaScript
|
apache-2.0
|
NLeSC/eEcology-Annotation-UI,NLeSC/eEcology-Annotation-UI,NLeSC/eEcology-Annotation-UI
|
javascript
|
## Code Before:
/*
This file is generated and updated by Sencha Cmd. You can edit this file as
needed for your application, but these edits will have to be merged by
Sencha Cmd when upgrading.
*/
// DO NOT DELETE - this directive is required for Sencha Cmd packages to work.
//@require @packageOverrides
Ext.useShims = true;
// do disable caching enable line below
// Ext.Loader.setConfig('disableCaching', false);
Ext.application({
name: 'TrackAnnot',
extend: 'TrackAnnot.Application',
autoCreateViewport: true
});
## Instruction:
Add comment how to use static demo track during development.
## Code After:
/*
This file is generated and updated by Sencha Cmd. You can edit this file as
needed for your application, but these edits will have to be merged by
Sencha Cmd when upgrading.
*/
// DO NOT DELETE - this directive is required for Sencha Cmd packages to work.
//@require @packageOverrides
Ext.useShims = true;
// do disable caching enable line below
// Ext.Loader.setConfig('disableCaching', false);
Ext.application({
name: 'TrackAnnot',
extend: 'TrackAnnot.Application',
autoCreateViewport: true
});
// Uncomment to load static demo track
//Ext.onReady(function() {
// // overwrite web service urls to static files
// var ctrl = TrackAnnot.getApplication().getController('Main');
// ctrl.setupUrls('trackers.json', 'demo/tracker.json');
//});
|
4a167edba49ce6d6e903e959f9d65eb7d35a1fbd
|
.github/workflows/publish.yml
|
.github/workflows/publish.yml
|
name: Publish
on:
push:
branches: [ master ]
paths:
- '**.md'
- '**.yml'
- '**.swift'
jobs:
publish:
if: "!contains(format('{0} {1} {2}', github.event.head_commit.message, github.event.pull_request.title, github.event.pull_request.body), '[skip ci]')"
runs-on: macos-latest
steps:
- uses: actions/checkout@v2
- name: Build
run: swift build -v
- name: Generate
run: swift run -v Publisher
- name: Move Files
run: |
rm -Rf docs
cp -R Output/* .
rm -Rf Output
- name: Commit files
run: |
git config --local user.email "[email protected]"
git config --local user.name "Publish Bot"
git add -A
git commit -m "Publish deploy"
- name: Push changes
uses: ad-m/github-push-action@master
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
|
name: Publish
on:
push:
branches: [ master ]
paths:
- '**.md'
- '**.yml'
- '**.swift'
- 'Content/**'
jobs:
publish:
if: "!contains(format('{0} {1} {2}', github.event.head_commit.message, github.event.pull_request.title, github.event.pull_request.body), '[skip ci]')"
runs-on: macos-latest
steps:
- uses: actions/checkout@v2
- name: Build
run: swift build -v
- name: Generate
run: swift run -v Publisher
- name: Move Files
run: |
rm -Rf docs
cp -R Output/* .
rm -Rf Output
- name: Commit files
run: |
git config --local user.email "[email protected]"
git config --local user.name "Publish Bot"
git add -A
git commit -m "Publish deploy"
- name: Push changes
uses: ad-m/github-push-action@master
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
|
Deploy on all content changes
|
Deploy on all content changes
|
YAML
|
mit
|
iotize/iotize.github.io
|
yaml
|
## Code Before:
name: Publish
on:
push:
branches: [ master ]
paths:
- '**.md'
- '**.yml'
- '**.swift'
jobs:
publish:
if: "!contains(format('{0} {1} {2}', github.event.head_commit.message, github.event.pull_request.title, github.event.pull_request.body), '[skip ci]')"
runs-on: macos-latest
steps:
- uses: actions/checkout@v2
- name: Build
run: swift build -v
- name: Generate
run: swift run -v Publisher
- name: Move Files
run: |
rm -Rf docs
cp -R Output/* .
rm -Rf Output
- name: Commit files
run: |
git config --local user.email "[email protected]"
git config --local user.name "Publish Bot"
git add -A
git commit -m "Publish deploy"
- name: Push changes
uses: ad-m/github-push-action@master
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
## Instruction:
Deploy on all content changes
## Code After:
name: Publish
on:
push:
branches: [ master ]
paths:
- '**.md'
- '**.yml'
- '**.swift'
- 'Content/**'
jobs:
publish:
if: "!contains(format('{0} {1} {2}', github.event.head_commit.message, github.event.pull_request.title, github.event.pull_request.body), '[skip ci]')"
runs-on: macos-latest
steps:
- uses: actions/checkout@v2
- name: Build
run: swift build -v
- name: Generate
run: swift run -v Publisher
- name: Move Files
run: |
rm -Rf docs
cp -R Output/* .
rm -Rf Output
- name: Commit files
run: |
git config --local user.email "[email protected]"
git config --local user.name "Publish Bot"
git add -A
git commit -m "Publish deploy"
- name: Push changes
uses: ad-m/github-push-action@master
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
|
bbebd3c1ea4b4e8a76caf9d0a53e77c07d3b957d
|
app/views/pages/post/post.scala.html
|
app/views/pages/post/post.scala.html
|
@import partials.messageWithComments
@(post: Post)
@main("Post") {
@messageWithComments(post.getThread)
}
|
@import partials.messageWithComments
@(post: Post)
@main("Post") {
<p><a class="uk-link-reset uk-text-large" href="@groups.routes.Group.get(post.getGroup.getId)"><span data-uk-icon="icon: link; ratio: 1.5"></span> Go to Group</a></p>
@messageWithComments(post.getThread)
}
|
Add "Go to Group" link in Post page
|
Add "Go to Group" link in Post page
|
HTML
|
agpl-3.0
|
m4tx/arroch,m4tx/arroch
|
html
|
## Code Before:
@import partials.messageWithComments
@(post: Post)
@main("Post") {
@messageWithComments(post.getThread)
}
## Instruction:
Add "Go to Group" link in Post page
## Code After:
@import partials.messageWithComments
@(post: Post)
@main("Post") {
<p><a class="uk-link-reset uk-text-large" href="@groups.routes.Group.get(post.getGroup.getId)"><span data-uk-icon="icon: link; ratio: 1.5"></span> Go to Group</a></p>
@messageWithComments(post.getThread)
}
|
c8844298e7f8b891449edc31ff299b644e75a1e3
|
README.md
|
README.md
|
json [](https://travis-ci.org/timt/naive-json)
----
A really simple json parser library implemented in scala with no dependencies
Requirements
------------
* [scala](http://www.scala-lang.org) 2.10.4
Usage
-----
Add the following lines to your build.sbt
resolvers += "Tim Tennant's repo" at "http://dl.bintray.com/timt/repo/"
libraryDependencies += "io.shaka" %% "naive-json" % "29"
import io.shaka.json.Json
...
val json = Json("""{"thing1":{"thing2":"meet thing1"}}""")
val contentAtThing2 = json.thing1.thing2
For more examples see [JsonSpec.scala](https://github.com/timt/json/blob/master/src/test/scala/io/shaka/json/JsonSpec.scala)
See [timt/repo](http://dl.bintray.com/timt/repo/io/shaka/naive-json_2.10) for latest released version
Code license
------------
Apache License 2.0
|
json [](https://travis-ci.org/timt/naive-json) [  ](https://bintray.com/timt/repo/naive-json/_latestVersion)
----
A really simple json parser library implemented in scala with no dependencies
Requirements
------------
* [scala](http://www.scala-lang.org) 2.10.4
Usage
-----
Add the following lines to your build.sbt
resolvers += "Tim Tennant's repo" at "http://dl.bintray.com/timt/repo/"
libraryDependencies += "io.shaka" %% "naive-json" % "29"
import io.shaka.json.Json
...
val json = Json("""{"thing1":{"thing2":"meet thing1"}}""")
val contentAtThing2 = json.thing1.thing2
For more examples see [JsonSpec.scala](https://github.com/timt/json/blob/master/src/test/scala/io/shaka/json/JsonSpec.scala)
Code license
------------
Apache License 2.0
|
Add link to download latest published version
|
Add link to download latest published version
|
Markdown
|
apache-2.0
|
timt/naive-json
|
markdown
|
## Code Before:
json [](https://travis-ci.org/timt/naive-json)
----
A really simple json parser library implemented in scala with no dependencies
Requirements
------------
* [scala](http://www.scala-lang.org) 2.10.4
Usage
-----
Add the following lines to your build.sbt
resolvers += "Tim Tennant's repo" at "http://dl.bintray.com/timt/repo/"
libraryDependencies += "io.shaka" %% "naive-json" % "29"
import io.shaka.json.Json
...
val json = Json("""{"thing1":{"thing2":"meet thing1"}}""")
val contentAtThing2 = json.thing1.thing2
For more examples see [JsonSpec.scala](https://github.com/timt/json/blob/master/src/test/scala/io/shaka/json/JsonSpec.scala)
See [timt/repo](http://dl.bintray.com/timt/repo/io/shaka/naive-json_2.10) for latest released version
Code license
------------
Apache License 2.0
## Instruction:
Add link to download latest published version
## Code After:
json [](https://travis-ci.org/timt/naive-json) [  ](https://bintray.com/timt/repo/naive-json/_latestVersion)
----
A really simple json parser library implemented in scala with no dependencies
Requirements
------------
* [scala](http://www.scala-lang.org) 2.10.4
Usage
-----
Add the following lines to your build.sbt
resolvers += "Tim Tennant's repo" at "http://dl.bintray.com/timt/repo/"
libraryDependencies += "io.shaka" %% "naive-json" % "29"
import io.shaka.json.Json
...
val json = Json("""{"thing1":{"thing2":"meet thing1"}}""")
val contentAtThing2 = json.thing1.thing2
For more examples see [JsonSpec.scala](https://github.com/timt/json/blob/master/src/test/scala/io/shaka/json/JsonSpec.scala)
Code license
------------
Apache License 2.0
|
605a3eefd5130c45b8faba14a252373bdf94de7c
|
_output/code_event_listeners/_metadata.json
|
_output/code_event_listeners/_metadata.json
|
{
"entity_pre_save_SV-UserMentionsImprovements-Listener_usergroupEntityPreSave.json": {
"hash": "21a2f35cdc4c84f27b1333a1403dea74"
},
"entity_structure_SV-UserMentionsImprovements-Listener_usergroupEntityStructure.json": {
"hash": "f9e62abd2f33c8023b6bdf4fd81a6621"
},
"entity_structure_SV-UserMentionsImprovements-Listener_useroptionEntityStructure.json": {
"hash": "364e89653ac279ee5167052e05e7882a"
}
}
|
{
"entity_pre_save_SV-UserMentionsImprovements-Listener_usergroupEntityPreSave.json": {
"hash": "21a2f35cdc4c84f27b1333a1403dea74"
},
"entity_structure_SV-UserMentionsImprovements-Listener_userEntityStructure.json": {
"hash": "75abe2c187d47a0350476fb3b24a914e"
},
"entity_structure_SV-UserMentionsImprovements-Listener_usergroupEntityStructure.json": {
"hash": "f9e62abd2f33c8023b6bdf4fd81a6621"
},
"entity_structure_SV-UserMentionsImprovements-Listener_useroptionEntityStructure.json": {
"hash": "364e89653ac279ee5167052e05e7882a"
}
}
|
Use single finder query for fetching usergroup members.
|
Use single finder query for fetching usergroup members.
|
JSON
|
mit
|
liamwli/XenForo2-UserMentionsImprovements,liamwli/XenForo2-UserMentionsImprovements
|
json
|
## Code Before:
{
"entity_pre_save_SV-UserMentionsImprovements-Listener_usergroupEntityPreSave.json": {
"hash": "21a2f35cdc4c84f27b1333a1403dea74"
},
"entity_structure_SV-UserMentionsImprovements-Listener_usergroupEntityStructure.json": {
"hash": "f9e62abd2f33c8023b6bdf4fd81a6621"
},
"entity_structure_SV-UserMentionsImprovements-Listener_useroptionEntityStructure.json": {
"hash": "364e89653ac279ee5167052e05e7882a"
}
}
## Instruction:
Use single finder query for fetching usergroup members.
## Code After:
{
"entity_pre_save_SV-UserMentionsImprovements-Listener_usergroupEntityPreSave.json": {
"hash": "21a2f35cdc4c84f27b1333a1403dea74"
},
"entity_structure_SV-UserMentionsImprovements-Listener_userEntityStructure.json": {
"hash": "75abe2c187d47a0350476fb3b24a914e"
},
"entity_structure_SV-UserMentionsImprovements-Listener_usergroupEntityStructure.json": {
"hash": "f9e62abd2f33c8023b6bdf4fd81a6621"
},
"entity_structure_SV-UserMentionsImprovements-Listener_useroptionEntityStructure.json": {
"hash": "364e89653ac279ee5167052e05e7882a"
}
}
|
502b56c879b18db34cb3c067e2adca021762c3b9
|
Cargo.toml
|
Cargo.toml
|
[package]
name = "antimony"
version = "0.0.0"
homepage = "https://antimony.rs"
authors = ["Mohammed Makhlouf <[email protected]>","Mohammad Samir <[email protected]>"]
keywords = ["stream-processing","distributed", "real-time","drpc"]
repository = "https://github.com/antimonyproject/antimony"
documentation = "https://docs.rs/antimony"
readme = "README.md"
license = "MIT/Apache-2.0"
description = """
A topology builder crate for Antimony. Version 0.0.0 is published as a placeholder.
"""
[dependencies]
rustc-serialize = "0.3.24"
futures = "0.1.16"
tokio-core = "0.1.9"
tokio-io = "0.1.3"
tokio-uds = "0.1.5"
futures-cpupool = "0.1.6"
|
[package]
name = "antimony"
version = "0.0.1"
homepage = "https://antimony.rs"
authors = ["Mohammed Makhlouf <[email protected]>","Mohammad Samir <[email protected]>"]
keywords = ["stream-processing","distributed", "real-time","drpc"]
repository = "https://github.com/antimonyproject/antimony"
documentation = "https://docs.rs/antimony"
readme = "README.md"
license = "MIT/Apache-2.0"
description = """
A topology builder crate for Antimony.
"""
[dependencies]
rustc-serialize = "0.3.24"
futures = "0.1.16"
tokio-core = "0.1.9"
tokio-io = "0.1.3"
tokio-uds = "0.1.5"
futures-cpupool = "0.1.6"
|
Change to version 0.0.1 & change description
|
Change to version 0.0.1 & change description
|
TOML
|
mit
|
antimonyproject/antimony
|
toml
|
## Code Before:
[package]
name = "antimony"
version = "0.0.0"
homepage = "https://antimony.rs"
authors = ["Mohammed Makhlouf <[email protected]>","Mohammad Samir <[email protected]>"]
keywords = ["stream-processing","distributed", "real-time","drpc"]
repository = "https://github.com/antimonyproject/antimony"
documentation = "https://docs.rs/antimony"
readme = "README.md"
license = "MIT/Apache-2.0"
description = """
A topology builder crate for Antimony. Version 0.0.0 is published as a placeholder.
"""
[dependencies]
rustc-serialize = "0.3.24"
futures = "0.1.16"
tokio-core = "0.1.9"
tokio-io = "0.1.3"
tokio-uds = "0.1.5"
futures-cpupool = "0.1.6"
## Instruction:
Change to version 0.0.1 & change description
## Code After:
[package]
name = "antimony"
version = "0.0.1"
homepage = "https://antimony.rs"
authors = ["Mohammed Makhlouf <[email protected]>","Mohammad Samir <[email protected]>"]
keywords = ["stream-processing","distributed", "real-time","drpc"]
repository = "https://github.com/antimonyproject/antimony"
documentation = "https://docs.rs/antimony"
readme = "README.md"
license = "MIT/Apache-2.0"
description = """
A topology builder crate for Antimony.
"""
[dependencies]
rustc-serialize = "0.3.24"
futures = "0.1.16"
tokio-core = "0.1.9"
tokio-io = "0.1.3"
tokio-uds = "0.1.5"
futures-cpupool = "0.1.6"
|
8963e3adee2e8857f49aa911869fab378e91ddd3
|
app/controllers/recipe_controller.rb
|
app/controllers/recipe_controller.rb
|
require 'rakuten_web_service'
class RecipeController < ApplicationController
def index
RakutenWebService.configure do |c|
c.application_id = ENV["APPID"]
c.affiliate_id = ENV["AFID"]
end
@large_categories = RakutenWebService::Recipe.large_categories
@menus = RakutenWebService::Recipe.ranking(15)
@title = 'rakuten_recipe_test'
end
end
|
require 'rakuten_web_service'
class RecipeController < ApplicationController
def pickup
rakuten_api
@menus = RakutenWebService::Recipe.ranking(15)
end
# Test Page
def index
rakuten_api
@large_categories = RakutenWebService::Recipe.large_categories
@menus = RakutenWebService::Recipe.ranking(15)
@title = 'rakuten_recipe_test'
end
private
# For Rakuten API Setting
def rakuten_api
RakutenWebService.configure do |c|
c.application_id = ENV["APPID"]
c.affiliate_id = ENV["AFID"]
end
end
end
|
Add API call for pickup
|
Add API call for pickup
|
Ruby
|
mit
|
terra-yucco/ruthenium,terra-yucco/ruthenium,terra-yucco/ruthenium
|
ruby
|
## Code Before:
require 'rakuten_web_service'
class RecipeController < ApplicationController
def index
RakutenWebService.configure do |c|
c.application_id = ENV["APPID"]
c.affiliate_id = ENV["AFID"]
end
@large_categories = RakutenWebService::Recipe.large_categories
@menus = RakutenWebService::Recipe.ranking(15)
@title = 'rakuten_recipe_test'
end
end
## Instruction:
Add API call for pickup
## Code After:
require 'rakuten_web_service'
class RecipeController < ApplicationController
def pickup
rakuten_api
@menus = RakutenWebService::Recipe.ranking(15)
end
# Test Page
def index
rakuten_api
@large_categories = RakutenWebService::Recipe.large_categories
@menus = RakutenWebService::Recipe.ranking(15)
@title = 'rakuten_recipe_test'
end
private
# For Rakuten API Setting
def rakuten_api
RakutenWebService.configure do |c|
c.application_id = ENV["APPID"]
c.affiliate_id = ENV["AFID"]
end
end
end
|
583ea6c1a234ab9d484b1e80e7f567d9a5d2fb71
|
shopify/resources/image.py
|
shopify/resources/image.py
|
from ..base import ShopifyResource
import base64
import re
class Image(ShopifyResource):
_prefix_source = "/admin/products/$product_id/"
def __getattr__(self, name):
if name in ["pico", "icon", "thumb", "small", "compact", "medium", "large", "grande", "original"]:
return re.sub(r"/(.*)\.(\w{2,4})", r"/\1_%s.\2" % (name), self.src)
else:
return super(Image, self).__getattr__(name)
def attach_image(self, data, filename=None):
self.attributes["attachment"] = base64.b64encode(data)
if filename:
self.attributes["filename"] = filename
|
from ..base import ShopifyResource
from ..resources import Metafield
from six.moves import urllib
import base64
import re
class Image(ShopifyResource):
_prefix_source = "/admin/products/$product_id/"
def __getattr__(self, name):
if name in ["pico", "icon", "thumb", "small", "compact", "medium", "large", "grande", "original"]:
return re.sub(r"/(.*)\.(\w{2,4})", r"/\1_%s.\2" % (name), self.src)
else:
return super(Image, self).__getattr__(name)
def attach_image(self, data, filename=None):
self.attributes["attachment"] = base64.b64encode(data)
if filename:
self.attributes["filename"] = filename
def metafields(self):
if self.is_new():
return []
query_params = { 'metafield[owner_id]': self.id, 'metafield[owner_resource]': 'product_image' }
return Metafield.find(from_ = '/admin/metafields.json?%s' % urllib.parse.urlencode(query_params))
|
Add `metafields()` method to `Image` resource.
|
Add `metafields()` method to `Image` resource.
|
Python
|
mit
|
asiviero/shopify_python_api,SmileyJames/shopify_python_api,Shopify/shopify_python_api,metric-collective/shopify_python_api,gavinballard/shopify_python_api,ifnull/shopify_python_api
|
python
|
## Code Before:
from ..base import ShopifyResource
import base64
import re
class Image(ShopifyResource):
_prefix_source = "/admin/products/$product_id/"
def __getattr__(self, name):
if name in ["pico", "icon", "thumb", "small", "compact", "medium", "large", "grande", "original"]:
return re.sub(r"/(.*)\.(\w{2,4})", r"/\1_%s.\2" % (name), self.src)
else:
return super(Image, self).__getattr__(name)
def attach_image(self, data, filename=None):
self.attributes["attachment"] = base64.b64encode(data)
if filename:
self.attributes["filename"] = filename
## Instruction:
Add `metafields()` method to `Image` resource.
## Code After:
from ..base import ShopifyResource
from ..resources import Metafield
from six.moves import urllib
import base64
import re
class Image(ShopifyResource):
_prefix_source = "/admin/products/$product_id/"
def __getattr__(self, name):
if name in ["pico", "icon", "thumb", "small", "compact", "medium", "large", "grande", "original"]:
return re.sub(r"/(.*)\.(\w{2,4})", r"/\1_%s.\2" % (name), self.src)
else:
return super(Image, self).__getattr__(name)
def attach_image(self, data, filename=None):
self.attributes["attachment"] = base64.b64encode(data)
if filename:
self.attributes["filename"] = filename
def metafields(self):
if self.is_new():
return []
query_params = { 'metafield[owner_id]': self.id, 'metafield[owner_resource]': 'product_image' }
return Metafield.find(from_ = '/admin/metafields.json?%s' % urllib.parse.urlencode(query_params))
|
74c3ed0cfbfe917b6c386211fc64ff4925fdc036
|
lib/mix/test/mix/tasks/clean_test.exs
|
lib/mix/test/mix/tasks/clean_test.exs
|
Code.require_file "../../test_helper.exs", __DIR__
defmodule Mix.Tasks.CleanTest do
use MixTest.Case
test "compile a project without mixfile" do
in_fixture "no_mixfile", fn ->
Mix.Tasks.Compile.run []
assert File.regular?("ebin/Elixir.A.beam")
Mix.Tasks.Clean.run []
refute File.regular?("ebin/Elixir.A.beam")
end
end
end
|
Code.require_file "../../test_helper.exs", __DIR__
defmodule Mix.Tasks.CleanTest do
use MixTest.Case
defmodule DepsApp do
def project do
[
app: :sample,
version: "0.1.0",
deps: [
{ :tidy, "0.1.0", path: "elixir-lang/tidy" }
]
]
end
end
test "compile a project without mixfile" do
in_fixture "no_mixfile", fn ->
Mix.Tasks.Compile.run []
assert File.regular?("ebin/Elixir.A.beam")
Mix.Tasks.Clean.run []
refute File.regular?("ebin/Elixir.A.beam")
end
end
test "cleans all repos" do
Mix.Project.push DepsApp
in_fixture "deps_status", fn ->
Mix.Tasks.Clean.run ["--all"]
assert_received { :mix_shell, :info, ["* Cleaning tidy [path: \"elixir-lang/tidy\"]"] }
end
after
Mix.Project.pop
end
end
|
Test mix clean with the --all flag.
|
Test mix clean with the --all flag.
|
Elixir
|
apache-2.0
|
beedub/elixir,kimshrier/elixir,pedrosnk/elixir,gfvcastro/elixir,antipax/elixir,gfvcastro/elixir,kelvinst/elixir,joshprice/elixir,antipax/elixir,lexmag/elixir,ggcampinho/elixir,elixir-lang/elixir,beedub/elixir,kelvinst/elixir,pedrosnk/elixir,ggcampinho/elixir,michalmuskala/elixir,kimshrier/elixir,lexmag/elixir
|
elixir
|
## Code Before:
Code.require_file "../../test_helper.exs", __DIR__
defmodule Mix.Tasks.CleanTest do
use MixTest.Case
test "compile a project without mixfile" do
in_fixture "no_mixfile", fn ->
Mix.Tasks.Compile.run []
assert File.regular?("ebin/Elixir.A.beam")
Mix.Tasks.Clean.run []
refute File.regular?("ebin/Elixir.A.beam")
end
end
end
## Instruction:
Test mix clean with the --all flag.
## Code After:
Code.require_file "../../test_helper.exs", __DIR__
defmodule Mix.Tasks.CleanTest do
use MixTest.Case
defmodule DepsApp do
def project do
[
app: :sample,
version: "0.1.0",
deps: [
{ :tidy, "0.1.0", path: "elixir-lang/tidy" }
]
]
end
end
test "compile a project without mixfile" do
in_fixture "no_mixfile", fn ->
Mix.Tasks.Compile.run []
assert File.regular?("ebin/Elixir.A.beam")
Mix.Tasks.Clean.run []
refute File.regular?("ebin/Elixir.A.beam")
end
end
test "cleans all repos" do
Mix.Project.push DepsApp
in_fixture "deps_status", fn ->
Mix.Tasks.Clean.run ["--all"]
assert_received { :mix_shell, :info, ["* Cleaning tidy [path: \"elixir-lang/tidy\"]"] }
end
after
Mix.Project.pop
end
end
|
e4681ab97c4e281ed29d49fbecaeeb03da315619
|
spec/unit/concurrent/synchronized_sorted_set_spec.rb
|
spec/unit/concurrent/synchronized_sorted_set_spec.rb
|
require "spec_helper"
require "bunny/concurrent/synchronized_sorted_set"
describe Bunny::Concurrent::SynchronizedSortedSet do
it "synchronizes common operations needed by Bunny" do
subject.length.should == 0
10.times do
Thread.new do
subject << 1
subject << 1
subject << 2
subject << 3
subject << 4
subject << 4
subject << 4
subject << 4
subject << 5
subject << 5
subject << 5
subject << 5
subject << 6
subject << 7
subject << 8
subject.delete 8
subject.delete_if { |i| i == 1 }
end
end
subject.length.should == 6
end
end
|
require "spec_helper"
require "bunny/concurrent/synchronized_sorted_set"
describe Bunny::Concurrent::SynchronizedSortedSet do
it "synchronizes common operations needed by Bunny" do
s = described_class.new
s.length.should == 0
10.times do
Thread.new do
s << 1
s << 1
s << 2
s << 3
s << 4
s << 4
s << 4
s << 4
s << 5
s << 5
s << 5
s << 5
s << 6
s << 7
s << 8
s.delete 8
s.delete_if { |i| i == 1 }
end
end
sleep 2.0
s.length.should == 6
end
end
|
Fix race conditions in this trivial unit test. Headdesk.
|
Fix race conditions in this trivial unit test. Headdesk.
|
Ruby
|
mit
|
teodor-pripoae/bunny,gferguson-gd/bunny,pressrelations/bunny,wconrad/bunny,bartj3/bunny,gferguson-gd/bunny,jafrog/bunny,bartj3/bunny,wconrad/bunny,pressrelations/bunny,0xfaded/bunny,jafrog/bunny,0xfaded/bunny,teodor-pripoae/bunny
|
ruby
|
## Code Before:
require "spec_helper"
require "bunny/concurrent/synchronized_sorted_set"
describe Bunny::Concurrent::SynchronizedSortedSet do
it "synchronizes common operations needed by Bunny" do
subject.length.should == 0
10.times do
Thread.new do
subject << 1
subject << 1
subject << 2
subject << 3
subject << 4
subject << 4
subject << 4
subject << 4
subject << 5
subject << 5
subject << 5
subject << 5
subject << 6
subject << 7
subject << 8
subject.delete 8
subject.delete_if { |i| i == 1 }
end
end
subject.length.should == 6
end
end
## Instruction:
Fix race conditions in this trivial unit test. Headdesk.
## Code After:
require "spec_helper"
require "bunny/concurrent/synchronized_sorted_set"
describe Bunny::Concurrent::SynchronizedSortedSet do
it "synchronizes common operations needed by Bunny" do
s = described_class.new
s.length.should == 0
10.times do
Thread.new do
s << 1
s << 1
s << 2
s << 3
s << 4
s << 4
s << 4
s << 4
s << 5
s << 5
s << 5
s << 5
s << 6
s << 7
s << 8
s.delete 8
s.delete_if { |i| i == 1 }
end
end
sleep 2.0
s.length.should == 6
end
end
|
ca89111eebd62fb8bf80fcb8308ecd83088af41e
|
app/views/public/files/_list.blade.php
|
app/views/public/files/_list.blade.php
|
@if(count($files))
<div class="row">
@foreach($files as $file)
<div class="col-sm-3 col-md-2 col-xs-4">
<a href="{{ '/'.$file->path.'/'.$file->filename }}" class="thumbnail fancybox" rel="gallery">
<img src="{{ Croppa::url('/'.$file->path.'/'.$file->filename, 310, 310) }}" alt="{{ $file->alt_attribute }}">
<!-- <div class="caption">
<p>
</p>
</div> -->
</a>
</div>
@endforeach
</div>
@endif
|
@if(count($files))
@foreach(array_chunk($files->all(), 4) as $row)
<div class="row">
@foreach($row as $file)
<div class="col-xs-3">
<a href="{{ '/'.$file->path.'/'.$file->filename }}" class="thumbnail fancybox" rel="gallery">
<img src="{{ Croppa::url('/'.$file->path.'/'.$file->filename, 310, 310) }}" alt="{{ $file->alt_attribute }}">
<!-- <div class="caption">
<p>
</p>
</div> -->
</a>
</div>
@endforeach
</div>
@endforeach
@endif
|
Use of array_chunk for rows of images
|
Use of array_chunk for rows of images
|
PHP
|
mit
|
yaoshanliang/TypiCMS,sachintaware/TypiCMS,yaoshanliang/TypiCMS,yaoshanliang/TypiCMS,sachintaware/TypiCMS,sdebacker/TypiCMS,sdebacker/TypiCMS,elk1997/TypiCMS,sachintaware/TypiCMS,omusico/TypiCMS,omusico/TypiCMS,sdebacker/TypiCMS,elk1997/TypiCMS,omusico/TypiCMS
|
php
|
## Code Before:
@if(count($files))
<div class="row">
@foreach($files as $file)
<div class="col-sm-3 col-md-2 col-xs-4">
<a href="{{ '/'.$file->path.'/'.$file->filename }}" class="thumbnail fancybox" rel="gallery">
<img src="{{ Croppa::url('/'.$file->path.'/'.$file->filename, 310, 310) }}" alt="{{ $file->alt_attribute }}">
<!-- <div class="caption">
<p>
</p>
</div> -->
</a>
</div>
@endforeach
</div>
@endif
## Instruction:
Use of array_chunk for rows of images
## Code After:
@if(count($files))
@foreach(array_chunk($files->all(), 4) as $row)
<div class="row">
@foreach($row as $file)
<div class="col-xs-3">
<a href="{{ '/'.$file->path.'/'.$file->filename }}" class="thumbnail fancybox" rel="gallery">
<img src="{{ Croppa::url('/'.$file->path.'/'.$file->filename, 310, 310) }}" alt="{{ $file->alt_attribute }}">
<!-- <div class="caption">
<p>
</p>
</div> -->
</a>
</div>
@endforeach
</div>
@endforeach
@endif
|
83948474fe20d9d6b45fe5a9ce03b9dfaa7f28bd
|
mirrit/web/templates/index.html
|
mirrit/web/templates/index.html
|
{% extends "base.html" %}
{% block body_content %}
{% if g.user %}
hi {{ g.user.username }}!
{% if not g.user.github_access_token %}
<p>Auth with <a href="{{ url_for('github_auth') }}">GitHub</a></p>
{% else %}
<h3>github repos</h3>
<ul>
{% for repo in github_repos %}
<li><a href="{{ repo.url }}">{{ repo.full_name }}</a></li>
{% endfor %}
</ul>
{% endif %}
{% else %}
Please <a href="/login">login</a> or <a href="/signup">signup</a>.
{% endif %}
{% endblock %}
|
{% extends "base.html" %}
{% block body_content %}
{% if g.user %}
hi {{ g.user.username }}!
{% if not g.user.github_access_token %}
<p>Auth with <a href="{{ url_for('github_auth') }}">GitHub</a></p>
{% else %}
<h3>github repos</h3>
<ul>
{% for repo in github_repos %}
<li><a href="{{ repo.url }}">{{ repo.full_name }}</a>
{% if repo.is_tracked -%}
<button class="btn repo-untrack btn-primary">Untrack</button>
{%- else -%}
<button class="btn repo-track">Track</button>
{%- endif -%}
</li>
{% endfor %}
</ul>
{% endif %}
{% else %}
Please <a href="/login">login</a> or <a href="/signup">signup</a>.
{% endif %}
{% endblock %}
|
Add track/untrack buttons besides repos
|
Add track/untrack buttons besides repos
|
HTML
|
bsd-3-clause
|
1stvamp/mirrit
|
html
|
## Code Before:
{% extends "base.html" %}
{% block body_content %}
{% if g.user %}
hi {{ g.user.username }}!
{% if not g.user.github_access_token %}
<p>Auth with <a href="{{ url_for('github_auth') }}">GitHub</a></p>
{% else %}
<h3>github repos</h3>
<ul>
{% for repo in github_repos %}
<li><a href="{{ repo.url }}">{{ repo.full_name }}</a></li>
{% endfor %}
</ul>
{% endif %}
{% else %}
Please <a href="/login">login</a> or <a href="/signup">signup</a>.
{% endif %}
{% endblock %}
## Instruction:
Add track/untrack buttons besides repos
## Code After:
{% extends "base.html" %}
{% block body_content %}
{% if g.user %}
hi {{ g.user.username }}!
{% if not g.user.github_access_token %}
<p>Auth with <a href="{{ url_for('github_auth') }}">GitHub</a></p>
{% else %}
<h3>github repos</h3>
<ul>
{% for repo in github_repos %}
<li><a href="{{ repo.url }}">{{ repo.full_name }}</a>
{% if repo.is_tracked -%}
<button class="btn repo-untrack btn-primary">Untrack</button>
{%- else -%}
<button class="btn repo-track">Track</button>
{%- endif -%}
</li>
{% endfor %}
</ul>
{% endif %}
{% else %}
Please <a href="/login">login</a> or <a href="/signup">signup</a>.
{% endif %}
{% endblock %}
|
7a936665eff8a6a8f6889334ad2238cbfcded18b
|
member.py
|
member.py
|
import requests
from credentials import label_id
from gmailauth import refresh
access_token = refresh()
headers = {'Authorization': ('Bearer ' + access_token)}
def list_messages(headers):
params = {'labelIds': label_id, 'q': 'newer_than:3d'}
r = requests.get('https://www.googleapis.com/gmail/v1/users/me/messages',
headers=headers, params=params)
j = r.json()
messages = []
if 'messages' in j:
messages.extend(j['messages'])
# return messages
message_ids = []
for item in messages:
message_ids.append(item['id'])
return message_ids
print(list_messages(headers))
def get_message(headers, identity):
params = {'id': identity, format: 'metadata'}
r = requests.get('https://www.googleapis.com/gmail/v1/users/me/messages/id',
headers=headers, params=params)
j = r.json()
print(r.status_code, r.reason)
h = j['payload']
subject = ''
for header in h['headers']:
if header['name'] == 'Subject':
subject = header['value']
break
print(subject)
for item in list_messages(headers):
get_message(headers, item)
# get_message(headers, list_messages(headers))
|
import requests
from base64 import urlsafe_b64decode
from credentials import label_id, url1, url2
from gmailauth import refresh
# access_token = refresh()
headers = {'Authorization': ('Bearer ' + access_token)}
def list_messages(headers):
params = {'labelIds': label_id, 'q': 'newer_than:2d'}
r = requests.get('https://www.googleapis.com/gmail/v1/users/me/messages',
headers=headers, params=params)
j = r.json()
messages = []
if 'messages' in j:
messages.extend(j['messages'])
# return messages
message_ids = []
for item in messages:
message_ids.append(item['id'])
return message_ids
def get_message(headers, identity):
params = {'id': identity, 'format': 'raw'}
r = requests.get('https://www.googleapis.com/gmail/v1/users/me/messages/id',
headers=headers, params=params)
j = r.json()
raw = j['raw']
d = urlsafe_b64decode(raw)
p = d.decode()
s = p.find('https')
l = len(p)
print(p[s:l])
print('----------')
return(p[s:l])
# for item in list_messages(headers):
# get_message(headers, item)
|
Return the order details URL from email body.
|
Return the order details URL from email body.
There is currently no Agile API method that will return the order
details for an activity so the URL from the email must be used in
conjunction with a web scraper to get the relevant details.
|
Python
|
mit
|
deadlyraptor/reels
|
python
|
## Code Before:
import requests
from credentials import label_id
from gmailauth import refresh
access_token = refresh()
headers = {'Authorization': ('Bearer ' + access_token)}
def list_messages(headers):
params = {'labelIds': label_id, 'q': 'newer_than:3d'}
r = requests.get('https://www.googleapis.com/gmail/v1/users/me/messages',
headers=headers, params=params)
j = r.json()
messages = []
if 'messages' in j:
messages.extend(j['messages'])
# return messages
message_ids = []
for item in messages:
message_ids.append(item['id'])
return message_ids
print(list_messages(headers))
def get_message(headers, identity):
params = {'id': identity, format: 'metadata'}
r = requests.get('https://www.googleapis.com/gmail/v1/users/me/messages/id',
headers=headers, params=params)
j = r.json()
print(r.status_code, r.reason)
h = j['payload']
subject = ''
for header in h['headers']:
if header['name'] == 'Subject':
subject = header['value']
break
print(subject)
for item in list_messages(headers):
get_message(headers, item)
# get_message(headers, list_messages(headers))
## Instruction:
Return the order details URL from email body.
There is currently no Agile API method that will return the order
details for an activity so the URL from the email must be used in
conjunction with a web scraper to get the relevant details.
## Code After:
import requests
from base64 import urlsafe_b64decode
from credentials import label_id, url1, url2
from gmailauth import refresh
# access_token = refresh()
headers = {'Authorization': ('Bearer ' + access_token)}
def list_messages(headers):
params = {'labelIds': label_id, 'q': 'newer_than:2d'}
r = requests.get('https://www.googleapis.com/gmail/v1/users/me/messages',
headers=headers, params=params)
j = r.json()
messages = []
if 'messages' in j:
messages.extend(j['messages'])
# return messages
message_ids = []
for item in messages:
message_ids.append(item['id'])
return message_ids
def get_message(headers, identity):
params = {'id': identity, 'format': 'raw'}
r = requests.get('https://www.googleapis.com/gmail/v1/users/me/messages/id',
headers=headers, params=params)
j = r.json()
raw = j['raw']
d = urlsafe_b64decode(raw)
p = d.decode()
s = p.find('https')
l = len(p)
print(p[s:l])
print('----------')
return(p[s:l])
# for item in list_messages(headers):
# get_message(headers, item)
|
deaea4fc28155e1854c7ab5257127b31d8f1fb2b
|
app/models/chouette/timeband.rb
|
app/models/chouette/timeband.rb
|
module Chouette
class TimebandValidator < ActiveModel::Validator
def validate(record)
if record.end_time <= record.start_time
record.errors[:end_time] << I18n.t('activerecord.errors.models.timeband.start_must_be_before_end')
end
end
end
class Timeband < Chouette::TridentActiveRecord
self.primary_key = "id"
validates :start_time, :end_time, presence: true
validates_with TimebandValidator
def self.object_id_key
"Timeband"
end
end
end
|
module Chouette
class TimebandValidator < ActiveModel::Validator
def validate(record)
if record.end_time <= record.start_time
record.errors[:end_time] << I18n.t('activerecord.errors.models.timeband.start_must_be_before_end')
end
end
end
class Timeband < Chouette::TridentActiveRecord
self.primary_key = "id"
validates :start_time, :end_time, presence: true
validates_with TimebandValidator
default_scope { order(:start_time) }
def self.object_id_key
"Timeband"
end
end
end
|
Add default order for Timebands
|
Add default order for Timebands
|
Ruby
|
mit
|
afimb/ninoxe,afimb/ninoxe,afimb/ninoxe
|
ruby
|
## Code Before:
module Chouette
class TimebandValidator < ActiveModel::Validator
def validate(record)
if record.end_time <= record.start_time
record.errors[:end_time] << I18n.t('activerecord.errors.models.timeband.start_must_be_before_end')
end
end
end
class Timeband < Chouette::TridentActiveRecord
self.primary_key = "id"
validates :start_time, :end_time, presence: true
validates_with TimebandValidator
def self.object_id_key
"Timeband"
end
end
end
## Instruction:
Add default order for Timebands
## Code After:
module Chouette
class TimebandValidator < ActiveModel::Validator
def validate(record)
if record.end_time <= record.start_time
record.errors[:end_time] << I18n.t('activerecord.errors.models.timeband.start_must_be_before_end')
end
end
end
class Timeband < Chouette::TridentActiveRecord
self.primary_key = "id"
validates :start_time, :end_time, presence: true
validates_with TimebandValidator
default_scope { order(:start_time) }
def self.object_id_key
"Timeband"
end
end
end
|
2829d090861b23edf73b109b5ea5bfb765120d09
|
css/main.css
|
css/main.css
|
body {
margin: 60px auto;
width: 70%;
}
nav ul, footer ul {
font-family:'Helvetica', 'Arial', 'Sans-Serif';
padding: 0px;
list-style: none;
font-weight: bold;
font-size: .9em;
color: red;
}
nav ul li, footer ul li {
display: inline;
margin-right: 20px;
}
a {
text-decoration: none;
color: red;
}
a:hover {
text-decoration: underline;
color: purple;
}
h1 {
font-size: 1.7em;
font-family:'Helvetica', 'Arial', 'Sans-Serif';
}
h2 {
font-size: 1.2em;
font-family:'Helvetica', 'Arial', 'Sans-Serif';
h3 {
font-size: 1em;
font-family:'Helvetica', 'Arial', 'Sans-Serif';
}
p {
color: #333;
font-family:'Helvetica', 'Arial', 'Sans-Serif';
}
footer {
border-top: 1px solid #d5d5d5;
font-size: .8em;
}
ul.posts {
margin: 20px auto 40px;
}
ul.posts li {
list-style: none;
}
|
body {
margin: 60px auto;
width: 70%;
}
nav ul, footer ul {
font-family:'Helvetica', 'Arial', 'Sans-Serif';
padding: 0px;
list-style: none;
font-weight: bold;
font-size: .9em;
color: red;
}
nav ul li, footer ul li {
display: inline;
margin-right: 20px;
}
a {
text-decoration: none;
color: red;
}
a:hover {
text-decoration: underline;
color: purple;
}
h1 {
font-size: 1.7em;
font-family:'Helvetica', 'Arial', 'Sans-Serif';
}
h2 {
font-size: 1.2em;
font-family:'Helvetica', 'Arial', 'Sans-Serif';
h3 {
font-size: 1em;
font-family:'Helvetica', 'Arial', 'Sans-Serif';
}
p {
color: #333;
font-family:'Helvetica', 'Arial', 'Sans-Serif';
}
footer {
border-top: 1px solid #d5d5d5;
font-size: .8em;
}
ul.posts {
margin: 20px auto 40px;
}
ul.posts li {
list-style: none;
}
.post {
border-bottom: 1px solid black !important;
}
|
Add horizontal divider between posts
|
Add horizontal divider between posts
|
CSS
|
mit
|
yviedev/yviedev.github.io,yviedev/yviedev.github.io,yviedev/yviedev.github.io,yviedev/yviedev.github.io
|
css
|
## Code Before:
body {
margin: 60px auto;
width: 70%;
}
nav ul, footer ul {
font-family:'Helvetica', 'Arial', 'Sans-Serif';
padding: 0px;
list-style: none;
font-weight: bold;
font-size: .9em;
color: red;
}
nav ul li, footer ul li {
display: inline;
margin-right: 20px;
}
a {
text-decoration: none;
color: red;
}
a:hover {
text-decoration: underline;
color: purple;
}
h1 {
font-size: 1.7em;
font-family:'Helvetica', 'Arial', 'Sans-Serif';
}
h2 {
font-size: 1.2em;
font-family:'Helvetica', 'Arial', 'Sans-Serif';
h3 {
font-size: 1em;
font-family:'Helvetica', 'Arial', 'Sans-Serif';
}
p {
color: #333;
font-family:'Helvetica', 'Arial', 'Sans-Serif';
}
footer {
border-top: 1px solid #d5d5d5;
font-size: .8em;
}
ul.posts {
margin: 20px auto 40px;
}
ul.posts li {
list-style: none;
}
## Instruction:
Add horizontal divider between posts
## Code After:
body {
margin: 60px auto;
width: 70%;
}
nav ul, footer ul {
font-family:'Helvetica', 'Arial', 'Sans-Serif';
padding: 0px;
list-style: none;
font-weight: bold;
font-size: .9em;
color: red;
}
nav ul li, footer ul li {
display: inline;
margin-right: 20px;
}
a {
text-decoration: none;
color: red;
}
a:hover {
text-decoration: underline;
color: purple;
}
h1 {
font-size: 1.7em;
font-family:'Helvetica', 'Arial', 'Sans-Serif';
}
h2 {
font-size: 1.2em;
font-family:'Helvetica', 'Arial', 'Sans-Serif';
h3 {
font-size: 1em;
font-family:'Helvetica', 'Arial', 'Sans-Serif';
}
p {
color: #333;
font-family:'Helvetica', 'Arial', 'Sans-Serif';
}
footer {
border-top: 1px solid #d5d5d5;
font-size: .8em;
}
ul.posts {
margin: 20px auto 40px;
}
ul.posts li {
list-style: none;
}
.post {
border-bottom: 1px solid black !important;
}
|
551ab867bb88315683d972cecd31b3d22b16712c
|
tox.ini
|
tox.ini
|
[tox]
envlist = py35,docs
[testenv]
deps =
--no-deps
--requirement
{toxinidir}/requirements-dev.txt
commands = paver test_all
[testenv:docs]
basepython = python
commands = paver doc_html
[flake8]
exclude = docs/*,venv/*
|
[tox]
envlist = py35,docs
[testenv]
deps =
--no-deps
--requirement
{toxinidir}/requirements-dev.txt
commands = paver test_all
[testenv:docs]
basepython = python
commands = paver doc_html
[flake8]
exclude = docs/*,venv/*,pavement.py
|
Exclude pavement.py from style checks.
|
Exclude pavement.py from style checks.
|
INI
|
mit
|
abalkin/tz
|
ini
|
## Code Before:
[tox]
envlist = py35,docs
[testenv]
deps =
--no-deps
--requirement
{toxinidir}/requirements-dev.txt
commands = paver test_all
[testenv:docs]
basepython = python
commands = paver doc_html
[flake8]
exclude = docs/*,venv/*
## Instruction:
Exclude pavement.py from style checks.
## Code After:
[tox]
envlist = py35,docs
[testenv]
deps =
--no-deps
--requirement
{toxinidir}/requirements-dev.txt
commands = paver test_all
[testenv:docs]
basepython = python
commands = paver doc_html
[flake8]
exclude = docs/*,venv/*,pavement.py
|
962d817864dfe0808d7ba530239a8eeb4071b1a5
|
.vscode/extensions.json
|
.vscode/extensions.json
|
{
// See https://go.microsoft.com/fwlink/?LinkId=827846 to learn about workspace recommendations.
// Extension identifier format: ${publisher}.${name}. Example: vscode.csharp
// List of extensions which should be recommended for users of this workspace.,
"recommendations": [
"dbaeumer.vscode-eslint",
"jasonnutter.vscode-codeowners",
"github.vscode-pull-request-github",
"ms-azure-devops.azure-pipelines",
"ziyasal.vscode-open-in-github",
"christian-kohler.npm-intellisense",
"ryanluker.vscode-coverage-gutters"
],
// List of extensions recommended by VS Code that should not be recommended for users of this workspace.
"unwantedRecommendations": [
]
}
|
{
// See https://go.microsoft.com/fwlink/?LinkId=827846 to learn about workspace recommendations.
// Extension identifier format: ${publisher}.${name}. Example: vscode.csharp
// List of extensions which should be recommended for users of this workspace.,
"recommendations": [
"dbaeumer.vscode-eslint",
"jasonnutter.vscode-codeowners",
"github.vscode-pull-request-github",
"ms-azure-devops.azure-pipelines",
"ziyasal.vscode-open-in-github",
"christian-kohler.npm-intellisense",
"ryanluker.vscode-coverage-gutters",
"mikestead.dotenv"
],
// List of extensions recommended by VS Code that should not be recommended for users of this workspace.
"unwantedRecommendations": [
]
}
|
Add dotenv vscode extension to recommendations
|
Add dotenv vscode extension to recommendations
|
JSON
|
mit
|
AzureAD/microsoft-authentication-library-for-js,AzureAD/microsoft-authentication-library-for-js,AzureAD/microsoft-authentication-library-for-js,AzureAD/microsoft-authentication-library-for-js,AzureAD/microsoft-authentication-library-for-js,AzureAD/microsoft-authentication-library-for-js,AzureAD/microsoft-authentication-library-for-js,AzureAD/microsoft-authentication-library-for-js
|
json
|
## Code Before:
{
// See https://go.microsoft.com/fwlink/?LinkId=827846 to learn about workspace recommendations.
// Extension identifier format: ${publisher}.${name}. Example: vscode.csharp
// List of extensions which should be recommended for users of this workspace.,
"recommendations": [
"dbaeumer.vscode-eslint",
"jasonnutter.vscode-codeowners",
"github.vscode-pull-request-github",
"ms-azure-devops.azure-pipelines",
"ziyasal.vscode-open-in-github",
"christian-kohler.npm-intellisense",
"ryanluker.vscode-coverage-gutters"
],
// List of extensions recommended by VS Code that should not be recommended for users of this workspace.
"unwantedRecommendations": [
]
}
## Instruction:
Add dotenv vscode extension to recommendations
## Code After:
{
// See https://go.microsoft.com/fwlink/?LinkId=827846 to learn about workspace recommendations.
// Extension identifier format: ${publisher}.${name}. Example: vscode.csharp
// List of extensions which should be recommended for users of this workspace.,
"recommendations": [
"dbaeumer.vscode-eslint",
"jasonnutter.vscode-codeowners",
"github.vscode-pull-request-github",
"ms-azure-devops.azure-pipelines",
"ziyasal.vscode-open-in-github",
"christian-kohler.npm-intellisense",
"ryanluker.vscode-coverage-gutters",
"mikestead.dotenv"
],
// List of extensions recommended by VS Code that should not be recommended for users of this workspace.
"unwantedRecommendations": [
]
}
|
5f46a311c8b26ab6379657bb741804f8a348fa82
|
README.md
|
README.md
|
Tips and notes I find useful for me
|
Tips and notes I find useful for me
## License
[](https://creativecommons.org/publicdomain/zero/1.0/)
|
Add Creative Commons license logo
|
Add Creative Commons license logo
|
Markdown
|
cc0-1.0
|
erictleung/programming-notes,erictleung/programming-notes,erictleung/programming-notes,erictleung/programming-notes,erictleung/programming-notes,erictleung/programming-notes,erictleung/programming-notes,erictleung/programming-notes,erictleung/programming-notes
|
markdown
|
## Code Before:
Tips and notes I find useful for me
## Instruction:
Add Creative Commons license logo
## Code After:
Tips and notes I find useful for me
## License
[](https://creativecommons.org/publicdomain/zero/1.0/)
|
882a77236113c62a94982086482a583514965e1f
|
common-functions.sh
|
common-functions.sh
|
set -eu
# Escape code
esc=$(echo -en "\033")
info="${esc}[0;33m"
normal=$(echo -en "${esc}[m\017")
java -version 2>&1 | grep -q '"9' || (echo "**** Fix your PATH! ****" && java -version && exit 1)
runTree()
{
if [[ "$OSTYPE" == "cygwin" ]] || [[ "$OSTYPE" == "msys" ]] ; then
cmd //c "tree /f /a $1"
else
tree -fl $1
fi
}
|
set -eu
# Escape code
esc=$(echo -en "\033")
info="${esc}[0;33m"
normal=$(echo -en "${esc}[m\017")
java -version 2>&1 | grep -q '"9' || (echo "**** Fix your PATH! ****" && java -version && exit 1)
javac -version 2>&1 | grep -q '9' || (echo "**** Fix your PATH! Cannot find command 'javac' (version 9) ****" && javac -version && exit 1)
runTree()
{
if [[ "$OSTYPE" == "cygwin" ]] || [[ "$OSTYPE" == "msys" ]] ; then
cmd //c "tree /f /a $1"
else
tree -fl $1
fi
}
|
Update the script that adds javac version checking
|
Update the script that adds javac version checking
|
Shell
|
cc0-1.0
|
neomatrix369/jdk9-jigsaw,AdoptOpenJDK/jdk9-jigsaw,AdoptOpenJDK/jdk9-jigsaw,AdoptOpenJDK/jdk9-jigsaw,neomatrix369/jdk9-jigsaw,neomatrix369/jdk9-jigsaw
|
shell
|
## Code Before:
set -eu
# Escape code
esc=$(echo -en "\033")
info="${esc}[0;33m"
normal=$(echo -en "${esc}[m\017")
java -version 2>&1 | grep -q '"9' || (echo "**** Fix your PATH! ****" && java -version && exit 1)
runTree()
{
if [[ "$OSTYPE" == "cygwin" ]] || [[ "$OSTYPE" == "msys" ]] ; then
cmd //c "tree /f /a $1"
else
tree -fl $1
fi
}
## Instruction:
Update the script that adds javac version checking
## Code After:
set -eu
# Escape code
esc=$(echo -en "\033")
info="${esc}[0;33m"
normal=$(echo -en "${esc}[m\017")
java -version 2>&1 | grep -q '"9' || (echo "**** Fix your PATH! ****" && java -version && exit 1)
javac -version 2>&1 | grep -q '9' || (echo "**** Fix your PATH! Cannot find command 'javac' (version 9) ****" && javac -version && exit 1)
runTree()
{
if [[ "$OSTYPE" == "cygwin" ]] || [[ "$OSTYPE" == "msys" ]] ; then
cmd //c "tree /f /a $1"
else
tree -fl $1
fi
}
|
d3b4fd52d0fcd61e4dbdb5ba30d1030a4e04198b
|
src/components/VmDisks/style.css
|
src/components/VmDisks/style.css
|
.disks-ul {
list-style-type: none;
display: inline;
}
.smaller {
font-size: 75%;
}
|
.disks-ul {
list-style-type: none;
display: inline;
padding: 0;
}
.smaller {
font-size: 75%;
}
|
Remove padding in disks list in VmDetail
|
Remove padding in disks list in VmDetail
|
CSS
|
apache-2.0
|
matobet/ovirt-ui-components,matobet/ovirt-ui-components
|
css
|
## Code Before:
.disks-ul {
list-style-type: none;
display: inline;
}
.smaller {
font-size: 75%;
}
## Instruction:
Remove padding in disks list in VmDetail
## Code After:
.disks-ul {
list-style-type: none;
display: inline;
padding: 0;
}
.smaller {
font-size: 75%;
}
|
139e6acc19040d89f304875c533513c9651f2906
|
budget_proj/budget_app/filters.py
|
budget_proj/budget_app/filters.py
|
from django.db.models import CharField
from django_filters import rest_framework as filters
from . import models
class DefaultFilterMeta:
"""
Set our default Filter configurations to DRY up the FilterSet Meta classes.
"""
# Let us filter by all fields except id
exclude = ('id',)
# We prefer case insensitive matching on CharFields
filter_overrides = {
CharField: {
'filter_class': filters.CharFilter,
'extra': lambda f: {
'lookup_expr': 'iexact',
},
},
}
class OcrbFilter(filters.FilterSet):
class Meta(DefaultFilterMeta):
model = models.OCRB
class KpmFilter(filters.FilterSet):
class Meta(DefaultFilterMeta):
model = models.KPM
class BudgetHistoryFilter(filters.FilterSet):
class Meta(DefaultFilterMeta):
model = models.BudgetHistory
class LookupCodeFilter(filters.FilterSet):
class Meta(DefaultFilterMeta):
model = models.LookupCode
|
from django.db.models import CharField
from django_filters import rest_framework as filters
from . import models
class CustomFilterBase(filters.FilterSet):
"""
Extends Filterset to populate help_text from the associated model field.
Works with swagger but not the builtin docs.
"""
@classmethod
def filter_for_field(cls, f, name, lookup_expr):
result = super().filter_for_field(f, name, lookup_expr)
if 'help_text' not in result.extra:
result.extra['help_text'] = f.help_text
return result
class DefaultFilterMeta:
"""
Defaults for:
- enable filtering by all model fields except `id`
- ignoring upper/lowercase when on CharFields
"""
# Let us filter by all fields except id
exclude = ('id',)
# We prefer case insensitive matching on CharFields
filter_overrides = {
CharField: {
'filter_class': filters.CharFilter,
'extra': lambda f: {
'lookup_expr': 'iexact',
},
},
}
class OcrbFilter(CustomFilterBase):
class Meta(DefaultFilterMeta):
model = models.OCRB
class KpmFilter(CustomFilterBase):
class Meta(DefaultFilterMeta):
model = models.KPM
class BudgetHistoryFilter(CustomFilterBase):
class Meta(DefaultFilterMeta):
model = models.BudgetHistory
class LookupCodeFilter(CustomFilterBase):
class Meta(DefaultFilterMeta):
model = models.LookupCode
|
Upgrade Filters fields to use docs from model fields
|
Upgrade Filters fields to use docs from model fields
|
Python
|
mit
|
jimtyhurst/team-budget,hackoregon/team-budget,hackoregon/team-budget,hackoregon/team-budget,jimtyhurst/team-budget,jimtyhurst/team-budget
|
python
|
## Code Before:
from django.db.models import CharField
from django_filters import rest_framework as filters
from . import models
class DefaultFilterMeta:
"""
Set our default Filter configurations to DRY up the FilterSet Meta classes.
"""
# Let us filter by all fields except id
exclude = ('id',)
# We prefer case insensitive matching on CharFields
filter_overrides = {
CharField: {
'filter_class': filters.CharFilter,
'extra': lambda f: {
'lookup_expr': 'iexact',
},
},
}
class OcrbFilter(filters.FilterSet):
class Meta(DefaultFilterMeta):
model = models.OCRB
class KpmFilter(filters.FilterSet):
class Meta(DefaultFilterMeta):
model = models.KPM
class BudgetHistoryFilter(filters.FilterSet):
class Meta(DefaultFilterMeta):
model = models.BudgetHistory
class LookupCodeFilter(filters.FilterSet):
class Meta(DefaultFilterMeta):
model = models.LookupCode
## Instruction:
Upgrade Filters fields to use docs from model fields
## Code After:
from django.db.models import CharField
from django_filters import rest_framework as filters
from . import models
class CustomFilterBase(filters.FilterSet):
"""
Extends Filterset to populate help_text from the associated model field.
Works with swagger but not the builtin docs.
"""
@classmethod
def filter_for_field(cls, f, name, lookup_expr):
result = super().filter_for_field(f, name, lookup_expr)
if 'help_text' not in result.extra:
result.extra['help_text'] = f.help_text
return result
class DefaultFilterMeta:
"""
Defaults for:
- enable filtering by all model fields except `id`
- ignoring upper/lowercase when on CharFields
"""
# Let us filter by all fields except id
exclude = ('id',)
# We prefer case insensitive matching on CharFields
filter_overrides = {
CharField: {
'filter_class': filters.CharFilter,
'extra': lambda f: {
'lookup_expr': 'iexact',
},
},
}
class OcrbFilter(CustomFilterBase):
class Meta(DefaultFilterMeta):
model = models.OCRB
class KpmFilter(CustomFilterBase):
class Meta(DefaultFilterMeta):
model = models.KPM
class BudgetHistoryFilter(CustomFilterBase):
class Meta(DefaultFilterMeta):
model = models.BudgetHistory
class LookupCodeFilter(CustomFilterBase):
class Meta(DefaultFilterMeta):
model = models.LookupCode
|
28607d4d8237d0b7b536b9e5ab95a1fc8bd5abf0
|
index.html
|
index.html
|
---
layout: default
title: Civic Hacking in Burlington, VT
---
<header id="overview" class="code-for-btv">
<h1>Code for BTV</h1>
<h2>A Code for America Brigade</h2>
<p>Join over 50 developers, technology integrators, designers and hackers of all types in building and maintaining <a href="#projects">civic software and open data projects</a> in the greater Burlington, VT area.</p>
</header>
<section class="next">
<article>
<p>Our next event is in Burlington on January 11. <a href="#code-for-btv-meetup" class="button">Join Us</a></p>
</article>
</section>
<section id="code-for-btv" class="vevent">
<h1>Stay Involved</h1>
<article id="code-for-btv-events">
<h1>Code for BTV</h1>
<p>Code for BTV is an ongoing effort to support civic hacking in the greater Burlington, VT area.</p>
<p>Periodically, we'll host events for hackers to come together and work on building and reusing civic apps and supporting open data initiatives.</p>
<nav>
<ul>
<li><strong>Stay in the loop about future events</strong> with our <a href="http://eepurl.com/zjEWf">newsletter</a> or follow <a href="https://twitter.com/CodeForBTV">@CodeForBTV</a>.</li>
</ul>
</nav>
</article>
<article id="code-for-btv-meetup">
<h1 class="summary"><abbr title="Code for BTV Meetup">Meetup</abbr></h1>
<p class="description">Participate in the next Code for BTV meetup. Participants will continue working on the <a href="#projects">civic hacking projects</a> that are actively being developed and/or maintained.</p>
<p class="time"><time class="dtstart" datetime="2014-01-11T13:00-04:00">Saturday, January 11, 2014 1pm</time>-<time class="dtend" datetime="2014-01-11T17:00-04:00">5pm</time></p>
<p class="location">Office Squared, 110 Main Street (Second Floor), Burlington, VT 05401</p>
</article>
</section>
|
---
layout: default
title: Civic Hacking in Burlington, VT
---
<header id="overview" class="code-for-btv">
<h1>Code for BTV</h1>
<h2>A Code for America Brigade</h2>
<p>Join over 50 developers, technology integrators, designers and hackers of all types in building and maintaining <a href="#projects">civic software and open data projects</a> in the greater Burlington, VT area.</p>
</header>
|
Remove sections about upcoming event.
|
Remove sections about upcoming event.
|
HTML
|
bsd-3-clause
|
nfloersch/codeforbtv.github.com,nfloersch/codeforbtv.github.com,nfloersch/codeforbtv.github.com
|
html
|
## Code Before:
---
layout: default
title: Civic Hacking in Burlington, VT
---
<header id="overview" class="code-for-btv">
<h1>Code for BTV</h1>
<h2>A Code for America Brigade</h2>
<p>Join over 50 developers, technology integrators, designers and hackers of all types in building and maintaining <a href="#projects">civic software and open data projects</a> in the greater Burlington, VT area.</p>
</header>
<section class="next">
<article>
<p>Our next event is in Burlington on January 11. <a href="#code-for-btv-meetup" class="button">Join Us</a></p>
</article>
</section>
<section id="code-for-btv" class="vevent">
<h1>Stay Involved</h1>
<article id="code-for-btv-events">
<h1>Code for BTV</h1>
<p>Code for BTV is an ongoing effort to support civic hacking in the greater Burlington, VT area.</p>
<p>Periodically, we'll host events for hackers to come together and work on building and reusing civic apps and supporting open data initiatives.</p>
<nav>
<ul>
<li><strong>Stay in the loop about future events</strong> with our <a href="http://eepurl.com/zjEWf">newsletter</a> or follow <a href="https://twitter.com/CodeForBTV">@CodeForBTV</a>.</li>
</ul>
</nav>
</article>
<article id="code-for-btv-meetup">
<h1 class="summary"><abbr title="Code for BTV Meetup">Meetup</abbr></h1>
<p class="description">Participate in the next Code for BTV meetup. Participants will continue working on the <a href="#projects">civic hacking projects</a> that are actively being developed and/or maintained.</p>
<p class="time"><time class="dtstart" datetime="2014-01-11T13:00-04:00">Saturday, January 11, 2014 1pm</time>-<time class="dtend" datetime="2014-01-11T17:00-04:00">5pm</time></p>
<p class="location">Office Squared, 110 Main Street (Second Floor), Burlington, VT 05401</p>
</article>
</section>
## Instruction:
Remove sections about upcoming event.
## Code After:
---
layout: default
title: Civic Hacking in Burlington, VT
---
<header id="overview" class="code-for-btv">
<h1>Code for BTV</h1>
<h2>A Code for America Brigade</h2>
<p>Join over 50 developers, technology integrators, designers and hackers of all types in building and maintaining <a href="#projects">civic software and open data projects</a> in the greater Burlington, VT area.</p>
</header>
|
6a4331e829b79f6551f700572c94cac955ffae75
|
indexer.ini
|
indexer.ini
|
[program:indexer]
directory=/var/index-service
command=/var/index-service/venv/bin/python index_service.py
stdout_logfile=/var/log/indexer-stdout.log
autostart=true
autorestart=true
startretries=3
|
[program:indexer]
directory=/var/index-service
command=/var/index-service/venv/bin/python index_service.py
stdout_logfile=/var/log/indexer-stdout.log
stderr_logfile=/var/log/indexer-stderr.log
autostart=true
autorestart=true
startretries=3
|
Make sure that all errors generated by the service are logged.
|
Make sure that all errors generated by the service are logged.
|
INI
|
mit
|
microserv/index-service
|
ini
|
## Code Before:
[program:indexer]
directory=/var/index-service
command=/var/index-service/venv/bin/python index_service.py
stdout_logfile=/var/log/indexer-stdout.log
autostart=true
autorestart=true
startretries=3
## Instruction:
Make sure that all errors generated by the service are logged.
## Code After:
[program:indexer]
directory=/var/index-service
command=/var/index-service/venv/bin/python index_service.py
stdout_logfile=/var/log/indexer-stdout.log
stderr_logfile=/var/log/indexer-stderr.log
autostart=true
autorestart=true
startretries=3
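For context, the stderr_logfile directive added here (supervisor-style config) only captures what the supervised process writes to its standard error stream. As a purely hypothetical sketch (the real index_service.py is not shown in this record), a Python service could route warnings and errors to stderr so they end up in the new log file:
```python
# Hypothetical sketch, not the actual index_service.py: send INFO output to
# stdout and WARNING/ERROR output to stderr, matching the two logfiles above.
import logging
import sys

stdout_handler = logging.StreamHandler(sys.stdout)            # -> indexer-stdout.log
stdout_handler.addFilter(lambda record: record.levelno < logging.WARNING)

stderr_handler = logging.StreamHandler(sys.stderr)            # -> indexer-stderr.log
stderr_handler.setLevel(logging.WARNING)

logging.basicConfig(level=logging.INFO, handlers=[stdout_handler, stderr_handler])

logging.info("indexer started")    # captured by stdout_logfile
logging.error("indexing failed")   # captured by stderr_logfile
```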
|
8fc9bb7f764a3b1eae032d93c48d31e69c7068d9
|
notifier/templates/digest-email.txt
|
notifier/templates/digest-email.txt
|
* {{ title }} *
{{ description }}
---
Hi {{ user.name }},
You have {{thread_count}} discussion thread{% if thread_count > 1 %}s{% endif %} with updates {% if course_count > 1 %}across{% else %}in{% endif %} {{course_names}}. The most recent highlights are shown below. As a reminder, you can turn off all discussion digests from any course's Discussion Home page.
{% for course in digest.courses %}
[{{ course.title }}]
{% for thread in course.threads %}
{{ thread.title }}
---
{% for item in thread.items %}
{{ item.body }}
- {{ item.author }} on {{ item.dt }} UTC
{% endfor %}
{% endfor %}
{% endfor %}
To unsubscribe from this list, go to: {{ unsubscribe_url }}
To update your subscription preferences, go to: {{ preferences_url }}
|
* {{ title }} *
{{ description }}
---
Hi {{ user.name }},
You have {{thread_count}} discussion thread{% if thread_count > 1 %}s{% endif %} with updates {% if course_count > 1 %}across{% else %}in{% endif %} {{course_names}}. The most recent highlights are shown below. As a reminder, you can turn off all discussion digests from any course's Discussion Home page.
{% for course in digest.courses %}
[{{ course.title }}]
{% for thread in course.threads %}
{{ thread.title }}
---
{% for item in thread.items %}
{{ item.author }}: on {{ item.dt }} UTC
{{ item.body }}
{% endfor %}
{% endfor %}
{% endfor %}
If you would like to stop receiving these updates, you can turn off all Course Discussion digests from any course's Discussion Home page. You can also quickly turn off these notifications by going to {{unsubscribe_url}}.
edX, 11 Cambridge Center, Cambridge, MA 02142
|
Update plaintext email template to match HTML
|
Update plaintext email template to match HTML
|
Text
|
agpl-3.0
|
edx/notifier,Stanford-Online/notifier,EDUlib/notifier,edx/notifier,EDUlib/notifier,Stanford-Online/notifier,nttks/notifier
|
text
|
## Code Before:
* {{ title }} *
{{ description }}
---
Hi {{ user.name }},
You have {{thread_count}} discussion thread{% if thread_count > 1 %}s{% endif %} with updates {% if course_count > 1 %}across{% else %}in{% endif %} {{course_names}}. The most recent highlights are shown below. As a reminder, you can turn off all discussion digests from any course's Discussion Home page.
{% for course in digest.courses %}
[{{ course.title }}]
{% for thread in course.threads %}
{{ thread.title }}
---
{% for item in thread.items %}
{{ item.body }}
- {{ item.author }} on {{ item.dt }} UTC
{% endfor %}
{% endfor %}
{% endfor %}
To unsubscribe from this list, go to: {{ unsubscribe_url }}
To update your subscription preferences, go to: {{ preferences_url }}
## Instruction:
Update plaintext email template to match HTML
## Code After:
* {{ title }} *
{{ description }}
---
Hi {{ user.name }},
You have {{thread_count}} discussion thread{% if thread_count > 1 %}s{% endif %} with updates {% if course_count > 1 %}across{% else %}in{% endif %} {{course_names}}. The most recent highlights are shown below. As a reminder, you can turn off all discussion digests from any course's Discussion Home page.
{% for course in digest.courses %}
[{{ course.title }}]
{% for thread in course.threads %}
{{ thread.title }}
---
{% for item in thread.items %}
{{ item.author }}: on {{ item.dt }} UTC
{{ item.body }}
{% endfor %}
{% endfor %}
{% endfor %}
If you would like to stop receiving these updates, you can turn off all Course Discussion digests from any course's Discussion Home page. You can also quickly turn off these notifications by going to {{unsubscribe_url}}.
edX, 11 Cambridge Center, Cambridge, MA 02142
|
c6bc46466f1e3789c91e6b8eabab772cac42710f
|
postgres-docker/update_common.sh
|
postgres-docker/update_common.sh
|
curl -o common.sh https://raw.githubusercontent.com/sclorg/postgresql-container/master/9.4/root/usr/share/container-scripts/postgresql/common.sh && patch common.sh < multiple_dbs.patch
|
curl -o common.sh https://raw.githubusercontent.com/sclorg/postgresql-container/master/9.4/root/usr/share/container-scripts/postgresql/common.sh && patch < multiple_dbs.patch
|
Remove specifying of file to patch
|
[Postgres] Remove specifying of file to patch
|
Shell
|
apache-2.0
|
jpopelka/fabric8-analytics-common,jpopelka/fabric8-analytics-common,tisnik/fabric8-analytics-common,tisnik/fabric8-analytics-common,jpopelka/fabric8-analytics-common,tisnik/fabric8-analytics-common
|
shell
|
## Code Before:
curl -o common.sh https://raw.githubusercontent.com/sclorg/postgresql-container/master/9.4/root/usr/share/container-scripts/postgresql/common.sh && patch common.sh < multiple_dbs.patch
## Instruction:
[Postgres] Remove specifying of file to patch
## Code After:
curl -o common.sh https://raw.githubusercontent.com/sclorg/postgresql-container/master/9.4/root/usr/share/container-scripts/postgresql/common.sh && patch < multiple_dbs.patch
|
925706f9e892eb1fbdf4e08a954acb193886bb70
|
tasks/browser_extension.js
|
tasks/browser_extension.js
|
/*
* grunt-browser-extension
* https://github.com/addmitriev/grunt-browser-extension
*
* Copyright (c) 2015 Aleksey Dmitriev
* Licensed under the MIT license.
*/
'use strict';
var util = require('util');
var path = require('path');
var fs = require('fs-extra');
module.exports = function (grunt) {
var BrowserExtension = require('./lib/browser-extension')(grunt);
grunt.registerMultiTask('browser_extension', 'Grunt plugin to create any browser website extension', function () {
if(this.target){
var options = this.options();
var required_options = [];
for(var required_options_id in required_options){
if(required_options_id){
var required_option = required_options[required_options_id];
if(!options[required_option]){
grunt.fail.fatal("Please set up all required options. All options must be string value! You have not setted " + required_option);
}
}
}
var pluginRoot = path.join(path.dirname(fs.realpathSync(__filename)), '../');
var bExt = new BrowserExtension(pluginRoot, options, this.target, grunt);
bExt.copyUserFiles();
grunt.verbose.ok('User files copied');
bExt.copyBrowserFiles();
grunt.verbose.ok('Extension files copied');
bExt.buildNsisIE();
grunt.verbose.ok('NSIS installer for IE builded');
bExt.build();
grunt.verbose.ok('Extensions builded');
}
});
};
|
/*
* grunt-browser-extension
* https://github.com/addmitriev/grunt-browser-extension
*
* Copyright (c) 2015 Aleksey Dmitriev
* Licensed under the MIT license.
*/
'use strict';
var util = require('util');
var path = require('path');
var fs = require('fs-extra');
module.exports = function (grunt) {
var BrowserExtension = require('./lib/browser-extension')(grunt);
grunt.registerMultiTask('browser_extension', 'Grunt plugin to create any browser website extension', function () {
if(this.target){
var options = this.options();
var required_options = [];
for(var required_options_id in required_options){
if(required_options_id){
var required_option = required_options[required_options_id];
if(!util.isString(options[required_option])){
grunt.fail.fatal("Please set up all required options. All options must be string value! You have not setted " + required_option);
}
}
}
var pluginRoot = path.join(path.dirname(fs.realpathSync(__filename)), '../');
var bExt = new BrowserExtension(pluginRoot, options, this.target, grunt);
bExt.copyUserFiles();
grunt.verbose.ok('User files copied');
bExt.copyBrowserFiles();
grunt.verbose.ok('Extension files copied');
bExt.buildNsisIE();
grunt.verbose.ok('NSIS installer for IE builded');
bExt.build();
grunt.verbose.ok('Extensions builded');
}
});
};
|
Check config and show which required options do not exist
|
Check config and show which required options do not exist
|
JavaScript
|
mit
|
Tuguusl/grunt-browser-extension,Tuguusl/grunt-browser-extension
|
javascript
|
## Code Before:
/*
* grunt-browser-extension
* https://github.com/addmitriev/grunt-browser-extension
*
* Copyright (c) 2015 Aleksey Dmitriev
* Licensed under the MIT license.
*/
'use strict';
var util = require('util');
var path = require('path');
var fs = require('fs-extra');
module.exports = function (grunt) {
var BrowserExtension = require('./lib/browser-extension')(grunt);
grunt.registerMultiTask('browser_extension', 'Grunt plugin to create any browser website extension', function () {
if(this.target){
var options = this.options();
var required_options = [];
for(var required_options_id in required_options){
if(required_options_id){
var required_option = required_options[required_options_id];
if(!options[required_option]){
grunt.fail.fatal("Please set up all required options. All options must be string value! You have not setted " + required_option);
}
}
}
var pluginRoot = path.join(path.dirname(fs.realpathSync(__filename)), '../');
var bExt = new BrowserExtension(pluginRoot, options, this.target, grunt);
bExt.copyUserFiles();
grunt.verbose.ok('User files copied');
bExt.copyBrowserFiles();
grunt.verbose.ok('Extension files copied');
bExt.buildNsisIE();
grunt.verbose.ok('NSIS installer for IE builded');
bExt.build();
grunt.verbose.ok('Extensions builded');
}
});
};
## Instruction:
Check config and show which required options do not exist
## Code After:
/*
* grunt-browser-extension
* https://github.com/addmitriev/grunt-browser-extension
*
* Copyright (c) 2015 Aleksey Dmitriev
* Licensed under the MIT license.
*/
'use strict';
var util = require('util');
var path = require('path');
var fs = require('fs-extra');
module.exports = function (grunt) {
var BrowserExtension = require('./lib/browser-extension')(grunt);
grunt.registerMultiTask('browser_extension', 'Grunt plugin to create any browser website extension', function () {
if(this.target){
var options = this.options();
var required_options = [];
for(var required_options_id in required_options){
if(required_options_id){
var required_option = required_options[required_options_id];
if(!util.isString(options[required_option])){
grunt.fail.fatal("Please set up all required options. All options must be string value! You have not setted " + required_option);
}
}
}
var pluginRoot = path.join(path.dirname(fs.realpathSync(__filename)), '../');
var bExt = new BrowserExtension(pluginRoot, options, this.target, grunt);
bExt.copyUserFiles();
grunt.verbose.ok('User files copied');
bExt.copyBrowserFiles();
grunt.verbose.ok('Extension files copied');
bExt.buildNsisIE();
grunt.verbose.ok('NSIS installer for IE builded');
bExt.build();
grunt.verbose.ok('Extensions builded');
}
});
};
|
f34a5d682832749dbf0011d162bf4c7c18892b45
|
zerver/apps.py
|
zerver/apps.py
|
import logging
from typing import Any, Dict
from django.apps import AppConfig
from django.conf import settings
from django.core.cache import cache
from django.db.models.signals import post_migrate
def flush_cache(sender: AppConfig, **kwargs: Any) -> None:
logging.info("Clearing memcached cache after migrations")
cache.clear()
class ZerverConfig(AppConfig):
name = "zerver" # type: str
def ready(self) -> None:
import zerver.signals
if settings.POST_MIGRATION_CACHE_FLUSHING:
post_migrate.connect(flush_cache, sender=self)
|
import logging
from typing import Any, Dict
from django.apps import AppConfig
from django.conf import settings
from django.core.cache import cache
from django.db.models.signals import post_migrate
def flush_cache(sender: AppConfig, **kwargs: Any) -> None:
logging.info("Clearing memcached cache after migrations")
cache.clear()
class ZerverConfig(AppConfig):
name = "zerver" # type: str
def ready(self) -> None:
# We import zerver.signals here for the side effect of
# registering the user_logged_in signal receiver. This import
# needs to be here (rather than e.g. at top-of-file) to avoid
# running that code too early in Django's setup process, but
# in any case, this is an intentionally unused import.
import zerver.signals
if settings.POST_MIGRATION_CACHE_FLUSHING:
post_migrate.connect(flush_cache, sender=self)
|
Document the weird unused import for signal registration.
|
signals: Document the weird unused import for signal registration.
|
Python
|
apache-2.0
|
timabbott/zulip,tommyip/zulip,zulip/zulip,andersk/zulip,andersk/zulip,eeshangarg/zulip,kou/zulip,eeshangarg/zulip,eeshangarg/zulip,showell/zulip,brainwane/zulip,andersk/zulip,rishig/zulip,synicalsyntax/zulip,andersk/zulip,tommyip/zulip,hackerkid/zulip,kou/zulip,zulip/zulip,showell/zulip,eeshangarg/zulip,shubhamdhama/zulip,rishig/zulip,brainwane/zulip,rht/zulip,timabbott/zulip,timabbott/zulip,shubhamdhama/zulip,andersk/zulip,rht/zulip,brainwane/zulip,punchagan/zulip,hackerkid/zulip,zulip/zulip,brainwane/zulip,hackerkid/zulip,showell/zulip,kou/zulip,kou/zulip,andersk/zulip,brainwane/zulip,rht/zulip,zulip/zulip,kou/zulip,zulip/zulip,shubhamdhama/zulip,punchagan/zulip,timabbott/zulip,brainwane/zulip,tommyip/zulip,punchagan/zulip,shubhamdhama/zulip,hackerkid/zulip,punchagan/zulip,synicalsyntax/zulip,synicalsyntax/zulip,hackerkid/zulip,synicalsyntax/zulip,showell/zulip,rht/zulip,rishig/zulip,rishig/zulip,showell/zulip,kou/zulip,synicalsyntax/zulip,punchagan/zulip,rht/zulip,eeshangarg/zulip,eeshangarg/zulip,rishig/zulip,timabbott/zulip,tommyip/zulip,shubhamdhama/zulip,zulip/zulip,timabbott/zulip,eeshangarg/zulip,hackerkid/zulip,rishig/zulip,rishig/zulip,tommyip/zulip,tommyip/zulip,shubhamdhama/zulip,zulip/zulip,hackerkid/zulip,kou/zulip,shubhamdhama/zulip,andersk/zulip,showell/zulip,timabbott/zulip,rht/zulip,rht/zulip,synicalsyntax/zulip,synicalsyntax/zulip,brainwane/zulip,tommyip/zulip,punchagan/zulip,showell/zulip,punchagan/zulip
|
python
|
## Code Before:
import logging
from typing import Any, Dict
from django.apps import AppConfig
from django.conf import settings
from django.core.cache import cache
from django.db.models.signals import post_migrate
def flush_cache(sender: AppConfig, **kwargs: Any) -> None:
logging.info("Clearing memcached cache after migrations")
cache.clear()
class ZerverConfig(AppConfig):
name = "zerver" # type: str
def ready(self) -> None:
import zerver.signals
if settings.POST_MIGRATION_CACHE_FLUSHING:
post_migrate.connect(flush_cache, sender=self)
## Instruction:
signals: Document the weird unused import for signal registration.
## Code After:
import logging
from typing import Any, Dict
from django.apps import AppConfig
from django.conf import settings
from django.core.cache import cache
from django.db.models.signals import post_migrate
def flush_cache(sender: AppConfig, **kwargs: Any) -> None:
logging.info("Clearing memcached cache after migrations")
cache.clear()
class ZerverConfig(AppConfig):
name = "zerver" # type: str
def ready(self) -> None:
# We import zerver.signals here for the side effect of
# registering the user_logged_in signal receiver. This import
# needs to be here (rather than e.g. at top-of-file) to avoid
# running that code too early in Django's setup process, but
# in any case, this is an intentionally unused import.
import zerver.signals
if settings.POST_MIGRATION_CACHE_FLUSHING:
post_migrate.connect(flush_cache, sender=self)
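The comment added above documents an import done purely for its side effect: loading the module runs its top-level code, which registers a signal receiver. A self-contained toy sketch of that pattern in plain Python (no Django; every name below is illustrative only):
```python
# Toy signal registry illustrating why an apparently unused import matters:
# importing the module that defines the handlers registers them as a side effect.
from typing import Callable, Dict, List

_registry: Dict[str, List[Callable[..., None]]] = {}

def connect(signal: str, handler: Callable[..., None]) -> None:
    _registry.setdefault(signal, []).append(handler)

def send(signal: str, **kwargs) -> None:
    for handler in _registry.get(signal, []):
        handler(**kwargs)

# In a real project this part would live in its own module (for example
# signals.py) and run the first time that module is imported.
def on_user_logged_in(username: str) -> None:
    print(f"{username} logged in")

connect("user_logged_in", on_user_logged_in)

if __name__ == "__main__":
    send("user_logged_in", username="alice")  # prints: alice logged in
```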
|
9cb1d45196b7e36055396fd6e2c661cb8dce5eb6
|
docs/source/releases.rst
|
docs/source/releases.rst
|
======================
Release Notes
======================
v0.1
-------
* TODO
|
======================
Release Notes
======================
Release 0.2
-----------
* Brings in dynd as a required dependency
for in-memory data.
Release 0.1
-----------
* Initial preview release
|
Add some minimal release notes
|
Add some minimal release notes
|
reStructuredText
|
bsd-3-clause
|
xlhtc007/blaze,mwiebe/blaze,ContinuumIO/blaze,ChinaQuants/blaze,nkhuyu/blaze,cowlicks/blaze,FrancescAlted/blaze,maxalbert/blaze,caseyclements/blaze,jcrist/blaze,mwiebe/blaze,aterrel/blaze,AbhiAgarwal/blaze,ContinuumIO/blaze,nkhuyu/blaze,maxalbert/blaze,AbhiAgarwal/blaze,cpcloud/blaze,markflorisson/blaze-core,dwillmer/blaze,aterrel/blaze,LiaoPan/blaze,xlhtc007/blaze,FrancescAlted/blaze,markflorisson/blaze-core,FrancescAlted/blaze,aterrel/blaze,alexmojaki/blaze,cpcloud/blaze,jdmcbr/blaze,mrocklin/blaze,mrocklin/blaze,LiaoPan/blaze,markflorisson/blaze-core,scls19fr/blaze,jcrist/blaze,jdmcbr/blaze,alexmojaki/blaze,AbhiAgarwal/blaze,scls19fr/blaze,caseyclements/blaze,mwiebe/blaze,cowlicks/blaze,AbhiAgarwal/blaze,ChinaQuants/blaze,dwillmer/blaze,FrancescAlted/blaze,markflorisson/blaze-core,mwiebe/blaze
|
restructuredtext
|
## Code Before:
======================
Release Notes
======================
v0.1
-------
* TODO
## Instruction:
Add some minimal release notes
## Code After:
======================
Release Notes
======================
Release 0.2
-----------
* Brings in dynd as a required dependency
for in-memory data.
Release 0.1
-----------
* Initial preview release
|
478d731771bf7889d944ccc175fec0bdadaed57e
|
includes/ThanksLogFormatter.php
|
includes/ThanksLogFormatter.php
|
<?php
/**
* This class formats log entries for thanks
*/
class ThanksLogFormatter extends LogFormatter {
/**
* @inheritDoc
* @suppress SecurityCheck-DoubleEscaped Problem with makeUserLink, see T201565
*/
protected function getMessageParameters() {
$params = parent::getMessageParameters();
// Convert target from a pageLink to a userLink since the target is
// actually a user, not a page.
$recipient = User::newFromName( $this->entry->getTarget()->getText(), false );
$params[2] = Message::rawParam( $this->makeUserLink( $recipient ) );
$params[3] = $recipient->getName();
return $params;
}
public function getPreloadTitles() {
// Add the recipient's user talk page to LinkBatch
return [ Title::makeTitle( NS_USER_TALK, $this->entry->getTarget()->getText() ) ];
}
}
|
<?php
/**
* This class formats log entries for thanks
*/
class ThanksLogFormatter extends LogFormatter {
/**
* @inheritDoc
*/
protected function getMessageParameters() {
$params = parent::getMessageParameters();
// Convert target from a pageLink to a userLink since the target is
// actually a user, not a page.
$recipient = User::newFromName( $this->entry->getTarget()->getText(), false );
$params[2] = Message::rawParam( $this->makeUserLink( $recipient ) );
$params[3] = $recipient->getName();
return $params;
}
public function getPreloadTitles() {
// Add the recipient's user talk page to LinkBatch
return [ Title::makeTitle( NS_USER_TALK, $this->entry->getTarget()->getText() ) ];
}
}
|
Remove unnecessary @suppress after phan-taint-check-plugin upgrade
|
Remove unnecessary @suppress after phan-taint-check-plugin upgrade
Bug: T201565
Change-Id: Ib35e55e5bebbb7dc6ca8ef4f08b519ec2065037b
|
PHP
|
mit
|
wikimedia/mediawiki-extensions-Thanks,wikimedia/mediawiki-extensions-Thanks,wikimedia/mediawiki-extensions-Thanks
|
php
|
## Code Before:
<?php
/**
* This class formats log entries for thanks
*/
class ThanksLogFormatter extends LogFormatter {
/**
* @inheritDoc
* @suppress SecurityCheck-DoubleEscaped Problem with makeUserLink, see T201565
*/
protected function getMessageParameters() {
$params = parent::getMessageParameters();
// Convert target from a pageLink to a userLink since the target is
// actually a user, not a page.
$recipient = User::newFromName( $this->entry->getTarget()->getText(), false );
$params[2] = Message::rawParam( $this->makeUserLink( $recipient ) );
$params[3] = $recipient->getName();
return $params;
}
public function getPreloadTitles() {
// Add the recipient's user talk page to LinkBatch
return [ Title::makeTitle( NS_USER_TALK, $this->entry->getTarget()->getText() ) ];
}
}
## Instruction:
Remove unnecessary @suppress after phan-taint-check-plugin upgrade
Bug: T201565
Change-Id: Ib35e55e5bebbb7dc6ca8ef4f08b519ec2065037b
## Code After:
<?php
/**
* This class formats log entries for thanks
*/
class ThanksLogFormatter extends LogFormatter {
/**
* @inheritDoc
*/
protected function getMessageParameters() {
$params = parent::getMessageParameters();
// Convert target from a pageLink to a userLink since the target is
// actually a user, not a page.
$recipient = User::newFromName( $this->entry->getTarget()->getText(), false );
$params[2] = Message::rawParam( $this->makeUserLink( $recipient ) );
$params[3] = $recipient->getName();
return $params;
}
public function getPreloadTitles() {
// Add the recipient's user talk page to LinkBatch
return [ Title::makeTitle( NS_USER_TALK, $this->entry->getTarget()->getText() ) ];
}
}
|
8c1899b772a1083ce739e237990652b73b41a48d
|
src/apps/investment-projects/constants.js
|
src/apps/investment-projects/constants.js
|
const { concat } = require('lodash')
const currentYear = (new Date()).getFullYear()
const GLOBAL_NAV_ITEM = {
path: '/investment-projects',
label: 'Investment projects',
permissions: [
'investment.read_associated_investmentproject',
'investment.read_all_investmentproject',
],
order: 5,
}
const LOCAL_NAV = [
{
path: 'details',
label: 'Project details',
},
{
path: 'team',
label: 'Project team',
},
{
path: 'interactions',
label: 'Interactions',
permissions: [
'interaction.read_associated_investmentproject_interaction',
'interaction.read_all_interaction',
],
},
{
path: 'evaluation',
label: 'Evaluations',
},
{
path: 'audit',
label: 'Audit history',
},
{
path: 'documents',
label: 'Documents',
permissions: [
'investment.read_investmentproject_document',
],
},
]
const DEFAULT_COLLECTION_QUERY = {
estimated_land_date_after: `${currentYear}-04-05`,
estimated_land_date_before: `${currentYear + 1}-04-06`,
sortby: 'estimated_land_date:asc',
}
const APP_PERMISSIONS = concat(LOCAL_NAV, GLOBAL_NAV_ITEM)
module.exports = {
GLOBAL_NAV_ITEM,
LOCAL_NAV,
DEFAULT_COLLECTION_QUERY,
APP_PERMISSIONS,
}
|
const { concat } = require('lodash')
const GLOBAL_NAV_ITEM = {
path: '/investment-projects',
label: 'Investment projects',
permissions: [
'investment.read_associated_investmentproject',
'investment.read_all_investmentproject',
],
order: 5,
}
const LOCAL_NAV = [
{
path: 'details',
label: 'Project details',
},
{
path: 'team',
label: 'Project team',
},
{
path: 'interactions',
label: 'Interactions',
permissions: [
'interaction.read_associated_investmentproject_interaction',
'interaction.read_all_interaction',
],
},
{
path: 'evaluation',
label: 'Evaluations',
},
{
path: 'audit',
label: 'Audit history',
},
{
path: 'documents',
label: 'Documents',
permissions: [
'investment.read_investmentproject_document',
],
},
]
const DEFAULT_COLLECTION_QUERY = {
sortby: 'estimated_land_date:asc',
}
const APP_PERMISSIONS = concat(LOCAL_NAV, GLOBAL_NAV_ITEM)
module.exports = {
GLOBAL_NAV_ITEM,
LOCAL_NAV,
DEFAULT_COLLECTION_QUERY,
APP_PERMISSIONS,
}
|
Remove preset date filters in investment collection
|
Remove preset date filters in investment collection
|
JavaScript
|
mit
|
uktrade/data-hub-frontend,uktrade/data-hub-frontend,uktrade/data-hub-fe-beta2,uktrade/data-hub-fe-beta2,uktrade/data-hub-frontend
|
javascript
|
## Code Before:
const { concat } = require('lodash')
const currentYear = (new Date()).getFullYear()
const GLOBAL_NAV_ITEM = {
path: '/investment-projects',
label: 'Investment projects',
permissions: [
'investment.read_associated_investmentproject',
'investment.read_all_investmentproject',
],
order: 5,
}
const LOCAL_NAV = [
{
path: 'details',
label: 'Project details',
},
{
path: 'team',
label: 'Project team',
},
{
path: 'interactions',
label: 'Interactions',
permissions: [
'interaction.read_associated_investmentproject_interaction',
'interaction.read_all_interaction',
],
},
{
path: 'evaluation',
label: 'Evaluations',
},
{
path: 'audit',
label: 'Audit history',
},
{
path: 'documents',
label: 'Documents',
permissions: [
'investment.read_investmentproject_document',
],
},
]
const DEFAULT_COLLECTION_QUERY = {
estimated_land_date_after: `${currentYear}-04-05`,
estimated_land_date_before: `${currentYear + 1}-04-06`,
sortby: 'estimated_land_date:asc',
}
const APP_PERMISSIONS = concat(LOCAL_NAV, GLOBAL_NAV_ITEM)
module.exports = {
GLOBAL_NAV_ITEM,
LOCAL_NAV,
DEFAULT_COLLECTION_QUERY,
APP_PERMISSIONS,
}
## Instruction:
Remove preset date filters in investment collection
## Code After:
const { concat } = require('lodash')
const GLOBAL_NAV_ITEM = {
path: '/investment-projects',
label: 'Investment projects',
permissions: [
'investment.read_associated_investmentproject',
'investment.read_all_investmentproject',
],
order: 5,
}
const LOCAL_NAV = [
{
path: 'details',
label: 'Project details',
},
{
path: 'team',
label: 'Project team',
},
{
path: 'interactions',
label: 'Interactions',
permissions: [
'interaction.read_associated_investmentproject_interaction',
'interaction.read_all_interaction',
],
},
{
path: 'evaluation',
label: 'Evaluations',
},
{
path: 'audit',
label: 'Audit history',
},
{
path: 'documents',
label: 'Documents',
permissions: [
'investment.read_investmentproject_document',
],
},
]
const DEFAULT_COLLECTION_QUERY = {
sortby: 'estimated_land_date:asc',
}
const APP_PERMISSIONS = concat(LOCAL_NAV, GLOBAL_NAV_ITEM)
module.exports = {
GLOBAL_NAV_ITEM,
LOCAL_NAV,
DEFAULT_COLLECTION_QUERY,
APP_PERMISSIONS,
}
|
e724792e0958efb4ccba0fbd91ae8759b0bc539a
|
test/tc_send_incremental.rb
|
test/tc_send_incremental.rb
|
require 'stringio'
require 'common/send_incrementally'
module TestIncrementalSend
def test_send
file = "data/noyes/noyes.flac"
to_server = StringIO.new 'wb'
from_server = StringIO.new 'dummy result'
result = send_incremental_features file, to_server, from_server
raw_data = to_server.string
assert_equal TMAGIC, raw_data.slice!(0,13)
assert_equal TSTART, raw_data.slice!(0,4)
id = raw_data.slice!(0,4)
assert_equal TCEPSTRA, id
cepstra = []
while id == TCEPSTRA
cep_count = 13 * raw_data.slice!(0,4).unpack('N')[0]
cep_count.times do |i|
cepstra << raw_data.slice!(0,4).unpack('g')[0]
end
id = raw_data.slice!(0,4)
end
assert_equal TEND, id
assert_equal TBYE, raw_data.slice!(0,4)
ex_cmn = IO.read("data/noyes/cmn.dat").unpack 'g*'
assert_m ex_cmn[0, cepstra.size], cepstra, 5
end
end
|
require 'stringio'
require 'common/send_incrementally'
module TestIncrementalSend
def test_send
file = "data/noyes/noyes.flac"
to_server = StringIO.new 'wb'
from_server = StringIO.new 'dummy result'
result = send_incremental_features file, to_server, from_server, 16, 8000
raw_data = to_server.string
assert_equal TMAGIC, raw_data.slice!(0,13)
assert_equal TSTART, raw_data.slice!(0,4)
id = raw_data.slice!(0,4)
assert_equal TCEPSTRA, id
cepstra = []
while id == TCEPSTRA
cep_count = 13 * raw_data.slice!(0,4).unpack('N')[0]
cep_count.times do |i|
cepstra << raw_data.slice!(0,4).unpack('g')[0]
end
id = raw_data.slice!(0,4)
end
assert_equal TEND, id
assert_equal TBYE, raw_data.slice!(0,4)
ex_cmn = IO.read("data/noyes/cmn.dat").unpack 'g*'
assert_m ex_cmn[0, cepstra.size], cepstra, 5
end
end
|
Send test now reflects additional parameters to incremental send.
|
Send test now reflects additional parameters to incremental send.
|
Ruby
|
bsd-2-clause
|
talkhouse/noyes,talkhouse/noyes,talkhouse/noyes
|
ruby
|
## Code Before:
require 'stringio'
require 'common/send_incrementally'
module TestIncrementalSend
def test_send
file = "data/noyes/noyes.flac"
to_server = StringIO.new 'wb'
from_server = StringIO.new 'dummy result'
result = send_incremental_features file, to_server, from_server
raw_data = to_server.string
assert_equal TMAGIC, raw_data.slice!(0,13)
assert_equal TSTART, raw_data.slice!(0,4)
id = raw_data.slice!(0,4)
assert_equal TCEPSTRA, id
cepstra = []
while id == TCEPSTRA
cep_count = 13 * raw_data.slice!(0,4).unpack('N')[0]
cep_count.times do |i|
cepstra << raw_data.slice!(0,4).unpack('g')[0]
end
id = raw_data.slice!(0,4)
end
assert_equal TEND, id
assert_equal TBYE, raw_data.slice!(0,4)
ex_cmn = IO.read("data/noyes/cmn.dat").unpack 'g*'
assert_m ex_cmn[0, cepstra.size], cepstra, 5
end
end
## Instruction:
Send test now reflects additional parameters to incremental send.
## Code After:
require 'stringio'
require 'common/send_incrementally'
module TestIncrementalSend
def test_send
file = "data/noyes/noyes.flac"
to_server = StringIO.new 'wb'
from_server = StringIO.new 'dummy result'
result = send_incremental_features file, to_server, from_server, 16, 8000
raw_data = to_server.string
assert_equal TMAGIC, raw_data.slice!(0,13)
assert_equal TSTART, raw_data.slice!(0,4)
id = raw_data.slice!(0,4)
assert_equal TCEPSTRA, id
cepstra = []
while id == TCEPSTRA
cep_count = 13 * raw_data.slice!(0,4).unpack('N')[0]
cep_count.times do |i|
cepstra << raw_data.slice!(0,4).unpack('g')[0]
end
id = raw_data.slice!(0,4)
end
assert_equal TEND, id
assert_equal TBYE, raw_data.slice!(0,4)
ex_cmn = IO.read("data/noyes/cmn.dat").unpack 'g*'
assert_m ex_cmn[0, cepstra.size], cepstra, 5
end
end
|
fa7172a5e3231e738d85df3baba130fdec7497d1
|
derrida/outwork/views.py
|
derrida/outwork/views.py
|
from django.views.generic import ListView
from haystack.query import SearchQuerySet
from haystack.inputs import Clean
from derrida.outwork.models import Outwork
class OutworkListView(ListView):
model = Outwork
template_name = 'outwork/outwork_list.html'
paginate_by = 16
def get_queryset(self):
# restrict to published articles
sqs = SearchQuerySet().models(self.model).filter(published=True)
if self.request.GET.get('query', None):
sqs = sqs.filter(content=Clean(self.request.GET['query']))
# default sort ?
return sqs
# return Outwork.objects.published(for_user=self.request.user)
|
from django.views.generic import ListView
from haystack.query import SearchQuerySet
from haystack.inputs import Clean, Raw
from derrida.outwork.models import Outwork
class OutworkListView(ListView):
model = Outwork
template_name = 'outwork/outwork_list.html'
paginate_by = 16
def get_queryset(self):
# restrict to published articles
sqs = SearchQuerySet().models(self.model).filter(published=Raw(True))
if self.request.GET.get('query', None):
sqs = sqs.filter(content=Clean(self.request.GET['query']))
# default sort ?
return sqs
# return Outwork.objects.published(for_user=self.request.user)
|
Fix outwork list view to properly filter on published=true in Solr
|
Fix outwork list view to properly filter on published=true in Solr
|
Python
|
apache-2.0
|
Princeton-CDH/derrida-django,Princeton-CDH/derrida-django,Princeton-CDH/derrida-django,Princeton-CDH/derrida-django
|
python
|
## Code Before:
from django.views.generic import ListView
from haystack.query import SearchQuerySet
from haystack.inputs import Clean
from derrida.outwork.models import Outwork
class OutworkListView(ListView):
model = Outwork
template_name = 'outwork/outwork_list.html'
paginate_by = 16
def get_queryset(self):
# restrict to published articles
sqs = SearchQuerySet().models(self.model).filter(published=True)
if self.request.GET.get('query', None):
sqs = sqs.filter(content=Clean(self.request.GET['query']))
# default sort ?
return sqs
# return Outwork.objects.published(for_user=self.request.user)
## Instruction:
Fix outwork list view to properly filter on published=true in Solr
## Code After:
from django.views.generic import ListView
from haystack.query import SearchQuerySet
from haystack.inputs import Clean, Raw
from derrida.outwork.models import Outwork
class OutworkListView(ListView):
model = Outwork
template_name = 'outwork/outwork_list.html'
paginate_by = 16
def get_queryset(self):
# restrict to published articles
sqs = SearchQuerySet().models(self.model).filter(published=Raw(True))
if self.request.GET.get('query', None):
sqs = sqs.filter(content=Clean(self.request.GET['query']))
# default sort ?
return sqs
# return Outwork.objects.published(for_user=self.request.user)
|
cdf68537202f35f2bb42c33088d33f18820169f0
|
app/assets/javascripts/lib/router.coffee
|
app/assets/javascripts/lib/router.coffee
|
class @Router extends Backbone.Router
routes:
"demand/:sidebar(/:slide)" : "demand"
"costs/:sidebar(/:slide)" : "costs"
"overview/:sidebar(/:slide)": "overview"
"supply/:sidebar(/:slide)" : "supply"
"flexibility/:sidebar(/:slide)" : "flexibility"
"data/:sidebar(/:slide)" : "data"
"report" : "report"
":tab/:sidebar(/:slide)" : "load_slides"
"" : "load_default_slides"
demand: (sidebar, slide) => @load_slides('demand', sidebar, slide)
costs: (sidebar, slide) => @load_slides('costs', sidebar, slide)
overview: (sidebar, slide) => @load_slides('overview', sidebar, slide)
supply: (sidebar, slide) => @load_slides('supply', sidebar, slide)
flexibility: (sidebar, slide) => @load_slides('flexibility', sidebar, slide)
data: (sidebar, slide) => @load_slides('data', sidebar, slide)
# root:
report: =>
# pass
load_slides: (tab, sidebar, slide) ->
url = "/scenario/#{_.compact([tab, sidebar, slide]).join('/')}"
$.ajax
url: url
dataType: 'script'
@update_sidebar tab, sidebar
update_sidebar: (tab, sidebar) ->
unless _.compact([tab, sidebar]).length
[tab, sidebar] = @ui_fragments()
if $("#sidebar h4.active").data('key') != tab
$("#sidebar h4").removeClass 'active'
$("#sidebar h4[data-key=#{tab}]").trigger 'click'
$("#sidebar li").removeClass 'active'
$("#sidebar li##{sidebar}").addClass 'active'
ui_fragments: ->
(Backbone.history.getFragment() || 'overview/introduction').split('/')
load_default_slides: =>
[tab, sidebar, slide] = @ui_fragments()
@load_slides(tab, sidebar, slide)
$("#sidebar h4[data-key=#{tab}]").click()
|
class @Router extends Backbone.Router
routes:
"report" : "report"
":tab/:sidebar(/:slide)" : "load_slides"
"" : "load_default_slides"
report: =>
# pass
load_slides: (tab, sidebar, slide) ->
url = "/scenario/#{_.compact([tab, sidebar, slide]).join('/')}"
$.ajax
url: url
dataType: 'script'
@update_sidebar tab, sidebar
update_sidebar: (tab, sidebar) ->
unless _.compact([tab, sidebar]).length
[tab, sidebar] = @ui_fragments()
if $("#sidebar h4.active").data('key') != tab
$("#sidebar h4").removeClass 'active'
$("#sidebar h4[data-key=#{tab}]").trigger 'click'
$("#sidebar li").removeClass 'active'
$("#sidebar li##{sidebar}").addClass 'active'
ui_fragments: ->
(Backbone.history.getFragment() || 'overview/introduction').split('/')
load_default_slides: =>
[tab, sidebar, slide] = @ui_fragments()
@load_slides(tab, sidebar, slide)
$("#sidebar h4[data-key=#{tab}]").click()
|
Remove redundant routes to support the new "overview" section
|
Remove redundant routes to support the new "overview" section
|
CoffeeScript
|
mit
|
quintel/etmodel,quintel/etmodel,quintel/etmodel,quintel/etmodel
|
coffeescript
|
## Code Before:
class @Router extends Backbone.Router
routes:
"demand/:sidebar(/:slide)" : "demand"
"costs/:sidebar(/:slide)" : "costs"
"overview/:sidebar(/:slide)": "overview"
"supply/:sidebar(/:slide)" : "supply"
"flexibility/:sidebar(/:slide)" : "flexibility"
"data/:sidebar(/:slide)" : "data"
"report" : "report"
":tab/:sidebar(/:slide)" : "load_slides"
"" : "load_default_slides"
demand: (sidebar, slide) => @load_slides('demand', sidebar, slide)
costs: (sidebar, slide) => @load_slides('costs', sidebar, slide)
overview: (sidebar, slide) => @load_slides('overview', sidebar, slide)
supply: (sidebar, slide) => @load_slides('supply', sidebar, slide)
flexibility: (sidebar, slide) => @load_slides('flexibility', sidebar, slide)
data: (sidebar, slide) => @load_slides('data', sidebar, slide)
# root:
report: =>
# pass
load_slides: (tab, sidebar, slide) ->
url = "/scenario/#{_.compact([tab, sidebar, slide]).join('/')}"
$.ajax
url: url
dataType: 'script'
@update_sidebar tab, sidebar
update_sidebar: (tab, sidebar) ->
unless _.compact([tab, sidebar]).length
[tab, sidebar] = @ui_fragments()
if $("#sidebar h4.active").data('key') != tab
$("#sidebar h4").removeClass 'active'
$("#sidebar h4[data-key=#{tab}]").trigger 'click'
$("#sidebar li").removeClass 'active'
$("#sidebar li##{sidebar}").addClass 'active'
ui_fragments: ->
(Backbone.history.getFragment() || 'overview/introduction').split('/')
load_default_slides: =>
[tab, sidebar, slide] = @ui_fragments()
@load_slides(tab, sidebar, slide)
$("#sidebar h4[data-key=#{tab}]").click()
## Instruction:
Remove redundant routes to support the new "overview" section
## Code After:
class @Router extends Backbone.Router
routes:
"report" : "report"
":tab/:sidebar(/:slide)" : "load_slides"
"" : "load_default_slides"
report: =>
# pass
load_slides: (tab, sidebar, slide) ->
url = "/scenario/#{_.compact([tab, sidebar, slide]).join('/')}"
$.ajax
url: url
dataType: 'script'
@update_sidebar tab, sidebar
update_sidebar: (tab, sidebar) ->
unless _.compact([tab, sidebar]).length
[tab, sidebar] = @ui_fragments()
if $("#sidebar h4.active").data('key') != tab
$("#sidebar h4").removeClass 'active'
$("#sidebar h4[data-key=#{tab}]").trigger 'click'
$("#sidebar li").removeClass 'active'
$("#sidebar li##{sidebar}").addClass 'active'
ui_fragments: ->
(Backbone.history.getFragment() || 'overview/introduction').split('/')
load_default_slides: =>
[tab, sidebar, slide] = @ui_fragments()
@load_slides(tab, sidebar, slide)
$("#sidebar h4[data-key=#{tab}]").click()
|
c538b138137b3be1563e64756cf5e8dc4977282a
|
metadata/me.ccrama.redditslide.txt
|
metadata/me.ccrama.redditslide.txt
|
Categories:Reading,Internet
License:GPLv3
Web Site:https://github.com/ccrama/Slide/blob/HEAD/README.md
Source Code:https://github.com/ccrama/Slide
Issue Tracker:https://github.com/ccrama/Slide/issues
Changelog:https://github.com/ccrama/Slide/blob/HEAD/CHANGELOG.md
Name:Slide
Summary:Companion app for reddit
Description:
Companion app for browsing Reddit.
.
Repo Type:git
Repo:https://github.com/ccrama/Slide
Build:5.2.2,172
disable=synccit.jar
commit=78126065c37a0dbdd4ad733632bee0a0defafcd2
subdir=app
gradle=prod,noGPlay
forceversion=yes
prebuild=sed -i -e '/withGPlayCompile/d' build.gradle
Maintainer Notes:
The synccit.jar is most likely https://github.com/talklittle/synccit-android.
.
Auto Update Mode:None
Update Check Mode:Tags
Current Version:5.3
Current Version Code:175
|
Categories:Reading,Internet
License:GPLv3
Web Site:https://github.com/ccrama/Slide/blob/HEAD/README.md
Source Code:https://github.com/ccrama/Slide
Issue Tracker:https://github.com/ccrama/Slide/issues
Changelog:https://github.com/ccrama/Slide/blob/HEAD/CHANGELOG.md
Name:Slide
Summary:Companion app for reddit
Description:
Companion app for browsing Reddit.
.
Repo Type:git
Repo:https://github.com/ccrama/Slide
Build:5.2.2,172
disable=synccit.jar
commit=78126065c37a0dbdd4ad733632bee0a0defafcd2
subdir=app
gradle=prod,noGPlay
forceversion=yes
prebuild=sed -i -e '/withGPlayCompile/d' build.gradle
Build:5.3,179
commit=a3976de76a4442a0c5b2c831c88552a926cfe51c
subdir=app
gradle=prod,noGPlay
forceversion=yes
prebuild=sed -i -e '/withGPlayCompile/d' build.gradle
Auto Update Mode:None
Update Check Mode:Tags
Current Version:5.3
Current Version Code:175
|
Update Slide to 5.3 (175)
|
Update Slide to 5.3 (175)
|
Text
|
agpl-3.0
|
f-droid/fdroiddata,f-droid/fdroiddata,f-droid/fdroid-data
|
text
|
## Code Before:
Categories:Reading,Internet
License:GPLv3
Web Site:https://github.com/ccrama/Slide/blob/HEAD/README.md
Source Code:https://github.com/ccrama/Slide
Issue Tracker:https://github.com/ccrama/Slide/issues
Changelog:https://github.com/ccrama/Slide/blob/HEAD/CHANGELOG.md
Name:Slide
Summary:Companion app for reddit
Description:
Companion app for browsing Reddit.
.
Repo Type:git
Repo:https://github.com/ccrama/Slide
Build:5.2.2,172
disable=synccit.jar
commit=78126065c37a0dbdd4ad733632bee0a0defafcd2
subdir=app
gradle=prod,noGPlay
forceversion=yes
prebuild=sed -i -e '/withGPlayCompile/d' build.gradle
Maintainer Notes:
The synccit.jar is most likely https://github.com/talklittle/synccit-android.
.
Auto Update Mode:None
Update Check Mode:Tags
Current Version:5.3
Current Version Code:175
## Instruction:
Update Slide to 5.3 (175)
## Code After:
Categories:Reading,Internet
License:GPLv3
Web Site:https://github.com/ccrama/Slide/blob/HEAD/README.md
Source Code:https://github.com/ccrama/Slide
Issue Tracker:https://github.com/ccrama/Slide/issues
Changelog:https://github.com/ccrama/Slide/blob/HEAD/CHANGELOG.md
Name:Slide
Summary:Companion app for reddit
Description:
Companion app for browsing Reddit.
.
Repo Type:git
Repo:https://github.com/ccrama/Slide
Build:5.2.2,172
disable=synccit.jar
commit=78126065c37a0dbdd4ad733632bee0a0defafcd2
subdir=app
gradle=prod,noGPlay
forceversion=yes
prebuild=sed -i -e '/withGPlayCompile/d' build.gradle
Build:5.3,179
commit=a3976de76a4442a0c5b2c831c88552a926cfe51c
subdir=app
gradle=prod,noGPlay
forceversion=yes
prebuild=sed -i -e '/withGPlayCompile/d' build.gradle
Auto Update Mode:None
Update Check Mode:Tags
Current Version:5.3
Current Version Code:175
|
197e27200e8d22a0d912dfceda846bb1ea6a402d
|
src/main/scala/modules/counter/guice/modules/CounterModule.scala
|
src/main/scala/modules/counter/guice/modules/CounterModule.scala
|
package modules.counter.guice.modules
import akka.actor.{ActorRef, ActorSystem}
import com.google.inject.name.Named
import com.google.inject.{AbstractModule, Provides, Singleton}
import core.services.persistence.PersistenceCleanup
import modules.counter.services.count.CounterPersistentActor
import net.codingwell.scalaguice.ScalaModule
class CounterModule extends AbstractModule with ScalaModule {
@Provides
@Singleton
@Named(CounterPersistentActor.name)
def counterActor(actorSystem: ActorSystem, persistenceCleanup: PersistenceCleanup): ActorRef = {
actorSystem.actorOf(CounterPersistentActor.props(persistenceCleanup))
}
}
|
package modules.counter.guice.modules
import akka.actor.{ActorRef, ActorSystem}
import com.google.inject.Provides
import com.google.inject.name.Named
import modules.counter.repositories.{CounterRepo, CounterRepoImpl}
import modules.counter.services.count.CounterActor
import net.codingwell.scalaguice.ScalaModule
import scala.concurrent.ExecutionContext
class CounterModule extends ScalaModule {
override def configure() {
bind[CounterRepo].to[CounterRepoImpl]
}
@Provides
@Named(CounterActor.name)
def counterActor(actorSystem: ActorSystem, counterRepo: CounterRepo)
(implicit executionContext: ExecutionContext): ActorRef = {
actorSystem.actorOf(CounterActor.props(counterRepo))
}
}
|
Add bind of 'CounterRepo' to implementation
|
Add bind of 'CounterRepo' to implementation
|
Scala
|
mit
|
sysgears/apollo-universal-starter-kit,sysgears/apollo-universal-starter-kit,sysgears/apollo-fullstack-starter-kit,sysgears/apollo-universal-starter-kit,sysgears/apollo-universal-starter-kit
|
scala
|
## Code Before:
package modules.counter.guice.modules
import akka.actor.{ActorRef, ActorSystem}
import com.google.inject.name.Named
import com.google.inject.{AbstractModule, Provides, Singleton}
import core.services.persistence.PersistenceCleanup
import modules.counter.services.count.CounterPersistentActor
import net.codingwell.scalaguice.ScalaModule
class CounterModule extends AbstractModule with ScalaModule {
@Provides
@Singleton
@Named(CounterPersistentActor.name)
def counterActor(actorSystem: ActorSystem, persistenceCleanup: PersistenceCleanup): ActorRef = {
actorSystem.actorOf(CounterPersistentActor.props(persistenceCleanup))
}
}
## Instruction:
Add bind of 'CounterRepo' to implementation
## Code After:
package modules.counter.guice.modules
import akka.actor.{ActorRef, ActorSystem}
import com.google.inject.Provides
import com.google.inject.name.Named
import modules.counter.repositories.{CounterRepo, CounterRepoImpl}
import modules.counter.services.count.CounterActor
import net.codingwell.scalaguice.ScalaModule
import scala.concurrent.ExecutionContext
class CounterModule extends ScalaModule {
override def configure() {
bind[CounterRepo].to[CounterRepoImpl]
}
@Provides
@Named(CounterActor.name)
def counterActor(actorSystem: ActorSystem, counterRepo: CounterRepo)
(implicit executionContext: ExecutionContext): ActorRef = {
actorSystem.actorOf(CounterActor.props(counterRepo))
}
}
|
d6216cf52503ade30db3beb54ba2ebfb3b94312f
|
.stickler.yml
|
.stickler.yml
|
linters:
flake8:
python: 3
max-line-length: 79
select: C,E,F,W,B,B950
ignore: E203, E501, W503
black:
config: ./pyproject.toml
fixer: true
fixers:
enable: true
|
linters:
flake8:
python: 3
max-line-length: 79
select: C,E,F,W,B,B950
ignore: E203, E501, W503
per-file-ignores: __init__.py:F401
black:
config: ./pyproject.toml
fixer: true
fixers:
enable: true
|
Change config file to ignore unused imports in __init__.py
|
Change config file to ignore unused imports in __init__.py
|
YAML
|
mit
|
wind-python/windpowerlib
|
yaml
|
## Code Before:
linters:
flake8:
python: 3
max-line-length: 79
select: C,E,F,W,B,B950
ignore: E203, E501, W503
black:
config: ./pyproject.toml
fixer: true
fixers:
enable: true
## Instruction:
Change config file to ignore unused imports in __init__.py
## Code After:
linters:
flake8:
python: 3
max-line-length: 79
select: C,E,F,W,B,B950
ignore: E203, E501, W503
per-file-ignores: __init__.py:F401
black:
config: ./pyproject.toml
fixer: true
fixers:
enable: true
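For reference, F401 is flake8's "imported but unused" check. Package __init__.py files often import names purely to re-export them, which trips F401 even though the imports are intentional, hence the per-file ignore. A small hypothetical example of the kind of file this targets:
```python
# Hypothetical package __init__.py: the imports exist only to re-export names
# at the package level. Nothing in this file uses them, so flake8 would report
# F401 ("imported but unused") for each line without the per-file-ignores
# entry configured above.
from .core import run_model
from .utils import load_config
```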
|
2cdbe0ca000ec0c5389b9a82015a2319877481f3
|
app.json
|
app.json
|
{
"name": "Webhook Slackbot",
"description": "A Slackbot that listens to a channel and sends the full message as a webhook",
"repository": "https://github.com/brett-hobbs/webhook-slackbot",
"success_url": "/",
"env": {
"SLACK_TOKEN": {
"description": "Slack bot RTM API token",
"required": true
},
"WEBHOOK_URL": {
"description": "Outgoing webhook url",
"required": true
},
"MESSAGE_FILTER_FIELD": {
"description": "Optional field on which to filter",
"required": false
},
"MESSAGE_FILTER_TEXT": {
"description": "Text (or substring) expected in the filter field",
"required": false
},
"NODE_ENV": "staging"
},
"image": "heroku/nodejs"
}
|
{
"name": "Webhook Slackbot",
"description": "A Slackbot that listens to a channel and sends the full message as a webhook",
"repository": "https://github.com/brett-hobbs/webhook-slackbot",
"success_url": "/",
"env": {
"SLACK_TOKEN": {
"description": "Slack bot RTM API token",
"required": true
},
"WEBHOOK_URL": {
"description": "Outgoing webhook url or urls delimited by a semicolon",
"required": true
},
"MESSAGE_FILTER_FIELD": {
"description": "Optional field on which to filter, used in conjunction with MESSAGE_FILTER_TEXT",
"required": false
},
"MESSAGE_FILTER_TEXT": {
"description": "Text (substring) expected in the filter field",
"required": false
},
"NODE_ENV": "staging"
},
"image": "heroku/nodejs"
}
|
Add information about env variables
|
Add information about env variables
|
JSON
|
mit
|
brett-hobbs/webhook-slackbot
|
json
|
## Code Before:
{
"name": "Webhook Slackbot",
"description": "A Slackbot that listens to a channel and sends the full message as a webhook",
"repository": "https://github.com/brett-hobbs/webhook-slackbot",
"success_url": "/",
"env": {
"SLACK_TOKEN": {
"description": "Slack bot RTM API token",
"required": true
},
"WEBHOOK_URL": {
"description": "Outgoing webhook url",
"required": true
},
"MESSAGE_FILTER_FIELD": {
"description": "Optional field on which to filter",
"required": false
},
"MESSAGE_FILTER_TEXT": {
"description": "Text (or substring) expected in the filter field",
"required": false
},
"NODE_ENV": "staging"
},
"image": "heroku/nodejs"
}
## Instruction:
Add information about env variables
## Code After:
{
"name": "Webhook Slackbot",
"description": "A Slackbot that listens to a channel and sends the full message as a webhook",
"repository": "https://github.com/brett-hobbs/webhook-slackbot",
"success_url": "/",
"env": {
"SLACK_TOKEN": {
"description": "Slack bot RTM API token",
"required": true
},
"WEBHOOK_URL": {
"description": "Outgoing webhook url or urls delimited by a semicolon",
"required": true
},
"MESSAGE_FILTER_FIELD": {
"description": "Optional field on which to filter, used in conjunction with MESSAGE_FILTER_TEXT",
"required": false
},
"MESSAGE_FILTER_TEXT": {
"description": "Text (substring) expected in the filter field",
"required": false
},
"NODE_ENV": "staging"
},
"image": "heroku/nodejs"
}
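The updated description states that WEBHOOK_URL can hold a single url or several urls delimited by a semicolon. The bot itself is a Node.js app and is not shown in this record; the following is only a hypothetical Python sketch of the parsing that wording implies:
```python
# Hypothetical sketch of the idea behind the updated WEBHOOK_URL description:
# accept one url, or several urls separated by semicolons.
import os

raw = os.environ.get("WEBHOOK_URL", "")
webhook_urls = [url.strip() for url in raw.split(";") if url.strip()]

for url in webhook_urls:
    # A real bot would POST the Slack message payload to each url here.
    print("would deliver message to", url)
```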
|
8b3b7a09aa57d39b836d1f66e6ec760d804c0903
|
.github/workflows/release.yml
|
.github/workflows/release.yml
|
name: "Automated releases"
on:
pull_request_review:
schedule:
# "At 00:00 on Sunday" (https://crontab.guru/once-a-week)
- cron: "0 0 * * 0"
jobs:
release:
runs-on: ubuntu-latest
steps:
- uses: simple-icons/release-action@master
with:
repo-token: "${{ secrets.GITHUB_TOKEN }}"
|
name: "Automated releases"
on:
pull_request_review:
types: [submitted]
schedule:
# "At 00:00 on Sunday" (https://crontab.guru/once-a-week)
- cron: "0 0 * * 0"
jobs:
release:
runs-on: ubuntu-latest
steps:
- uses: simple-icons/release-action@master
with:
repo-token: "${{ secrets.GITHUB_TOKEN }}"
|
Reduce number of PR review GH Actions runs
|
Reduce number of PR review GH Actions runs
|
YAML
|
cc0-1.0
|
simple-icons/simple-icons,danleech/simpleicons.org,danleech/simple-icons,danleech/simpleicons.org,simple-icons/simple-icons
|
yaml
|
## Code Before:
name: "Automated releases"
on:
pull_request_review:
schedule:
# "At 00:00 on Sunday" (https://crontab.guru/once-a-week)
- cron: "0 0 * * 0"
jobs:
release:
runs-on: ubuntu-latest
steps:
- uses: simple-icons/release-action@master
with:
repo-token: "${{ secrets.GITHUB_TOKEN }}"
## Instruction:
Reduce number of PR review GH Actions runs
## Code After:
name: "Automated releases"
on:
pull_request_review:
types: [submitted]
schedule:
# "At 00:00 on Sunday" (https://crontab.guru/once-a-week)
- cron: "0 0 * * 0"
jobs:
release:
runs-on: ubuntu-latest
steps:
- uses: simple-icons/release-action@master
with:
repo-token: "${{ secrets.GITHUB_TOKEN }}"
|
7a9dbd45a221a1ce1b4405965de891e21bd2362c
|
robots.txt
|
robots.txt
|
User-agent: *
Disallow: /morfy/
Disallow: /plugins/
Disallow: /config/
|
User-agent: *
Disallow: /morfy/
Disallow: /plugins/
Disallow: /config/
Disallow: /vendor/
|
Add /vendor/ folder to Disallow
|
Add /vendor/ folder to Disallow
|
Text
|
mit
|
morfy-cms/morfy,fansoro/fansoro
|
text
|
## Code Before:
User-agent: *
Disallow: /morfy/
Disallow: /plugins/
Disallow: /config/
## Instruction:
Add /vendor/ folder to Disallow
## Code After:
User-agent: *
Disallow: /morfy/
Disallow: /plugins/
Disallow: /config/
Disallow: /vendor/
|
f33c03e6b3dc9f44fb6c028792b527359b3eaf5a
|
backend/app/assets/stylesheets/spree/backend/sections/_log_entries.scss
|
backend/app/assets/stylesheets/spree/backend/sections/_log_entries.scss
|
.log_entry {
&.success {
background: lighten($color-primary, 15);
td h4 {
color: darken($body-color, 25);
i {
color: $color-2;
}
}
}
&.fail {
background: lighten($color-red, 25);
td h4 {
color: lighten($body-color, 50);
i {
color: $color-5;
}
}
}
}
|
.log_entry {
&.success {
background: lighten($color-primary, 15);
td h4 {
color: darken($body-color, 25);
i {
color: $color-primary;
}
}
}
&.fail {
background: lighten($color-red, 25);
td h4 {
color: lighten($body-color, 50);
i {
color: $color-red;
}
}
}
}
|
Fix indentation on a scss partial
|
Fix indentation on a scss partial
|
SCSS
|
bsd-3-clause
|
pervino/solidus,pervino/solidus,pervino/solidus,pervino/solidus
|
scss
|
## Code Before:
.log_entry {
&.success {
background: lighten($color-primary, 15);
td h4 {
color: darken($body-color, 25);
i {
color: $color-2;
}
}
}
&.fail {
background: lighten($color-red, 25);
td h4 {
color: lighten($body-color, 50);
i {
color: $color-5;
}
}
}
}
## Instruction:
Fix indentation on a scss partial
## Code After:
.log_entry {
&.success {
background: lighten($color-primary, 15);
td h4 {
color: darken($body-color, 25);
i {
color: $color-primary;
}
}
}
&.fail {
background: lighten($color-red, 25);
td h4 {
color: lighten($body-color, 50);
i {
color: $color-red;
}
}
}
}
|
f8ca82f9a6790ca823a59fa6e436b85251d263ed
|
requirements.txt
|
requirements.txt
|
Flask==0.8
Flask-SQLAlchemy==0.16
Flask-Script==0.3.3
Jinja2==2.6
SQLAlchemy==0.7.7
Werkzeug==0.8.3
amqplib==1.0.2
anyjson==0.3.1
argparse==1.2.1
kombu==2.1.8
lxml==2.3.4
python-dateutil==1.5
wsgiref==0.1.2
ckan
ckanclient
pika==0.9.13
supervisor
foxpath>=0.99.15
psycopg2
unicodecsv
openpyxl
Flask-Login==0.1.3
flask-principal
blinker
markdown
requests==2.5.1
sqlalchemy_utils
|
Flask==0.8
Flask-SQLAlchemy==0.16
Flask-Script==0.3.3
Jinja2==2.6
SQLAlchemy==1.0.8
Werkzeug==0.8.3
amqplib==1.0.2
anyjson==0.3.1
argparse==1.2.1
kombu==2.1.8
lxml==2.3.4
python-dateutil==1.5
wsgiref==0.1.2
ckan
ckanclient
pika==0.9.13
supervisor
foxpath>=0.99.15
psycopg2
unicodecsv
openpyxl
Flask-Login==0.1.3
flask-principal
blinker
markdown
requests==2.5.1
SQLAlchemy-Utils==0.30.17
|
Upgrade & pin SQLAlchemy and SQLAlchemy-Utils
|
Upgrade & pin SQLAlchemy and SQLAlchemy-Utils
|
Text
|
agpl-3.0
|
pwyf/IATI-Data-Quality,pwyf/IATI-Data-Quality,pwyf/IATI-Data-Quality,pwyf/IATI-Data-Quality
|
text
|
## Code Before:
Flask==0.8
Flask-SQLAlchemy==0.16
Flask-Script==0.3.3
Jinja2==2.6
SQLAlchemy==0.7.7
Werkzeug==0.8.3
amqplib==1.0.2
anyjson==0.3.1
argparse==1.2.1
kombu==2.1.8
lxml==2.3.4
python-dateutil==1.5
wsgiref==0.1.2
ckan
ckanclient
pika==0.9.13
supervisor
foxpath>=0.99.15
psycopg2
unicodecsv
openpyxl
Flask-Login==0.1.3
flask-principal
blinker
markdown
requests==2.5.1
sqlalchemy_utils
## Instruction:
Upgrade & pin SQLAlchemy and SQLAlchemy-Utils
## Code After:
Flask==0.8
Flask-SQLAlchemy==0.16
Flask-Script==0.3.3
Jinja2==2.6
SQLAlchemy==1.0.8
Werkzeug==0.8.3
amqplib==1.0.2
anyjson==0.3.1
argparse==1.2.1
kombu==2.1.8
lxml==2.3.4
python-dateutil==1.5
wsgiref==0.1.2
ckan
ckanclient
pika==0.9.13
supervisor
foxpath>=0.99.15
psycopg2
unicodecsv
openpyxl
Flask-Login==0.1.3
flask-principal
blinker
markdown
requests==2.5.1
SQLAlchemy-Utils==0.30.17
|
2fec85efc5e87251dccde698b844e5d4f0580a0b
|
README.md
|
README.md
|
Keep your git repos organized
|
**gitdir** is a system that helps you keep your git repositories organized. It is *not* a git workflow, and works with different (but not all) workflows. Gitdir is the successor to [hubdir](https://github.com/fenhl/hubdir), generalized to allow working with repositories that are not hosted on github.
This is gitdir version 2.0.0 ([semver](http://semver.org/)). The versioned API is described below, in the section *The system*.
# The system
This section describes the gitdir system.
## The directories
In the gitdir system, all git repos are organized within the *git directories*, or *gitdirs*. There are two kinds of gitdir:
1. the global gitdir at `/opt/git`, and
2. the local gitdirs at `~/git`. Each user can have their own local gitdir.
The global gitdir will be used by default, while the user's local gitdir is used only for staging and when the global gitdir is inaccessible.
## Directory structure
A gitdir contains subdirectories for any host from which repositories are cloned. The way repositories are organized within the host directory is defined individually for each host. For example, `github.com` organizes repositories by github username and repo name, so that the directory for this repo would be located at `/opt/git/github.com/fenhl/gitdir`.
Four different kinds of repos may reside within a repo directory:
1. Master repos, located at `<repodir>/master`. These track [the default branch](https://help.github.com/articles/setting-the-default-branch) from the remote and should always stay clean.
2. Branches, located at `<repodir>/branch/<branch>`. These work like the master repos, except they track a different remote branch.
3. Stages, located at `<repodir>/stage`. These have more loose restrictions and are where any work happens.
4. Bare repos, located at `<repodir>/<reponame>.git`. These are created with `git init --bare`, and should be used as the remote when hosting locally (`/opt/git/localhost`).
## Repo setup
Within a repo, the following rules should be, well, followed:
* All github repos have the default `origin` remote set up as follows:
* For master and branch repos, `https://github.com:<user>/<reponame>.git`
* For stage repos, `[email protected]:<user>/<reponame>.git`
* Master and branch repos have no other remotes. For stages, do whatever works best for your git workflow.
* In multi-user environments, the global gitdir and everything under it should be owned by a group named `git` and be group read-writeable.
|
Add readme, migrated from hubdir
|
Add readme, migrated from hubdir
|
Markdown
|
mit
|
fenhl/gitdir
|
markdown
|
## Code Before:
Keep your git repos organized
## Instruction:
Add readme, migrated from hubdir
## Code After:
**gitdir** is a system that helps you keep your git repositories organized. It is *not* a git workflow, and works with different (but not all) workflows. Gitdir is the successor to [hubdir](https://github.com/fenhl/hubdir), generalized to allow working with repositories that are not hosted on github.
This is gitdir version 2.0.0 ([semver](http://semver.org/)). The versioned API is described below, in the section *The system*.
# The system
This section describes the gitdir system.
## The directories
In the gitdir system, all git repos are organized within the *git directories*, or *gitdirs*. There are two kinds of gitdir:
1. the global gitdir at `/opt/git`, and
2. the local gitdirs at `~/git`. Each user can have their own local gitdir.
The global gitdir will be used by default, while the user's local gitdir is used only for staging and when the global gitdir is inaccessible.
## Directory structure
A gitdir contains subdirectories for any host from which repositories are cloned. The way repositories are organized within the host directory is defined individually for each host. For example, `github.com` organizes repositories by github username and repo name, so that the directory for this repo would be located at `/opt/git/github.com/fenhl/gitdir`.
Four different kinds of repos may reside within a repo directory:
1. Master repos, located at `<repodir>/master`. These track [the default branch](https://help.github.com/articles/setting-the-default-branch) from the remote and should always stay clean.
2. Branches, located at `<repodir>/branch/<branch>`. These work like the master repos, except they track a different remote branch.
3. Stages, located at `<repodir>/stage`. These have looser restrictions and are where any work happens.
4. Bare repos, located at `<repodir>/<reponame>.git`. These are created with `git init --bare`, and should be used as the remote when hosting locally (`/opt/git/localhost`).
## Repo setup
Within a repo, the following rules should be, well, followed:
* All github repos have the default `origin` remote set up as follows:
* For master and branch repos, `https://github.com:<user>/<reponame>.git`
* For stage repos, `[email protected]:<user>/<reponame>.git`
* Master and branch repos have no other remotes. For stages, do whatever works best for your git workflow.
* In multi-user environments, the global gitdir and everything under it should be owned by a group named `git` and be group read-writeable.
|
1c302f4d57e540d4b12366db77548ca94bf89a61
|
www/loglook.css
|
www/loglook.css
|
/* CSS almost completely taken from T-M at www.moronic-works.co.uk with same adaptions and additions*/
tr:nth-child(odd)
{
background: #EEE;
}
tr:nth-child(even)
{
background: #DDD;
}
td
{
max-width: 900px;
}
.log
{
border-collapse: collapse;
font-family: monospace;
font-size: 14px;
}
.user
{
font-weight: bold;
}
.message .user
{
text-align: right;
border-right: 1px solid #000;
padding-right: 0.5em;
}
.spacer
{
text-align: right;
border-right: 1px solid #000;
padding-right: 0.5em;
color: #666;
}
.text
{
word-wrap: break-word;
padding-left: 0.5em;
}
.action .text
{
font-style: italic;
}
.other .text
{
color: #666;
}
|
/* CSS almost completely taken from T-M at www.moronic-works.co.uk with same adaptions and additions*/
tr:nth-child(odd)
{
background: #EEE;
}
tr:nth-child(even)
{
background: #DDD;
}
td
{
max-width: 900px;
}
.log
{
border-collapse: collapse;
font-family: monospace;
font-size: 14px;
}
.user
{
font-weight: bold;
}
.message .user
{
text-align: right;
border-right: 1px solid #000;
padding-right: 0.5em;
}
.spacer
{
text-align: right;
border-right: 1px solid #000;
padding-right: 0.5em;
color: #666;
}
.text
{
word-wrap: break-word;
padding-left: 0.5em;
}
.action .text
{
font-style: italic;
}
.other .text
{
color: #666;
}
td.time a:link, td.time a:visited
{
color: #666;
text-decoration: none;
}
td.time a:hover
{
color: #0000ff;
text-decoration: underline;
}
|
Make timestamp anchors stand out less
|
Make timestamp anchors stand out less
|
CSS
|
mit
|
Heufneutje/RE_HeufyBot,Heufneutje/RE_HeufyBot
|
css
|
## Code Before:
/* CSS almost completely taken from T-M at www.moronic-works.co.uk with same adaptions and additions*/
tr:nth-child(odd)
{
background: #EEE;
}
tr:nth-child(even)
{
background: #DDD;
}
td
{
max-width: 900px;
}
.log
{
border-collapse: collapse;
font-family: monospace;
font-size: 14px;
}
.user
{
font-weight: bold;
}
.message .user
{
text-align: right;
border-right: 1px solid #000;
padding-right: 0.5em;
}
.spacer
{
text-align: right;
border-right: 1px solid #000;
padding-right: 0.5em;
color: #666;
}
.text
{
word-wrap: break-word;
padding-left: 0.5em;
}
.action .text
{
font-style: italic;
}
.other .text
{
color: #666;
}
## Instruction:
Make timestamp anchors stand out less
## Code After:
/* CSS almost completely taken from T-M at www.moronic-works.co.uk with same adaptions and additions*/
tr:nth-child(odd)
{
background: #EEE;
}
tr:nth-child(even)
{
background: #DDD;
}
td
{
max-width: 900px;
}
.log
{
border-collapse: collapse;
font-family: monospace;
font-size: 14px;
}
.user
{
font-weight: bold;
}
.message .user
{
text-align: right;
border-right: 1px solid #000;
padding-right: 0.5em;
}
.spacer
{
text-align: right;
border-right: 1px solid #000;
padding-right: 0.5em;
color: #666;
}
.text
{
word-wrap: break-word;
padding-left: 0.5em;
}
.action .text
{
font-style: italic;
}
.other .text
{
color: #666;
}
td.time a:link, td.time a:visited
{
color: #666;
text-decoration: none;
}
td.time a:hover
{
color: #0000ff;
text-decoration: underline;
}
|
d185407ac4caf5648ef4c12eab83fec81c307407
|
tests/test_trackable.py
|
tests/test_trackable.py
|
import pytest
from utils import authenticate, logout
pytestmark = pytest.mark.trackable()
def test_trackable_flag(app, client):
e = '[email protected]'
authenticate(client, email=e)
logout(client)
authenticate(client, email=e)
with app.app_context():
user = app.security.datastore.find_user(email=e)
assert user.last_login_at is not None
assert user.current_login_at is not None
assert user.last_login_ip == 'untrackable'
assert user.current_login_ip == 'untrackable'
assert user.login_count == 2
|
import pytest
from utils import authenticate, logout
pytestmark = pytest.mark.trackable()
def test_trackable_flag(app, client):
e = '[email protected]'
authenticate(client, email=e)
logout(client)
authenticate(client, email=e, headers={'X-Forwarded-For': '127.0.0.1'})
with app.app_context():
user = app.security.datastore.find_user(email=e)
assert user.last_login_at is not None
assert user.current_login_at is not None
assert user.last_login_ip == 'untrackable'
assert user.current_login_ip == '127.0.0.1'
assert user.login_count == 2
|
Add mock X-Forwarded-For header in trackable tests
|
Add mock X-Forwarded-For header in trackable tests
|
Python
|
mit
|
pawl/flask-security,reustle/flask-security,jonafato/flask-security,asmodehn/flask-security,quokkaproject/flask-security,LeonhardPrintz/flask-security-fork,dommert/flask-security,LeonhardPrintz/flask-security-fork,fuhrysteve/flask-security,CodeSolid/flask-security,simright/flask-security,inveniosoftware/flask-security-fork,x5a/flask-security,mafrosis/flask-security,Samael500/flask-security,dlakata/flask-security,inveniosoftware/flask-security-fork,fuhrysteve/flask-security,inveniosoftware/flask-security-fork,redpandalabs/flask-security,fmerges/flask-security,wjt/flask-security,CodeSolid/flask-security,yingbo/flask-security,asmodehn/flask-security,reustle/flask-security,felix1m/flask-security,themylogin/flask-security,a-pertsev/flask-security,GregoryVigoTorres/flask-security,x5a/flask-security,quokkaproject/flask-security,tatataufik/flask-security,Samael500/flask-security,jonafato/flask-security,mik3cap/private-flask-security,a-pertsev/flask-security,guoqiao/flask-security,themylogin/flask-security,LeonhardPrintz/flask-security-fork,GregoryVigoTorres/flask-security,dommert/flask-security,fmerges/flask-security,yingbo/flask-security,mik3cap/private-flask-security,pawl/flask-security,simright/flask-security,nfvs/flask-security,tatataufik/flask-security,dlakata/flask-security,felix1m/flask-security,covertgeek/flask-security,mafrosis/flask-security,wjt/flask-security,covertgeek/flask-security,mattupstate/flask-security,redpandalabs/flask-security,guoqiao/flask-security,mattupstate/flask-security,nfvs/flask-security
|
python
|
## Code Before:
import pytest
from utils import authenticate, logout
pytestmark = pytest.mark.trackable()
def test_trackable_flag(app, client):
e = '[email protected]'
authenticate(client, email=e)
logout(client)
authenticate(client, email=e)
with app.app_context():
user = app.security.datastore.find_user(email=e)
assert user.last_login_at is not None
assert user.current_login_at is not None
assert user.last_login_ip == 'untrackable'
assert user.current_login_ip == 'untrackable'
assert user.login_count == 2
## Instruction:
Add mock X-Forwarded-For header in trackable tests
## Code After:
import pytest
from utils import authenticate, logout
pytestmark = pytest.mark.trackable()
def test_trackable_flag(app, client):
e = '[email protected]'
authenticate(client, email=e)
logout(client)
authenticate(client, email=e, headers={'X-Forwarded-For': '127.0.0.1'})
with app.app_context():
user = app.security.datastore.find_user(email=e)
assert user.last_login_at is not None
assert user.current_login_at is not None
assert user.last_login_ip == 'untrackable'
assert user.current_login_ip == '127.0.0.1'
assert user.login_count == 2
|
7f71436ba8d4c879a945d4ba7d88a71f2dbe0a43
|
apps/crbug/bg.js
|
apps/crbug/bg.js
|
var qb;
function launch() {
if ( ! qb ) qb = QBug.create();
qb.launchBrowser();
// qb.launchBrowser('chromium');
}
if ( chrome.app.runtime ) {
ajsonp = (function() {
var factory = OAuthXhrFactory.create({
authAgent: ChromeAuthAgent.create({}),
responseType: "json"
});
return function(url, params, opt_method) {
return function(ret) {
var xhr = factory.make();
return xhr.asend(ret, opt_method ? opt_method : "GET", url + (params ? '?' + params.join('&') : ''));
};
};
})();
chrome.app.runtime.onLaunched.addListener(function(opt_launchData) {
// launchData is provided by the url_handler
if ( opt_launchData ) console.log(opt_launchData.url);
console.log('launched');
launch();
});
}
|
var qb;
function launch() {
if ( ! qb ) qb = QBug.create();
qb.launchBrowser();
// qb.launchBrowser('chromium');
}
if ( chrome.app.runtime ) {
ajsonp = (function() {
var factory = OAuthXhrFactory.create({
authAgent: ChromeAuthAgent.create({}),
responseType: "json"
});
return function(url, params, opt_method, opt_payload) {
return function(ret) {
var xhr = factory.make();
xhr.responseType = "json";
return xhr.asend(ret,
opt_method ? opt_method : "GET",
url + (params ? '?' + params.join('&') : ''),
opt_payload);
};
};
})();
chrome.app.runtime.onLaunched.addListener(function(opt_launchData) {
// launchData is provided by the url_handler
if ( opt_launchData ) console.log(opt_launchData.url);
console.log('launched');
launch();
});
}
|
Add optional payload to ajsonp
|
Add optional payload to ajsonp
|
JavaScript
|
apache-2.0
|
osric-the-knight/foam,shepheb/foam,jacksonic/foam,shepheb/foam,foam-framework/foam,osric-the-knight/foam,jacksonic/foam,osric-the-knight/foam,foam-framework/foam,mdittmer/foam,mdittmer/foam,mdittmer/foam,shepheb/foam,jlhughes/foam,jlhughes/foam,jlhughes/foam,foam-framework/foam,jacksonic/foam,foam-framework/foam,mdittmer/foam,foam-framework/foam,osric-the-knight/foam,jacksonic/foam,jlhughes/foam
|
javascript
|
## Code Before:
var qb;
function launch() {
if ( ! qb ) qb = QBug.create();
qb.launchBrowser();
// qb.launchBrowser('chromium');
}
if ( chrome.app.runtime ) {
ajsonp = (function() {
var factory = OAuthXhrFactory.create({
authAgent: ChromeAuthAgent.create({}),
responseType: "json"
});
return function(url, params, opt_method) {
return function(ret) {
var xhr = factory.make();
return xhr.asend(ret, opt_method ? opt_method : "GET", url + (params ? '?' + params.join('&') : ''));
};
};
})();
chrome.app.runtime.onLaunched.addListener(function(opt_launchData) {
// launchData is provided by the url_handler
if ( opt_launchData ) console.log(opt_launchData.url);
console.log('launched');
launch();
});
}
## Instruction:
Add optional payload to ajsonp
## Code After:
var qb;
function launch() {
if ( ! qb ) qb = QBug.create();
qb.launchBrowser();
// qb.launchBrowser('chromium');
}
if ( chrome.app.runtime ) {
ajsonp = (function() {
var factory = OAuthXhrFactory.create({
authAgent: ChromeAuthAgent.create({}),
responseType: "json"
});
return function(url, params, opt_method, opt_payload) {
return function(ret) {
var xhr = factory.make();
xhr.responseType = "json";
return xhr.asend(ret,
opt_method ? opt_method : "GET",
url + (params ? '?' + params.join('&') : ''),
opt_payload);
};
};
})();
chrome.app.runtime.onLaunched.addListener(function(opt_launchData) {
// launchData is provided by the url_handler
if ( opt_launchData ) console.log(opt_launchData.url);
console.log('launched');
launch();
});
}
|
f623f37aeda0eb538f7fa71585c3f250d7ea83c5
|
metadata/org.tasks.txt
|
metadata/org.tasks.txt
|
Categories:Office
License:GPLv3
Web Site:https://github.com/abaker/tasks
Source Code:https://github.com/abaker/tasks
Issue Tracker:https://github.com/abaker/tasks/issues
Donate:https://pledgie.com/campaigns/24281
FlattrID:2308341
Auto Name:Tasks
Summary:Fork of Astrid Tasks & To-Do List
Description:
This app is built on the same code as the original Astrid app but differs in
several ways:
* No Astrid.com synchronization
* No sharing or assigning tasks
* No iOS or web interface
* Third party apps need to update their code to support Tasks
Tasks is not affiliated with Yahoo! or Todoroo.
.
Repo Type:git
Repo:https://github.com/abaker/tasks.git
Build:4.6.16,331
commit=4.6.16
subdir=astrid
gradle=yes
Auto Update Mode:Version %v
Update Check Mode:Tags
Current Version:4.6.16
Current Version Code:331
|
Categories:Office
License:GPLv3
Web Site:https://github.com/abaker/tasks
Source Code:https://github.com/abaker/tasks
Issue Tracker:https://github.com/abaker/tasks/issues
Donate:https://pledgie.com/campaigns/24281
FlattrID:2308341
Auto Name:Tasks
Summary:Fork of Astrid Tasks & To-Do List
Description:
This app is built on the same code as the original Astrid app but differs in
several ways:
* No Astrid.com synchronization
* No sharing or assigning tasks
* No iOS or web interface
* Third party apps need to update their code to support Tasks
Tasks is not affiliated with Yahoo! or Todoroo.
.
Repo Type:git
Repo:https://github.com/abaker/tasks.git
Build:4.6.16,331
commit=4.6.16
subdir=astrid
gradle=yes
Build:4.6.17,332
commit=4.6.17
subdir=astrid
gradle=yes
Auto Update Mode:Version %v
Update Check Mode:Tags
Current Version:4.6.17
Current Version Code:332
|
Update Tasks to 4.6.17 (332)
|
Update Tasks to 4.6.17 (332)
|
Text
|
agpl-3.0
|
f-droid/fdroiddata,f-droid/fdroiddata,f-droid/fdroid-data
|
text
|
## Code Before:
Categories:Office
License:GPLv3
Web Site:https://github.com/abaker/tasks
Source Code:https://github.com/abaker/tasks
Issue Tracker:https://github.com/abaker/tasks/issues
Donate:https://pledgie.com/campaigns/24281
FlattrID:2308341
Auto Name:Tasks
Summary:Fork of Astrid Tasks & To-Do List
Description:
This app is built on the same code as the original Astrid app but differs in
several ways:
* No Astrid.com synchronization
* No sharing or assigning tasks
* No iOS or web interface
* Third party apps need to update their code to support Tasks
Tasks is not affiliated with Yahoo! or Todoroo.
.
Repo Type:git
Repo:https://github.com/abaker/tasks.git
Build:4.6.16,331
commit=4.6.16
subdir=astrid
gradle=yes
Auto Update Mode:Version %v
Update Check Mode:Tags
Current Version:4.6.16
Current Version Code:331
## Instruction:
Update Tasks to 4.6.17 (332)
## Code After:
Categories:Office
License:GPLv3
Web Site:https://github.com/abaker/tasks
Source Code:https://github.com/abaker/tasks
Issue Tracker:https://github.com/abaker/tasks/issues
Donate:https://pledgie.com/campaigns/24281
FlattrID:2308341
Auto Name:Tasks
Summary:Fork of Astrid Tasks & To-Do List
Description:
This app is built on the same code as the original Astrid app but differs in
several ways:
* No Astrid.com synchronization
* No sharing or assigning tasks
* No iOS or web interface
* Third party apps need to update their code to support Tasks
Tasks is not affiliated with Yahoo! or Todoroo.
.
Repo Type:git
Repo:https://github.com/abaker/tasks.git
Build:4.6.16,331
commit=4.6.16
subdir=astrid
gradle=yes
Build:4.6.17,332
commit=4.6.17
subdir=astrid
gradle=yes
Auto Update Mode:Version %v
Update Check Mode:Tags
Current Version:4.6.17
Current Version Code:332
|
67facc744dac98c6ced4e7b32ab253e2fb16b42e
|
CHANGELOG.md
|
CHANGELOG.md
|
First version of the OpenVeo project including the following features :
- An HTTP Server with a basic front and backends
- Connection to a MongoDB database to be used by plugins
- A plugin manager to load plugins while starting the application
- Internationalization
|
This version mainly includes corrections that make it possible to launch the application from outside its root directory, plus fixes for errors specific to Linux platforms.
- Correct a JavaScript error related to defaultController.js when starting the OpenVeo server on Linux
- Correct issues while launching the OpenVeo server from outside its root directory
- Remove forever module
# 0.0.1 / 2015-04-13
First version of the OpenVeo project including the following features :
- An HTTP Server with a basic front and backends
- Connection to a MongoDB database to be used by plugins
- A plugin manager to load plugins while starting the application
- Internationalization
|
Update change logs for version 0.0.2
|
Update change logs for version 0.0.2
|
Markdown
|
agpl-3.0
|
veo-labs/openveo-core,veo-labs/openveo-core,veo-labs/openveo-core
|
markdown
|
## Code Before:
First version of the OpenVeo project including the following features :
- An HTTP Server with a basic front and backends
- Connection to a MongoDB database to be used by plugins
- A plugin manager to load plugins while starting the application
- Internationalization
## Instruction:
Update change logs for version 0.0.2
## Code After:
This version mainly includes corrections that make it possible to launch the application from outside its root directory, plus fixes for errors specific to Linux platforms.
- Correct a JavaScript error related to defaultController.js when starting the OpenVeo server on Linux
- Correct issues while launching the OpenVeo server from outside its root directory
- Remove forever module
# 0.0.1 / 2015-04-13
First version of the OpenVeo project including the following features :
- An HTTP Server with a basic front and backends
- Connection to a MongoDB database to be used by plugins
- A plugin manager to load plugins while starting the application
- Internationalization
|
04fcf7d4e4cb0abefd4f6bd4ab0c1b034d43c111
|
dbcollection/__init__.py
|
dbcollection/__init__.py
|
#from dbcollection import manager, utils
# get version
from ._version import __version__
# load API methods
from .manager import load, download, process, add, remove, config_cache, query, info
|
#from dbcollection import manager, utils
# get version
from ._version import __version__
# load API methods
from .manager import (
load,
download,
process,
add,
remove,
config_cache,
query,
info)
|
Improve visually how methods are imported
|
Improve visually how methods are imported
|
Python
|
mit
|
farrajota/dbcollection,dbcollection/dbcollection
|
python
|
## Code Before:
#from dbcollection import manager, utils
# get version
from ._version import __version__
# load API methods
from .manager import load, download, process, add, remove, config_cache, query, info
## Instruction:
Improve visually how methods are imported
## Code After:
#from dbcollection import manager, utils
# get version
from ._version import __version__
# load API methods
from .manager import (
load,
download,
process,
add,
remove,
config_cache,
query,
info)
|
265ec93cd2cb59badc8f95c26144905e73d91095
|
assets/src/InheritingClass.java
|
assets/src/InheritingClass.java
|
package THE_PACKAGE;
import org.jruby.Ruby;
import org.jruby.javasupport.util.RuntimeHelpers;
import org.jruby.runtime.builtin.IRubyObject;
import org.jruby.javasupport.JavaUtil;
import org.jruby.exceptions.RaiseException;
import org.ruboto.Script;
public class THE_RUBOTO_CLASS THE_ACTION THE_ANDROID_CLASS {
private Ruby __ruby__;
THE_CONSTANTS
private IRubyObject[] callbackProcs = new IRubyObject[CONSTANTS_COUNT];
private Ruby getRuby() {
if (__ruby__ == null) __ruby__ = Script.getRuby();
return __ruby__;
}
public void setCallbackProc(int id, IRubyObject obj) {
callbackProcs[id] = obj;
}
THE_METHODS
}
|
package THE_PACKAGE;
import org.jruby.Ruby;
import org.jruby.javasupport.util.RuntimeHelpers;
import org.jruby.runtime.builtin.IRubyObject;
import org.jruby.javasupport.JavaUtil;
import org.jruby.exceptions.RaiseException;
import org.ruboto.Script;
public class THE_RUBOTO_CLASS THE_ACTION THE_ANDROID_CLASS {
private Ruby __ruby__;
THE_CONSTANTS
private IRubyObject[] callbackProcs = new IRubyObject[CONSTANTS_COUNT];
THE_CONSTRUCTORS
private Ruby getRuby() {
if (__ruby__ == null) __ruby__ = Script.getRuby();
return __ruby__;
}
public void setCallbackProc(int id, IRubyObject obj) {
callbackProcs[id] = obj;
}
THE_METHODS
}
|
Add place to put constructors
|
Add place to put constructors
|
Java
|
mit
|
baberthal/ruboto,Jodell88/ruboto,lucasallan/ruboto,lucasallan/ruboto,ruboto/ruboto,Jodell88/ruboto,ruboto/ruboto,Jodell88/ruboto,lucasallan/ruboto,ruboto/ruboto,baberthal/ruboto,baberthal/ruboto
|
java
|
## Code Before:
package THE_PACKAGE;
import org.jruby.Ruby;
import org.jruby.javasupport.util.RuntimeHelpers;
import org.jruby.runtime.builtin.IRubyObject;
import org.jruby.javasupport.JavaUtil;
import org.jruby.exceptions.RaiseException;
import org.ruboto.Script;
public class THE_RUBOTO_CLASS THE_ACTION THE_ANDROID_CLASS {
private Ruby __ruby__;
THE_CONSTANTS
private IRubyObject[] callbackProcs = new IRubyObject[CONSTANTS_COUNT];
private Ruby getRuby() {
if (__ruby__ == null) __ruby__ = Script.getRuby();
return __ruby__;
}
public void setCallbackProc(int id, IRubyObject obj) {
callbackProcs[id] = obj;
}
THE_METHODS
}
## Instruction:
Add place to put constructors
## Code After:
package THE_PACKAGE;
import org.jruby.Ruby;
import org.jruby.javasupport.util.RuntimeHelpers;
import org.jruby.runtime.builtin.IRubyObject;
import org.jruby.javasupport.JavaUtil;
import org.jruby.exceptions.RaiseException;
import org.ruboto.Script;
public class THE_RUBOTO_CLASS THE_ACTION THE_ANDROID_CLASS {
private Ruby __ruby__;
THE_CONSTANTS
private IRubyObject[] callbackProcs = new IRubyObject[CONSTANTS_COUNT];
THE_CONSTRUCTORS
private Ruby getRuby() {
if (__ruby__ == null) __ruby__ = Script.getRuby();
return __ruby__;
}
public void setCallbackProc(int id, IRubyObject obj) {
callbackProcs[id] = obj;
}
THE_METHODS
}
|
d490b948e8363144decc5cd8b01df0d2748b90e3
|
fuzzers/075-pins/generate.tcl
|
fuzzers/075-pins/generate.tcl
|
create_project -force -part $::env(XRAY_PART) design design
set_property design_mode PinPlanning [current_fileset]
open_io_design -name io_1
set fp [open $::env(XRAY_PART)_package_pins.csv w]
puts $fp "pin,site,tile"
foreach pin [get_package_pins] {
set site [get_sites -quiet -of_object $pin]
if { $site == "" } {
continue
}
set tile [get_tiles -of_object $site]
puts $fp "$pin,$site,$tile"
}
|
create_project -force -part $::env(XRAY_PART) design design
set_property design_mode PinPlanning [current_fileset]
open_io_design -name io_1
set fp [open $::env(XRAY_PART)_package_pins.csv w]
puts $fp "pin,site,tile,pin_function"
foreach pin [get_package_pins] {
set site [get_sites -quiet -of_object $pin]
if { $site == "" } {
continue
}
set tile [get_tiles -of_object $site]
set pin_function [get_property PIN_FUNC [get_package_pins E18]]
puts $fp "$pin,$site,$tile,$pin_function"
}
|
Add pin functions column to package pins output.
|
Add pin functions column to package pins output.
This is required to know which pin is a PUDC pin, which requires special
handling.
Signed-off-by: Keith Rothman <1bc19627a439baf17510dc2d0b2d250c96d445a5@users.noreply.github.com>
|
Tcl
|
isc
|
SymbiFlow/prjxray,SymbiFlow/prjxray,SymbiFlow/prjxray,SymbiFlow/prjxray,SymbiFlow/prjxray
|
tcl
|
## Code Before:
create_project -force -part $::env(XRAY_PART) design design
set_property design_mode PinPlanning [current_fileset]
open_io_design -name io_1
set fp [open $::env(XRAY_PART)_package_pins.csv w]
puts $fp "pin,site,tile"
foreach pin [get_package_pins] {
set site [get_sites -quiet -of_object $pin]
if { $site == "" } {
continue
}
set tile [get_tiles -of_object $site]
puts $fp "$pin,$site,$tile"
}
## Instruction:
Add pin functions column to package pins output.
This is required to know which pin is a PUDC pin, which requires special
handling.
Signed-off-by: Keith Rothman <1bc19627a439baf17510dc2d0b2d250c96d445a5@users.noreply.github.com>
## Code After:
create_project -force -part $::env(XRAY_PART) design design
set_property design_mode PinPlanning [current_fileset]
open_io_design -name io_1
set fp [open $::env(XRAY_PART)_package_pins.csv w]
puts $fp "pin,site,tile,pin_function"
foreach pin [get_package_pins] {
set site [get_sites -quiet -of_object $pin]
if { $site == "" } {
continue
}
set tile [get_tiles -of_object $site]
set pin_function [get_property PIN_FUNC [get_package_pins E18]]
puts $fp "$pin,$site,$tile,$pin_function"
}
|
4e323f80fc309e3ae68557a43e38fc2ac1397f8b
|
README.md
|
README.md
|
[Build Status](https://travis-ci.org/msgehard/go-exercism)
Goals
===========
Provide non-Ruby developers an easy way to work with [exercism.io](http://exercism.io).
This tool is under heavy development. If you want something more stable to access exercism.io, please
see the [ruby gem](https://github.com/kytrinyx/exercism).
Development
===========
1. Install Go ```brew install go``` or the command appropriate for your platform.
1. Fork and clone.
1. Run ```git submodule update --init --recursive```
1. Write a test.
1. Run ``` bin/test ``` and watch test fail.
1. Make test pass.
1. Submit a pull request.
Building
========
1. Run ```./bin/build```
1. The binary will be built into the out directory.
|
[Build Status](https://travis-ci.org/msgehard/go-exercism)
Goals
===========
Provide non-Ruby developers an easy way to work with [exercism.io](http://exercism.io).
This tool is under heavy development. If you want something more stable to access exercism.io, please
see the [ruby gem](https://github.com/kytrinyx/exercism).
Development
===========
1. Install Go ```brew install go --cross-compile-common``` or the command appropriate for your platform.
1. Fork and clone.
1. Run ```git submodule update --init --recursive```
1. Write a test.
1. Run ``` bin/test ``` and watch test fail.
1. Make test pass.
1. Submit a pull request.
Building
========
1. Run ```bin/build``` and the binary for your platform will be built into the out directory.
1. Run ```bin/build-all``` and the binaries for OSX, Linux and Windows will be built into the release directory.
|
Add instructions on how to build all binaries.
|
Add instructions on how to build all binaries.
|
Markdown
|
mit
|
yeah-right/cli,exercism/cli,arvidsaur/cli,zabawaba99/cli,Tonkpils/cli,arvidsaur/cli,yeah-right/cli,neslom/cli,Tonkpils/cli,yeah-right/cli,zabawaba99/cli,Tonkpils/cli,neslom/cli,neslom/cli,lcowell/cli,exercism/cli,arvidsaur/cli,lcowell/cli,zabawaba99/cli,lcowell/cli
|
markdown
|
## Code Before:
[Build Status](https://travis-ci.org/msgehard/go-exercism)
Goals
===========
Provide non-Ruby developers an easy way to work with [exercism.io](http://exercism.io).
This tool is under heavy development. If you want something more stable to access exercism.io, please
see the [ruby gem](https://github.com/kytrinyx/exercism).
Development
===========
1. Install Go ```brew install go``` or the command appropriate for your platform.
1. Fork and clone.
1. Run ```git submodule update --init --recursive```
1. Write a test.
1. Run ``` bin/test ``` and watch test fail.
1. Make test pass.
1. Submit a pull request.
Building
========
1. Run ```./bin/build```
1. The binary will be built into the out directory.
## Instruction:
Add instructions on how to build all binaries.
## Code After:
[Build Status](https://travis-ci.org/msgehard/go-exercism)
Goals
===========
Provide non-Ruby developers an easy way to work with [exercism.io](http://exercism.io).
This tool is under heavy development. If you want something more stable to access exercism.io, please
see the [ruby gem](https://github.com/kytrinyx/exercism).
Development
===========
1. Install Go ```brew install go --cross-compile-common``` or the command appropriate for your platform.
1. Fork and clone.
1. Run ```git submodule update --init --recursive```
1. Write a test.
1. Run ``` bin/test ``` and watch test fail.
1. Make test pass.
1. Submit a pull request.
Building
========
1. Run ```bin/build``` and the binary for your platform will be built into the out directory.
1. Run ```bin/build-all``` and the binaries for OSX, Linux and Windows will be built into the release directory.
|
6cd213a6d70af30e166181879edd04eb8aab0da8
|
src/app/app.routes.js
|
src/app/app.routes.js
|
export default function(appModule) {
appModule.config(function($stateProvider, $locationProvider) {
'ngInject';
$stateProvider
.state({
name: 'stopwatch',
url: '/stopwatch',
component: 'atStopwatch'
})
.state({
name: 'timer',
url: '/timer',
component: 'atTimer'
})
.state({
name: 'about',
url: '/about',
component: 'atAbout'
});
$locationProvider.html5Mode(true);
});
}
|
export default function(appModule) {
appModule.config(function(
$stateProvider, $locationProvider, $urlRouterProvider
) {
'ngInject';
$stateProvider
.state({
name: 'stopwatch',
url: '/stopwatch',
component: 'atStopwatch'
})
.state({
name: 'timer',
url: '/timer',
component: 'atTimer'
})
.state({
name: 'about',
url: '/about',
component: 'atAbout'
});
$urlRouterProvider.otherwise('/stopwatch');
$locationProvider.html5Mode(true);
});
}
|
Make stopwatch the default route
|
Make stopwatch the default route
|
JavaScript
|
mit
|
JavierPDev/AudioTime,JavierPDev/AudioTime
|
javascript
|
## Code Before:
export default function(appModule) {
appModule.config(function($stateProvider, $locationProvider) {
'ngInject';
$stateProvider
.state({
name: 'stopwatch',
url: '/stopwatch',
component: 'atStopwatch'
})
.state({
name: 'timer',
url: '/timer',
component: 'atTimer'
})
.state({
name: 'about',
url: '/about',
component: 'atAbout'
});
$locationProvider.html5Mode(true);
});
}
## Instruction:
Make stopwatch the default route
## Code After:
export default function(appModule) {
appModule.config(function(
$stateProvider, $locationProvider, $urlRouterProvider
) {
'ngInject';
$stateProvider
.state({
name: 'stopwatch',
url: '/stopwatch',
component: 'atStopwatch'
})
.state({
name: 'timer',
url: '/timer',
component: 'atTimer'
})
.state({
name: 'about',
url: '/about',
component: 'atAbout'
});
$urlRouterProvider.otherwise('/stopwatch');
$locationProvider.html5Mode(true);
});
}
|
23e3197f15d13445defe6ec7cfb4f08484089068
|
tests/test_scripts/test_simulate_data.py
|
tests/test_scripts/test_simulate_data.py
|
import json
import numpy as np
from click.testing import CliRunner
from fastimgproto.scripts.simulate_data import cli as sim_cli
def test_simulate_data():
runner = CliRunner()
with runner.isolated_filesystem():
output_filename = 'simdata.npz'
result = runner.invoke(sim_cli,
[output_filename,])
assert result.exit_code == 0
with open(output_filename, 'rb') as f:
output_data = np.load(f)
expected_keys = ('uvw_lambda', 'model', 'vis')
for k in expected_keys:
assert k in output_data
|
import json
import numpy as np
from click.testing import CliRunner
from fastimgproto.scripts.simulate_data import cli as sim_cli
def test_simulate_data():
runner = CliRunner()
with runner.isolated_filesystem():
output_filename = 'simdata.npz'
result = runner.invoke(sim_cli,
[output_filename,
'--nstep','5'
])
assert result.exit_code == 0
with open(output_filename, 'rb') as f:
output_data = np.load(f)
expected_keys = ('uvw_lambda', 'model', 'vis')
for k in expected_keys:
assert k in output_data
|
Use few nsteps for testing sim-script
|
Use few nsteps for testing sim-script
|
Python
|
apache-2.0
|
SKA-ScienceDataProcessor/FastImaging-Python,SKA-ScienceDataProcessor/FastImaging-Python
|
python
|
## Code Before:
import json
import numpy as np
from click.testing import CliRunner
from fastimgproto.scripts.simulate_data import cli as sim_cli
def test_simulate_data():
runner = CliRunner()
with runner.isolated_filesystem():
output_filename = 'simdata.npz'
result = runner.invoke(sim_cli,
[output_filename,])
assert result.exit_code == 0
with open(output_filename, 'rb') as f:
output_data = np.load(f)
expected_keys = ('uvw_lambda', 'model', 'vis')
for k in expected_keys:
assert k in output_data
## Instruction:
Use few nsteps for testing sim-script
## Code After:
import json
import numpy as np
from click.testing import CliRunner
from fastimgproto.scripts.simulate_data import cli as sim_cli
def test_simulate_data():
runner = CliRunner()
with runner.isolated_filesystem():
output_filename = 'simdata.npz'
result = runner.invoke(sim_cli,
[output_filename,
'--nstep','5'
])
assert result.exit_code == 0
with open(output_filename, 'rb') as f:
output_data = np.load(f)
expected_keys = ('uvw_lambda', 'model', 'vis')
for k in expected_keys:
assert k in output_data
|
e996e560c1d40460980e6e5f1970c30c330e2dad
|
ngrinder-controller/src/test/resources/applicationContext.xml
|
ngrinder-controller/src/test/resources/applicationContext.xml
|
<?xml version="1.0" encoding="UTF-8"?>
<beans xmlns="http://www.springframework.org/schema/beans" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns:context="http://www.springframework.org/schema/context" xmlns:aop="http://www.springframework.org/schema/aop"
xsi:schemaLocation="
http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-3.0.xsd
http://www.springframework.org/schema/context http://www.springframework.org/schema/context/spring-context-3.0.xsd
http://www.springframework.org/schema/aop http://www.springframework.org/schema/aop/spring-aop-3.0.xsd">
<context:annotation-config />
<aop:aspectj-autoproxy />
<!-- Scans the classpath of this application for @Components to deploy as beans -->
<context:component-scan base-package="org.ngrinder">
<context:exclude-filter type="annotation" expression="org.ngrinder.infra.annotation.RuntimeOnlyComponent" />
</context:component-scan>
<import resource="applicationContext-springdata.xml" />
<import resource="applicationContext-ehcache.xml" />
<import resource="applicationContext-external.xml" />
</beans>
|
<?xml version="1.0" encoding="UTF-8"?>
<beans xmlns="http://www.springframework.org/schema/beans" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns:context="http://www.springframework.org/schema/context" xmlns:aop="http://www.springframework.org/schema/aop"
xsi:schemaLocation="
http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-3.0.xsd
http://www.springframework.org/schema/context http://www.springframework.org/schema/context/spring-context-3.0.xsd
http://www.springframework.org/schema/aop http://www.springframework.org/schema/aop/spring-aop-3.0.xsd">
<context:annotation-config />
<aop:aspectj-autoproxy />
<!-- Scans the classpath of this application for @Components to deploy as beans -->
<context:component-scan base-package="org.ngrinder">
<context:exclude-filter type="annotation" expression="org.ngrinder.infra.annotation.RuntimeOnlyComponent" />
</context:component-scan>
<import resource="applicationContext-springdata.xml" />
<import resource="applicationContext-ehcache.xml" />
<import resource="applicationContext-external.xml" />
<import resource="applicationContext-task.xml" />
</beans>
|
Add unit test for controller
|
[NGRINDER-68] Add unit test for controller
add spring task configuration.
|
XML
|
apache-2.0
|
SRCB-CloudPart/ngrinder,GwonGisoo/ngrinder,naver/ngrinder,naver/ngrinder,chengaomin/ngrinder,SRCB-CloudPart/ngrinder,nanpa83/ngrinder,SRCB-CloudPart/ngrinder,ropik/ngrinder,SRCB-CloudPart/ngrinder,GwonGisoo/ngrinder,GwonGisoo/ngrinder,songeunwoo/ngrinder,ropik/ngrinder,chengaomin/ngrinder,songeunwoo/ngrinder,bwahn/ngrinder,bwahn/ngrinder,SRCB-CloudPart/ngrinder,bwahn/ngrinder,nanpa83/ngrinder,chengaomin/ngrinder,songeunwoo/ngrinder,GwonGisoo/ngrinder,nanpa83/ngrinder,naver/ngrinder,ropik/ngrinder,bwahn/ngrinder,songeunwoo/ngrinder,songeunwoo/ngrinder,GwonGisoo/ngrinder,nanpa83/ngrinder,naver/ngrinder,nanpa83/ngrinder,ropik/ngrinder,naver/ngrinder,chengaomin/ngrinder,chengaomin/ngrinder,bwahn/ngrinder
|
xml
|
## Code Before:
<?xml version="1.0" encoding="UTF-8"?>
<beans xmlns="http://www.springframework.org/schema/beans" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns:context="http://www.springframework.org/schema/context" xmlns:aop="http://www.springframework.org/schema/aop"
xsi:schemaLocation="
http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-3.0.xsd
http://www.springframework.org/schema/context http://www.springframework.org/schema/context/spring-context-3.0.xsd
http://www.springframework.org/schema/aop http://www.springframework.org/schema/aop/spring-aop-3.0.xsd">
<context:annotation-config />
<aop:aspectj-autoproxy />
<!-- Scans the classpath of this application for @Components to deploy as beans -->
<context:component-scan base-package="org.ngrinder">
<context:exclude-filter type="annotation" expression="org.ngrinder.infra.annotation.RuntimeOnlyComponent" />
</context:component-scan>
<import resource="applicationContext-springdata.xml" />
<import resource="applicationContext-ehcache.xml" />
<import resource="applicationContext-external.xml" />
</beans>
## Instruction:
[NGRINDER-68] Add unit test for controller
add spring task configuration.
## Code After:
<?xml version="1.0" encoding="UTF-8"?>
<beans xmlns="http://www.springframework.org/schema/beans" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns:context="http://www.springframework.org/schema/context" xmlns:aop="http://www.springframework.org/schema/aop"
xsi:schemaLocation="
http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-3.0.xsd
http://www.springframework.org/schema/context http://www.springframework.org/schema/context/spring-context-3.0.xsd
http://www.springframework.org/schema/aop http://www.springframework.org/schema/aop/spring-aop-3.0.xsd">
<context:annotation-config />
<aop:aspectj-autoproxy />
<!-- Scans the classpath of this application for @Components to deploy as beans -->
<context:component-scan base-package="org.ngrinder">
<context:exclude-filter type="annotation" expression="org.ngrinder.infra.annotation.RuntimeOnlyComponent" />
</context:component-scan>
<import resource="applicationContext-springdata.xml" />
<import resource="applicationContext-ehcache.xml" />
<import resource="applicationContext-external.xml" />
<import resource="applicationContext-task.xml" />
</beans>
|
27813fa58438e3aa11fbddf0a9b02dcf2a93b755
|
app/scss/calllog.scss
|
app/scss/calllog.scss
|
.tabs-striped.tabs-positive .tab-item .badge {
opacity: 1;
}
.call-log-list {
.item {
h2 {
width: 50%;
display: inline-block;
}
span {
float: right;
}
&.not-seen {
span {
animation: notify-pulse 1.75s infinite;
color: #ef473a;
}
h2 {
animation: notify-pulse 1.75s infinite;
color: #ef473a;
}
}
}
}
|
.tabs-striped.tabs-positive .tab-item .badge {
opacity: 1;
}
@keyframes not-read-pulse {
0% { color: #ef473a;}
50% { color: black; }
100% { color: #ef473a; }
}
.call-log-list {
.item {
h2 {
width: 50%;
display: inline-block;
}
span {
float: right;
}
&.not-seen {
span {
animation: not-read-pulse 1.75s infinite;
color: #ef473a;
}
h2 {
animation: not-read-pulse 1.75s infinite;
color: #ef473a;
}
}
}
}
|
Make unanswered calls pulse from red to black instead of changing opacity
|
Make unanswered calls pulse from red to black instead of changing opacity
|
SCSS
|
mit
|
learning-layers/sardroid,learning-layers/sardroid,learning-layers/sardroid
|
scss
|
## Code Before:
.tabs-striped.tabs-positive .tab-item .badge {
opacity: 1;
}
.call-log-list {
.item {
h2 {
width: 50%;
display: inline-block;
}
span {
float: right;
}
&.not-seen {
span {
animation: notify-pulse 1.75s infinite;
color: #ef473a;
}
h2 {
animation: notify-pulse 1.75s infinite;
color: #ef473a;
}
}
}
}
## Instruction:
Make unanswered calls pulse from red to black instead of changing opacity
## Code After:
.tabs-striped.tabs-positive .tab-item .badge {
opacity: 1;
}
@keyframes not-read-pulse {
0% { color: #ef473a;}
50% { color: black; }
100% { color: #ef473a; }
}
.call-log-list {
.item {
h2 {
width: 50%;
display: inline-block;
}
span {
float: right;
}
&.not-seen {
span {
animation: not-read-pulse 1.75s infinite;
color: #ef473a;
}
h2 {
animation: not-read-pulse 1.75s infinite;
color: #ef473a;
}
}
}
}
|
283557d1b8097b866cb7122b20ee167847c1683d
|
src/main/java/me/rkfg/xmpp/bot/plugins/StdinPlugin.java
|
src/main/java/me/rkfg/xmpp/bot/plugins/StdinPlugin.java
|
package me.rkfg.xmpp.bot.plugins;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import me.rkfg.xmpp.bot.Main;
import org.jivesoftware.smack.packet.Message;
public class StdinPlugin implements MessagePlugin {
@Override
public Pattern getPattern() {
return null;
}
@Override
public String process(Message message, Matcher matcher) {
return null;
}
@Override
public void init() {
new Thread(new Runnable() {
@Override
public void run() {
BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(System.in));
while (!Thread.interrupted()) {
try {
String line = bufferedReader.readLine();
Main.sendMUCMessage(line);
} catch (IOException e) {
e.printStackTrace();
}
}
}
}, "Stdin handler").start();
}
@Override
public String getManual() {
return null;
}
}
|
package me.rkfg.xmpp.bot.plugins;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import me.rkfg.xmpp.bot.Main;
import org.jivesoftware.smack.packet.Message;
public class StdinPlugin implements MessagePlugin {
@Override
public Pattern getPattern() {
return null;
}
@Override
public String process(Message message, Matcher matcher) {
return null;
}
@Override
public void init() {
new Thread(new Runnable() {
@Override
public void run() {
BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(System.in));
while (!Thread.interrupted()) {
try {
String line = bufferedReader.readLine();
if (line == null) {
// stdin is not connected
break;
}
Main.sendMUCMessage(line);
} catch (IOException e) {
e.printStackTrace();
}
}
}
}, "Stdin handler").start();
}
@Override
public String getManual() {
return null;
}
}
|
Stop the stdin thread if stdin gets detached or is not connected.
|
Stop the stdin thread if stdin gets detached or is not connected.
|
Java
|
agpl-3.0
|
rkfg/jbot,rkfg/jbot,Kona-chan/jbot,neexee/jbot
|
java
|
## Code Before:
package me.rkfg.xmpp.bot.plugins;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import me.rkfg.xmpp.bot.Main;
import org.jivesoftware.smack.packet.Message;
public class StdinPlugin implements MessagePlugin {
@Override
public Pattern getPattern() {
return null;
}
@Override
public String process(Message message, Matcher matcher) {
return null;
}
@Override
public void init() {
new Thread(new Runnable() {
@Override
public void run() {
BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(System.in));
while (!Thread.interrupted()) {
try {
String line = bufferedReader.readLine();
Main.sendMUCMessage(line);
} catch (IOException e) {
e.printStackTrace();
}
}
}
}, "Stdin handler").start();
}
@Override
public String getManual() {
return null;
}
}
## Instruction:
Stop the stdin thread if stdin gets detached or is not connected.
## Code After:
package me.rkfg.xmpp.bot.plugins;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import me.rkfg.xmpp.bot.Main;
import org.jivesoftware.smack.packet.Message;
public class StdinPlugin implements MessagePlugin {
@Override
public Pattern getPattern() {
return null;
}
@Override
public String process(Message message, Matcher matcher) {
return null;
}
@Override
public void init() {
new Thread(new Runnable() {
@Override
public void run() {
BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(System.in));
while (!Thread.interrupted()) {
try {
String line = bufferedReader.readLine();
if (line == null) {
// stdin is not connected
break;
}
Main.sendMUCMessage(line);
} catch (IOException e) {
e.printStackTrace();
}
}
}
}, "Stdin handler").start();
}
@Override
public String getManual() {
return null;
}
}
|
62cf7b3dd4339e013086070d5b9d631fde0be027
|
source/gloperate/source/stages/ColorGradientPreparationStage.cpp
|
source/gloperate/source/stages/ColorGradientPreparationStage.cpp
|
namespace gloperate
{
ColorGradientPreparationStage::ColorGradientPreparationStage()
{
addInput("gradients", gradients);
addInput("pixmapSize", pixmapSize);
addOutput("names", names);
addOutput("pixmaps", pixmaps);
}
ColorGradientPreparationStage::~ColorGradientPreparationStage()
{
}
void ColorGradientPreparationStage::process()
{
ColorGradientPreparation preparation(gradients.data(), pixmapSize.data());
names.data() = preparation.names();
pixmaps.data() = preparation.pixmaps();
invalidateOutputs();
}
} // namespace gloperate
|
namespace gloperate
{
ColorGradientPreparationStage::ColorGradientPreparationStage()
{
addInput("gradients", gradients);
addInput("pixmapSize", pixmapSize);
addOutput("names", names);
addOutput("pixmaps", pixmaps);
}
ColorGradientPreparationStage::~ColorGradientPreparationStage()
{
}
void ColorGradientPreparationStage::process()
{
ColorGradientPreparation preparation(gradients.data(), pixmapSize.data());
preparation.fillNames(names.data());
preparation.fillPixmaps(pixmaps.data());
invalidateOutputs();
}
} // namespace gloperate
|
Use designated interface of the color gradient preparation tool in the color gradient preparation stage
|
Use designated interface of the color gradient preparation tool in the color gradient preparation stage
|
C++
|
mit
|
j-o/gloperate,p-otto/gloperate,cginternals/gloperate,lanice/gloperate,j-o/gloperate,hpicgs/gloperate,p-otto/gloperate,p-otto/gloperate,Beta-Alf/gloperate,p-otto/gloperate,lanice/gloperate,Beta-Alf/gloperate,Beta-Alf/gloperate,lanice/gloperate,cginternals/gloperate,hpicgs/gloperate,Beta-Alf/gloperate,j-o/gloperate,hpicgs/gloperate,lanice/gloperate,p-otto/gloperate,Beta-Alf/gloperate,cginternals/gloperate,hpicgs/gloperate,lanice/gloperate,j-o/gloperate,cginternals/gloperate,hpicgs/gloperate
|
c++
|
## Code Before:
namespace gloperate
{
ColorGradientPreparationStage::ColorGradientPreparationStage()
{
addInput("gradients", gradients);
addInput("pixmapSize", pixmapSize);
addOutput("names", names);
addOutput("pixmaps", pixmaps);
}
ColorGradientPreparationStage::~ColorGradientPreparationStage()
{
}
void ColorGradientPreparationStage::process()
{
ColorGradientPreparation preparation(gradients.data(), pixmapSize.data());
names.data() = preparation.names();
pixmaps.data() = preparation.pixmaps();
invalidateOutputs();
}
} // namespace gloperate
## Instruction:
Use designated interface of the color gradient preparation tool in the color gradient preparation stage
## Code After:
namespace gloperate
{
ColorGradientPreparationStage::ColorGradientPreparationStage()
{
addInput("gradients", gradients);
addInput("pixmapSize", pixmapSize);
addOutput("names", names);
addOutput("pixmaps", pixmaps);
}
ColorGradientPreparationStage::~ColorGradientPreparationStage()
{
}
void ColorGradientPreparationStage::process()
{
ColorGradientPreparation preparation(gradients.data(), pixmapSize.data());
preparation.fillNames(names.data());
preparation.fillPixmaps(pixmaps.data());
invalidateOutputs();
}
} // namespace gloperate
|
9f95cbb14b1c7e55ac610e500718b570858c6932
|
proto/certmon/messages.go
|
proto/certmon/messages.go
|
package certmon
const (
CertTypeSSH = 0
CertTypeX509 = 1
)
// Format of certificate notifications (server -> client):
// certType: 32 bit uint (big-endian)
// certLength: 64 bit uint (big-endian)
// certData: sequence of bytes
// Client sends no data.
|
package certmon
const (
ConnectString = "200 Connected to keymaster certmon service"
HttpPath = "/certmon"
CertTypeSSH = 0
CertTypeX509 = 1
)
// Format of certificate notifications (server -> client):
// certType: 32 bit uint (big-endian)
// certLength: 64 bit uint (big-endian)
// certData: sequence of bytes
// Client sends no data.
|
Add ConnectString and HttpPath constants to proto/certmon package.
|
Add ConnectString and HttpPath constants to proto/certmon package.
|
Go
|
apache-2.0
|
rgooch/keymaster,rgooch/keymaster,Symantec/keymaster,Symantec/keymaster,rgooch/keymaster,Symantec/keymaster
|
go
|
## Code Before:
package certmon
const (
CertTypeSSH = 0
CertTypeX509 = 1
)
// Format of certificate notifications (server -> client):
// certType: 32 bit uint (big-endian)
// certLength: 64 bit uint (big-endian)
// certData: sequence of bytes
// Client sends no data.
## Instruction:
Add ConnectString and HttpPath constants to proto/certmon package.
## Code After:
package certmon
const (
ConnectString = "200 Connected to keymaster certmon service"
HttpPath = "/certmon"
CertTypeSSH = 0
CertTypeX509 = 1
)
// Format of certificate notifications (server -> client):
// certType: 32 bit uint (big-endian)
// certLength: 64 bit uint (big-endian)
// certData: sequence of bytes
// Client sends no data.
|
210fd5dcb6b5473857ed2c42a6971672f826303a
|
scripts/delete-aem-stacks.sh
|
scripts/delete-aem-stacks.sh
|
set -o nounset
set -o errexit
if [ "$#" -le 1 ] || [ "$#" -gt 2 ]; then
echo 'Usage: ./delete-aem-stacks.sh <stack_prefix> [config_path]'
exit 1
fi
stack_prefix=$1
config_path=$2
delete_single_stack() {
./scripts/delete-stack.sh "$1" "$stack_prefix" "$config_path"
}
delete_multi_stacks() {
for stack in $1
do
delete_single_stack "$stack" &
done
wait
}
echo "Deleting $stack_prefix AEM stacks..."
delete_multi_stacks "apps/dns-records"
delete_multi_stacks "apps/chaos-monkey apps/orchestrator apps/author-dispatcher apps/publish-dispatcher apps/publish apps/author"
delete_multi_stacks "apps/messaging apps/security-groups"
echo "Finished deleting $stack_prefix AEM stacks"
|
set -o nounset
set -o errexit
if [ "$#" -le 1 ] || [ "$#" -gt 2 ]; then
echo 'Usage: ./delete-aem-stacks.sh <stack_prefix> [config_path]'
exit 1
fi
stack_prefix=$1
config_path=$2
delete_single_stack() {
./scripts/delete-stack.sh "$1" "$stack_prefix" "$config_path"
}
delete_multi_stacks() {
for stack in $1
do
delete_single_stack "$stack" &
done
wait
}
echo "Deleting $stack_prefix AEM stacks..."
delete_multi_stacks "apps/dns-records"
delete_multi_stacks "apps/chaos-monkey apps/orchestrator apps/author-dispatcher apps/publish-dispatcher apps/publish apps/author"
delete_multi_stacks "apps/roles apps/messaging apps/security-groups"
echo "Finished deleting $stack_prefix AEM stacks"
|
Add roles deletion on delete-aem set.
|
Add roles deletion on delete-aem set.
|
Shell
|
apache-2.0
|
shinesolutions/aem-aws-stack-builder,shinesolutions/aem-aws-stack-builder
|
shell
|
## Code Before:
set -o nounset
set -o errexit
if [ "$#" -le 1 ] || [ "$#" -gt 2 ]; then
echo 'Usage: ./delete-aem-stacks.sh <stack_prefix> [config_path]'
exit 1
fi
stack_prefix=$1
config_path=$2
delete_single_stack() {
./scripts/delete-stack.sh "$1" "$stack_prefix" "$config_path"
}
delete_multi_stacks() {
for stack in $1
do
delete_single_stack "$stack" &
done
wait
}
echo "Deleting $stack_prefix AEM stacks..."
delete_multi_stacks "apps/dns-records"
delete_multi_stacks "apps/chaos-monkey apps/orchestrator apps/author-dispatcher apps/publish-dispatcher apps/publish apps/author"
delete_multi_stacks "apps/messaging apps/security-groups"
echo "Finished deleting $stack_prefix AEM stacks"
## Instruction:
Add roles deletion on delete-aem set.
## Code After:
set -o nounset
set -o errexit
if [ "$#" -le 1 ] || [ "$#" -gt 2 ]; then
echo 'Usage: ./delete-aem-stacks.sh <stack_prefix> [config_path]'
exit 1
fi
stack_prefix=$1
config_path=$2
delete_single_stack() {
./scripts/delete-stack.sh "$1" "$stack_prefix" "$config_path"
}
delete_multi_stacks() {
for stack in $1
do
delete_single_stack "$stack" &
done
wait
}
echo "Deleting $stack_prefix AEM stacks..."
delete_multi_stacks "apps/dns-records"
delete_multi_stacks "apps/chaos-monkey apps/orchestrator apps/author-dispatcher apps/publish-dispatcher apps/publish apps/author"
delete_multi_stacks "apps/roles apps/messaging apps/security-groups"
echo "Finished deleting $stack_prefix AEM stacks"
|
9442100fe4f596a5bb3efbefa92cacef918f6e89
|
src/main/java/fr/insee/eno/exception/Utils.java
|
src/main/java/fr/insee/eno/exception/Utils.java
|
package fr.insee.eno.exception;
import net.sf.saxon.trans.XPathException;
public class Utils {
public static String getErrorLocation(String styleSheet, Exception e){
String errorMessage="";
try{
int line = ((XPathException) e).getLocator().getLineNumber();
int column = ((XPathException) e).getLocator().getColumnNumber();
errorMessage += String.format("Error in :%s [line :%d - column:%d]", styleSheet,line,column);
} catch (Exception exception){
}
return errorMessage;
}
}
|
package fr.insee.eno.exception;
import net.sf.saxon.trans.XPathException;
public class Utils {
public static String getErrorLocation(String styleSheet, Exception e){
String errorMessage="";
try{
String location = ((XPathException) e).getLocationAsString();
errorMessage += String.format("Error in :%s %s", styleSheet, location);
} catch (Exception exception){
}
return errorMessage;
}
}
|
Improve utils function for xslt exception
|
Improve utils function for xslt exception
|
Java
|
mit
|
InseeFr/Eno,InseeFr/Eno
|
java
|
## Code Before:
package fr.insee.eno.exception;
import net.sf.saxon.trans.XPathException;
public class Utils {
public static String getErrorLocation(String styleSheet, Exception e){
String errorMessage="";
try{
int line = ((XPathException) e).getLocator().getLineNumber();
int column = ((XPathException) e).getLocator().getColumnNumber();
errorMessage += String.format("Error in :%s [line :%d - column:%d]", styleSheet,line,column);
} catch (Exception exception){
}
return errorMessage;
}
}
## Instruction:
Improve utils function for xslt exception
## Code After:
package fr.insee.eno.exception;
import net.sf.saxon.trans.XPathException;
public class Utils {
public static String getErrorLocation(String styleSheet, Exception e){
String errorMessage="";
try{
String location = ((XPathException) e).getLocationAsString();
errorMessage += String.format("Error in :%s %s", styleSheet, location);
} catch (Exception exception){
}
return errorMessage;
}
}
|
5039130fc0818ea29ad22710797aee889d5560fc
|
src/components/gene/index.tsx
|
src/components/gene/index.tsx
|
import * as React from "react"
import * as Relay from "react-relay"
interface Props extends RelayProps, React.HTMLProps<GeneContents> {
gene: any
}
export class GeneContents extends React.Component<Props, null> {
render() {
return (
<div>
{this.props.gene.name}
{this.props.gene.mode}
</div>
)
}
}
export default Relay.createContainer(GeneContents, {
fragments: {
gene: () => Relay.QL`
fragment on Gene {
mode
name
}
`,
},
})
interface RelayProps {
gene: {
mode: string | null,
name: string | null,
} | any
}
|
import * as React from "react"
import * as Relay from "react-relay"
import Artworks from "../artwork_grid"
import ArtistRow from "./artist_row"
const PageSize = 10
interface Props extends RelayProps, React.HTMLProps<GeneContents> {
gene: any
}
export class GeneContents extends React.Component<Props, null> {
render() {
let artists = this.props.gene.artists.edges.map(edge => {
return (
<ArtistRow artist={edge.node as any} key={edge.__dataID__} />
)
})
return (
<div>
{artists}
</div>
)
}
}
export default Relay.createContainer(GeneContents, {
initialVariables: {
showArtists: true,
artworksSize: PageSize,
artistsSize: PageSize,
medium: "*",
aggregations: ["MEDIUM", "TOTAL", "PRICE_RANGE", "DIMENSION_RANGE"],
price_range: "*",
dimension_range: "*",
sort: "-partner_updated_at",
},
fragments: {
gene: () => Relay.QL`
fragment on Gene {
mode
name
artists: artists_connection(first: $artistsSize) @include(if: $showArtists) {
edges {
node {
${ArtistRow.getFragment("artist")}
}
}
}
artworks: artworks_connection(
first: $artworksSize,
aggregations: $aggregations,
medium: $medium,
price_range: $price_range,
dimension_range: $dimension_range,
sort: $sort,
) @skip(if: $showArtists) {
${Artworks.getFragment("artworks")}
}
}
`,
},
})
interface RelayProps {
gene: {
mode: string | null,
name: string | null,
} | any
}
|
Add ArtistRow to GeneContents view
|
Add ArtistRow to GeneContents view
|
TypeScript
|
mit
|
xtina-starr/reaction,artsy/reaction-force,artsy/reaction,xtina-starr/reaction,craigspaeth/reaction,xtina-starr/reaction,artsy/reaction,artsy/reaction-force,craigspaeth/reaction,craigspaeth/reaction,xtina-starr/reaction,artsy/reaction
|
typescript
|
## Code Before:
import * as React from "react"
import * as Relay from "react-relay"
interface Props extends RelayProps, React.HTMLProps<GeneContents> {
gene: any
}
export class GeneContents extends React.Component<Props, null> {
render() {
return (
<div>
{this.props.gene.name}
{this.props.gene.mode}
</div>
)
}
}
export default Relay.createContainer(GeneContents, {
fragments: {
gene: () => Relay.QL`
fragment on Gene {
mode
name
}
`,
},
})
interface RelayProps {
gene: {
mode: string | null,
name: string | null,
} | any
}
## Instruction:
Add ArtistRow to GeneContents view
## Code After:
import * as React from "react"
import * as Relay from "react-relay"
import Artworks from "../artwork_grid"
import ArtistRow from "./artist_row"
const PageSize = 10
interface Props extends RelayProps, React.HTMLProps<GeneContents> {
gene: any
}
export class GeneContents extends React.Component<Props, null> {
render() {
let artists = this.props.gene.artists.edges.map(edge => {
return (
<ArtistRow artist={edge.node as any} key={edge.__dataID__} />
)
})
return (
<div>
{artists}
</div>
)
}
}
export default Relay.createContainer(GeneContents, {
initialVariables: {
showArtists: true,
artworksSize: PageSize,
artistsSize: PageSize,
medium: "*",
aggregations: ["MEDIUM", "TOTAL", "PRICE_RANGE", "DIMENSION_RANGE"],
price_range: "*",
dimension_range: "*",
sort: "-partner_updated_at",
},
fragments: {
gene: () => Relay.QL`
fragment on Gene {
mode
name
artists: artists_connection(first: $artistsSize) @include(if: $showArtists) {
edges {
node {
${ArtistRow.getFragment("artist")}
}
}
}
artworks: artworks_connection(
first: $artworksSize,
aggregations: $aggregations,
medium: $medium,
price_range: $price_range,
dimension_range: $dimension_range,
sort: $sort,
) @skip(if: $showArtists) {
${Artworks.getFragment("artworks")}
}
}
`,
},
})
interface RelayProps {
gene: {
mode: string | null,
name: string | null,
} | any
}
|
58d7e13353f5203a56ab12b4f7e69cfc9730e8a0
|
examples/app/sw.js
|
examples/app/sw.js
|
importScripts('/streaming-dot.min.js');
const ASSETS = [
'/header.partial.html',
'/footer.partial.html',
'/index.dot.html'
];
self.oninstall = event => event.waitUntil(
caches.open('static')
.then(cache => cache.addAll(ASSETS))
.then(_ => self.skipWaiting())
);
self.onactivate = event => event.waitUntil(self.clients.claim());
function timeoutPromise(t) {
return new Promise(resolve =>
setTimeout(resolve, t)
);
}
self.onfetch = event => event.respondWith(
caches.match('/index.dot.html')
.then(response => response.text())
.then(body => {
const template = doT.compile(body);
const response = template({
header: caches.match('/header.partial.html').then(r => r.body),
footer: caches.match('/footer.partial.html').then(r => r.body),
location: timeoutPromise(2000).then(_ => 'in a service worker'),
version: doT.version
});
return new Response(response, {headers: {'Content-Type': 'text/html'}});
})
);
|
importScripts('/streaming-dot.min.js');
const ASSETS = [
'/header.partial.html',
'/footer.partial.html',
'/index.dot.html'
];
self.oninstall = event => event.waitUntil(
caches.open('static')
.then(cache => cache.addAll(ASSETS))
.then(_ => self.skipWaiting())
);
self.onactivate = event => event.waitUntil(self.clients.claim());
function timeoutPromise(t) {
return new Promise(resolve =>
setTimeout(resolve, t)
);
}
self.onfetch = event => {
event.parsedUrl = new URL(event.request.url);
if (event.parsedUrl.pathname !== '/') return event.respondWith(fetch(event.request));
event.respondWith(
caches.match('/index.dot.html')
.then(response => response.text())
.then(body => {
const template = doT.compile(body);
const response = template({
header: caches.match('/header.partial.html').then(r => r.body),
footer: caches.match('/footer.partial.html').then(r => r.body),
location: timeoutPromise(2000).then(_ => 'in a service worker'),
version: doT.version
});
return new Response(response, {headers: {'Content-Type': 'text/html'}});
})
);
};
|
Adjust ServiceWorker for polyfill demo
|
Adjust ServiceWorker for polyfill demo
|
JavaScript
|
apache-2.0
|
surma/streaming-dot,surma/streaming-dot
|
javascript
|
## Code Before:
importScripts('/streaming-dot.min.js');
const ASSETS = [
'/header.partial.html',
'/footer.partial.html',
'/index.dot.html'
];
self.oninstall = event => event.waitUntil(
caches.open('static')
.then(cache => cache.addAll(ASSETS))
.then(_ => self.skipWaiting())
);
self.onactivate = event => event.waitUntil(self.clients.claim());
function timeoutPromise(t) {
return new Promise(resolve =>
setTimeout(resolve, t)
);
}
self.onfetch = event => event.respondWith(
caches.match('/index.dot.html')
.then(response => response.text())
.then(body => {
const template = doT.compile(body);
const response = template({
header: caches.match('/header.partial.html').then(r => r.body),
footer: caches.match('/footer.partial.html').then(r => r.body),
location: timeoutPromise(2000).then(_ => 'in a service worker'),
version: doT.version
});
return new Response(response, {headers: {'Content-Type': 'text/html'}});
})
);
## Instruction:
Adjust ServiceWorker for polyfill demo
## Code After:
importScripts('/streaming-dot.min.js');
const ASSETS = [
'/header.partial.html',
'/footer.partial.html',
'/index.dot.html'
];
self.oninstall = event => event.waitUntil(
caches.open('static')
.then(cache => cache.addAll(ASSETS))
.then(_ => self.skipWaiting())
);
self.onactivate = event => event.waitUntil(self.clients.claim());
function timeoutPromise(t) {
return new Promise(resolve =>
setTimeout(resolve, t)
);
}
self.onfetch = event => {
event.parsedUrl = new URL(event.request.url);
if (event.parsedUrl.pathname !== '/') return event.respondWith(fetch(event.request));
event.respondWith(
caches.match('/index.dot.html')
.then(response => response.text())
.then(body => {
const template = doT.compile(body);
const response = template({
header: caches.match('/header.partial.html').then(r => r.body),
footer: caches.match('/footer.partial.html').then(r => r.body),
location: timeoutPromise(2000).then(_ => 'in a service worker'),
version: doT.version
});
return new Response(response, {headers: {'Content-Type': 'text/html'}});
})
);
};
|
0d327e6c6d48a4cdf4d42da8d1cfdab0272a5e66
|
sh/clean.sh
|
sh/clean.sh
|
rm -rf src/public/style/min
rm -rf src/public/scripts/min
|
rm -rf src/public/style/min
echo "Removed minified style files."
rm -rf src/public/scripts/min
echo "Removed minified script files."
|
Add notification when minified files are removed.
|
Add notification when minified files are removed.
|
Shell
|
mit
|
adamheins/adamheins.com,adamheins/personal-website,adamheins/personal-website,adamheins/personal-website,adamheins/adamheins.com
|
shell
|
## Code Before:
rm -rf src/public/style/min
rm -rf src/public/scripts/min
## Instruction:
Add notification when minified files are removed.
## Code After:
rm -rf src/public/style/min
echo "Removed minified style files."
rm -rf src/public/scripts/min
echo "Removed minified script files."
|
99116afa64d98e872844db774f67842429321715
|
README.md
|
README.md
|
=======
Traveler stores VLACS' users in our datomic database and provides a user interface to search users and change passwords.
## Artifact
* Releases are published to Clojars
Latest Artifact: ``` [org.vlacs/traveler "0.2.0"] ```
## Copyright and License
Created by [Mike George](http://mikegeorge.org)
Copyright © 2014 Virtual Learning Academy Charter School
Distributed under the Eclipse Public License either version 1.0 or (at
your option) any later version.
<p align="center"><a href="http://vlacs.org/" target="_blank"><img src="http://vlacs.org/images/VLACS_logo_no_dep_website.png" alt="VLACS Logo"/></a></p>
|
=======
''' This branch is not stable '''
Traveler stores VLACS' users in our datomic database and provides a user interface to search users and change passwords.
## Artifact
* Releases are published to Clojars
Latest Artifact: ``` [org.vlacs/traveler "0.2.0"] ```
## Copyright and License
Created by [Mike George](http://mikegeorge.org)
Copyright © 2014 Virtual Learning Academy Charter School
Distributed under the Eclipse Public License either version 1.0 or (at
your option) any later version.
<p align="center"><a href="http://vlacs.org/" target="_blank"><img src="http://vlacs.org/images/VLACS_logo_no_dep_website.png" alt="VLACS Logo"/></a></p>
|
Update readme badges and disclaimer
|
Update readme badges and disclaimer
|
Markdown
|
epl-1.0
|
vlacs/traveler
|
markdown
|
## Code Before:
=======
Traveler stores VLACS' users in our datomic database and provides a user interface to search users and change passwords.
## Artifact
* Releases are published to Clojars
Latest Artifact: ``` [org.vlacs/traveler "0.2.0"] ```
## Copyright and License
Created by [Mike George](http://mikegeorge.org)
Copyright © 2014 Virtual Learning Academy Charter School
Distributed under the Eclipse Public License either version 1.0 or (at
your option) any later version.
<p align="center"><a href="http://vlacs.org/" target="_blank"><img src="http://vlacs.org/images/VLACS_logo_no_dep_website.png" alt="VLACS Logo"/></a></p>
## Instruction:
Update readme badges and disclaimer
## Code After:
=======
''' This branch is not stable '''
Traveler stores VLACS' users in our datomic database and provides a user interface to search users and change passwords.
## Artifact
* Releases are published to Clojars
Latest Artifact: ``` [org.vlacs/traveler "0.2.0"] ```
## Copyright and License
Created by [Mike George](http://mikegeorge.org)
Copyright © 2014 Virtual Learning Academy Charter School
Distributed under the Eclipse Public License either version 1.0 or (at
your option) any later version.
<p align="center"><a href="http://vlacs.org/" target="_blank"><img src="http://vlacs.org/images/VLACS_logo_no_dep_website.png" alt="VLACS Logo"/></a></p>
|
935043dda123a030130571a2a4bb45b2b13f145c
|
addons/website_quote/__manifest__.py
|
addons/website_quote/__manifest__.py
|
{
'name': 'Online Proposals',
'category': 'Website',
'summary': 'Sales',
'website': 'https://www.odoo.com/page/quote-builder',
'version': '1.0',
'description': "",
'depends': ['website', 'sale_management', 'mail', 'payment', 'website_mail', 'sale_payment'],
'data': [
'data/website_quote_data.xml',
'report/sale_order_reports.xml',
'report/sale_order_templates.xml',
'report/website_quote_templates.xml',
'views/sale_order_views.xml',
'views/sale_quote_views.xml',
'views/website_quote_templates.xml',
'views/res_config_settings_views.xml',
'security/ir.model.access.csv',
],
'demo': [
'data/website_quote_demo.xml'
],
'qweb': ['static/src/xml/*.xml'],
'installable': True,
}
|
{
'name': 'Online Proposals',
'category': 'Website',
'summary': 'Sales',
'website': 'https://www.odoo.com/page/quote-builder',
'version': '1.0',
'description': "",
'depends': ['website', 'sale_management', 'mail', 'payment', 'website_mail'],
'data': [
'data/website_quote_data.xml',
'report/sale_order_reports.xml',
'report/sale_order_templates.xml',
'report/website_quote_templates.xml',
'views/sale_order_views.xml',
'views/sale_quote_views.xml',
'views/website_quote_templates.xml',
'views/res_config_settings_views.xml',
'security/ir.model.access.csv',
],
'demo': [
'data/website_quote_demo.xml'
],
'qweb': ['static/src/xml/*.xml'],
'installable': True,
}
|
Revert "[FIX] website_quote: make 'Pay & Confirm' work without website_sale"
|
Revert "[FIX] website_quote: make 'Pay & Confirm' work without website_sale"
No dependency change in stable version
This reverts commit 65a589eb54a1421baa71074701bea2873a83c75f.
|
Python
|
agpl-3.0
|
ygol/odoo,ygol/odoo,ygol/odoo,ygol/odoo,ygol/odoo,ygol/odoo,ygol/odoo
|
python
|
## Code Before:
{
'name': 'Online Proposals',
'category': 'Website',
'summary': 'Sales',
'website': 'https://www.odoo.com/page/quote-builder',
'version': '1.0',
'description': "",
'depends': ['website', 'sale_management', 'mail', 'payment', 'website_mail', 'sale_payment'],
'data': [
'data/website_quote_data.xml',
'report/sale_order_reports.xml',
'report/sale_order_templates.xml',
'report/website_quote_templates.xml',
'views/sale_order_views.xml',
'views/sale_quote_views.xml',
'views/website_quote_templates.xml',
'views/res_config_settings_views.xml',
'security/ir.model.access.csv',
],
'demo': [
'data/website_quote_demo.xml'
],
'qweb': ['static/src/xml/*.xml'],
'installable': True,
}
## Instruction:
Revert "[FIX] website_quote: make 'Pay & Confirm' work without website_sale"
No dependency change in stable version
This reverts commit 65a589eb54a1421baa71074701bea2873a83c75f.
## Code After:
{
'name': 'Online Proposals',
'category': 'Website',
'summary': 'Sales',
'website': 'https://www.odoo.com/page/quote-builder',
'version': '1.0',
'description': "",
'depends': ['website', 'sale_management', 'mail', 'payment', 'website_mail'],
'data': [
'data/website_quote_data.xml',
'report/sale_order_reports.xml',
'report/sale_order_templates.xml',
'report/website_quote_templates.xml',
'views/sale_order_views.xml',
'views/sale_quote_views.xml',
'views/website_quote_templates.xml',
'views/res_config_settings_views.xml',
'security/ir.model.access.csv',
],
'demo': [
'data/website_quote_demo.xml'
],
'qweb': ['static/src/xml/*.xml'],
'installable': True,
}
|
91b7f48ed70322997c041f720a712f54fbc55214
|
data-pages.json
|
data-pages.json
|
---
layout: null
---
{% assign posts_processed = site.posts | where:"layout","page" %}
{"nodes":[
{% for post in posts_processed %}
{
"title": "{{ post.title }}",
"page-url": "{{ post.page-url }}",
"users": "{% for user in post.users %}{{ user }}{% unless forloop.last %}, {% endunless %}{% endfor %}",
"goals": "{% for goal in post.goals %}{{ goal }}{% unless forloop.last %}, {% endunless %}{% endfor %}",
"role": "{% for role in post.role %}{{ role }}{% unless forloop.last %}, {% endunless %}{% endfor %}",
"priority": "{{ post.priority }}",
"database-url": "{{ post.url }}"
}{% unless forloop.last %},{% endunless %}
{% endfor %}
]}
|
---
layout: null
---
{% assign posts_processed = site.posts | where:"layout","page" %}
{"nodes":[
{% for post in posts_processed %}
{
"title": "{{ post.title }}",
"page-url": "{{ post.page-url }}",
"users": "{% for user in post.users %}{{ user }}{% unless forloop.last %}, {% endunless %}{% endfor %}",
"goals": "{% for goal in post.goals %}{{ goal }}{% unless forloop.last %}, {% endunless %}{% endfor %}",
"role": "{% for role in post.role %}{{ role }}{% unless forloop.last %}, {% endunless %}{% endfor %}",
"priority": "{{ post.priority }}",
"position-horizontal": "{{ post.position-horizontal }}",
"position-vertical": "{{ post.position-vertical }}",
"database-url": "{{ post.url }}"
}{% unless forloop.last %},{% endunless %}
{% endfor %}
]}
|
Integrate fixed positioning for pages
|
Integrate fixed positioning for pages
|
JSON
|
mit
|
khawkins98/EBI-Adaptive-content-model,khawkins98/EBI-Adaptive-content-model
|
json
|
## Code Before:
---
layout: null
---
{% assign posts_processed = site.posts | where:"layout","page" %}
{"nodes":[
{% for post in posts_processed %}
{
"title": "{{ post.title }}",
"page-url": "{{ post.page-url }}",
"users": "{% for user in post.users %}{{ user }}{% unless forloop.last %}, {% endunless %}{% endfor %}",
"goals": "{% for goal in post.goals %}{{ goal }}{% unless forloop.last %}, {% endunless %}{% endfor %}",
"role": "{% for role in post.role %}{{ role }}{% unless forloop.last %}, {% endunless %}{% endfor %}",
"priority": "{{ post.priority }}",
"database-url": "{{ post.url }}"
}{% unless forloop.last %},{% endunless %}
{% endfor %}
]}
## Instruction:
Integrate fixed positioning for pages
## Code After:
---
layout: null
---
{% assign posts_processed = site.posts | where:"layout","page" %}
{"nodes":[
{% for post in posts_processed %}
{
"title": "{{ post.title }}",
"page-url": "{{ post.page-url }}",
"users": "{% for user in post.users %}{{ user }}{% unless forloop.last %}, {% endunless %}{% endfor %}",
"goals": "{% for goal in post.goals %}{{ goal }}{% unless forloop.last %}, {% endunless %}{% endfor %}",
"role": "{% for role in post.role %}{{ role }}{% unless forloop.last %}, {% endunless %}{% endfor %}",
"priority": "{{ post.priority }}",
"position-horizontal": "{{ post.position-horizontal }}",
"position-vertical": "{{ post.position-vertical }}",
"database-url": "{{ post.url }}"
}{% unless forloop.last %},{% endunless %}
{% endfor %}
]}
|
8ade2d53f6d7a212eba0ded8894d801d800fb956
|
util/docker/web/sftpgo/sftpgo.json
|
util/docker/web/sftpgo/sftpgo.json
|
{
"common": {
"idle_timeout": 15,
"upload_mode": 2,
"setstat_mode": 1,
"actions": {
"execute_on": [
"upload",
"pre-delete",
"rename"
],
"hook": "/usr/local/bin/azuracast_sftp_event"
}
},
"sftpd": {
"bindings": [
{
"port": 2022,
"address": "",
"apply_proxy_config": true
}
],
"host_keys": [
"persist/id_rsa",
"persist/id_ecdsa",
"persist/id_ed25519"
],
"enable_scp": true
},
"httpd": {
"bindings": [
{
"port": 0
}
],
"templates_path": "/usr/share/sftpgo/templates",
"static_files_path": "/usr/share/sftpgo/static"
},
"telemetry": {
"bind_port": 0
},
"data_provider": {
"driver": "bolt",
"name": "sftpgo.db",
"users_base_dir": "/var/azuracast/stations",
"external_auth_hook": "/usr/local/bin/azuracast_sftp_auth",
"external_auth_scope": 0
}
}
|
{
"common": {
"idle_timeout": 15,
"upload_mode": 2,
"setstat_mode": 1,
"actions": {
"execute_on": [
"upload",
"pre-delete",
"rename"
],
"hook": "/usr/local/bin/azuracast_sftp_event"
},
"defender": {
"enabled": true,
"driver": "memory"
}
},
"sftpd": {
"bindings": [
{
"port": 2022,
"address": "",
"apply_proxy_config": true
}
],
"host_keys": [
"persist/id_rsa",
"persist/id_ecdsa",
"persist/id_ed25519"
],
"enable_scp": true
},
"httpd": {
"bindings": [
{
"port": 0
}
],
"templates_path": "/usr/share/sftpgo/templates",
"static_files_path": "/usr/share/sftpgo/static"
},
"telemetry": {
"bind_port": 0
},
"data_provider": {
"driver": "bolt",
"name": "sftpgo.db",
"users_base_dir": "/var/azuracast/stations",
"external_auth_hook": "/usr/local/bin/azuracast_sftp_auth",
"external_auth_scope": 0
}
}
|
Enable SFTPGo's built-in "defender" service.
|
Enable SFTPGo's built-in "defender" service.
|
JSON
|
agpl-3.0
|
AzuraCast/AzuraCast,AzuraCast/AzuraCast,AzuraCast/AzuraCast,AzuraCast/AzuraCast
|
json
|
## Code Before:
{
"common": {
"idle_timeout": 15,
"upload_mode": 2,
"setstat_mode": 1,
"actions": {
"execute_on": [
"upload",
"pre-delete",
"rename"
],
"hook": "/usr/local/bin/azuracast_sftp_event"
}
},
"sftpd": {
"bindings": [
{
"port": 2022,
"address": "",
"apply_proxy_config": true
}
],
"host_keys": [
"persist/id_rsa",
"persist/id_ecdsa",
"persist/id_ed25519"
],
"enable_scp": true
},
"httpd": {
"bindings": [
{
"port": 0
}
],
"templates_path": "/usr/share/sftpgo/templates",
"static_files_path": "/usr/share/sftpgo/static"
},
"telemetry": {
"bind_port": 0
},
"data_provider": {
"driver": "bolt",
"name": "sftpgo.db",
"users_base_dir": "/var/azuracast/stations",
"external_auth_hook": "/usr/local/bin/azuracast_sftp_auth",
"external_auth_scope": 0
}
}
## Instruction:
Enable SFTPGo's built-in "defender" service.
## Code After:
{
"common": {
"idle_timeout": 15,
"upload_mode": 2,
"setstat_mode": 1,
"actions": {
"execute_on": [
"upload",
"pre-delete",
"rename"
],
"hook": "/usr/local/bin/azuracast_sftp_event"
},
"defender": {
"enabled": true,
"driver": "memory"
}
},
"sftpd": {
"bindings": [
{
"port": 2022,
"address": "",
"apply_proxy_config": true
}
],
"host_keys": [
"persist/id_rsa",
"persist/id_ecdsa",
"persist/id_ed25519"
],
"enable_scp": true
},
"httpd": {
"bindings": [
{
"port": 0
}
],
"templates_path": "/usr/share/sftpgo/templates",
"static_files_path": "/usr/share/sftpgo/static"
},
"telemetry": {
"bind_port": 0
},
"data_provider": {
"driver": "bolt",
"name": "sftpgo.db",
"users_base_dir": "/var/azuracast/stations",
"external_auth_hook": "/usr/local/bin/azuracast_sftp_auth",
"external_auth_scope": 0
}
}
|
d10344dce7d012de2d434cd205fb0f179e34113c
|
packages/syft/src/syft/core/tensor/types.py
|
packages/syft/src/syft/core/tensor/types.py
|
from .passthrough import AcceptableSimpleType # type: ignore
from .passthrough import PassthroughTensor # type: ignore
from .passthrough import SupportedChainType # type: ignore
|
from .passthrough import AcceptableSimpleType # type: ignore # NOQA
from .passthrough import PassthroughTensor # type: ignore # NOQA
from .passthrough import SupportedChainType # type: ignore # NOQA
|
Fix flake8 warning by adding flake annotation
|
Fix flake8 warning by adding flake annotation
|
Python
|
apache-2.0
|
OpenMined/PySyft,OpenMined/PySyft,OpenMined/PySyft,OpenMined/PySyft
|
python
|
## Code Before:
from .passthrough import AcceptableSimpleType # type: ignore
from .passthrough import PassthroughTensor # type: ignore
from .passthrough import SupportedChainType # type: ignore
## Instruction:
Fix flake8 warning by adding flake annotation
## Code After:
from .passthrough import AcceptableSimpleType # type: ignore # NOQA
from .passthrough import PassthroughTensor # type: ignore # NOQA
from .passthrough import SupportedChainType # type: ignore # NOQA
|