commit | old_file | new_file | old_contents | new_contents | subject | message | lang | license | repos | config | content |
---|---|---|---|---|---|---|---|---|---|---|---|
57080c84390d9a994586c55e473e45dac480002f
|
lib/mongomodel/railtie.rb
|
lib/mongomodel/railtie.rb
|
module MongoModel
class Railtie < Rails::Railtie
initializer "mongomodel.logger" do
MongoModel.logger ||= ::Rails.logger
end
initializer "mongomodel.rescue_responses" do
ActionDispatch::ShowExceptions.rescue_responses['MongoModel::DocumentNotFound'] = :not_found
end
initializer "mongomodel.database_configuration" do |app|
require 'erb'
config = Pathname.new(app.paths.config.to_a.first).join("mongomodel.yml")
mongomodel_configuration = YAML::load(ERB.new(IO.read(config)).result)
MongoModel.configuration = mongomodel_configuration[Rails.env]
end
end
end
|
module MongoModel
class Railtie < Rails::Railtie
initializer "mongomodel.logger" do
MongoModel.logger ||= ::Rails.logger
end
initializer "mongomodel.rescue_responses" do
ActionDispatch::ShowExceptions.rescue_responses['MongoModel::DocumentNotFound'] = :not_found
end
initializer "mongomodel.database_configuration" do |app|
require 'erb'
config = Pathname.new(app.paths.config.to_a.first).join("mongomodel.yml")
if File.exists?(config)
mongomodel_configuration = YAML::load(ERB.new(IO.read(config)).result)
MongoModel.configuration = mongomodel_configuration[Rails.env]
end
end
end
end
|
Check for existence of mongomodel.yml in Railtie
|
Check for existence of mongomodel.yml in Railtie
|
Ruby
|
mit
|
spohlenz/mongomodel
|
ruby
|
## Code Before:
module MongoModel
class Railtie < Rails::Railtie
initializer "mongomodel.logger" do
MongoModel.logger ||= ::Rails.logger
end
initializer "mongomodel.rescue_responses" do
ActionDispatch::ShowExceptions.rescue_responses['MongoModel::DocumentNotFound'] = :not_found
end
initializer "mongomodel.database_configuration" do |app|
require 'erb'
config = Pathname.new(app.paths.config.to_a.first).join("mongomodel.yml")
mongomodel_configuration = YAML::load(ERB.new(IO.read(config)).result)
MongoModel.configuration = mongomodel_configuration[Rails.env]
end
end
end
## Instruction:
Check for existence of mongomodel.yml in Railtie
## Code After:
module MongoModel
class Railtie < Rails::Railtie
initializer "mongomodel.logger" do
MongoModel.logger ||= ::Rails.logger
end
initializer "mongomodel.rescue_responses" do
ActionDispatch::ShowExceptions.rescue_responses['MongoModel::DocumentNotFound'] = :not_found
end
initializer "mongomodel.database_configuration" do |app|
require 'erb'
config = Pathname.new(app.paths.config.to_a.first).join("mongomodel.yml")
if File.exists?(config)
mongomodel_configuration = YAML::load(ERB.new(IO.read(config)).result)
MongoModel.configuration = mongomodel_configuration[Rails.env]
end
end
end
end
|
69ee1e8219b57f66abd1f97dcc9bacc6a36fd79f
|
C++/pure_desctructors.cpp
|
C++/pure_desctructors.cpp
|
// http://www.geeksforgeeks.org/pure-virtual-destructor-c/
// 1) C++ Accepts pure virtual destructors, a Base class needs to provide definition for pure
// virtual destructor as well because it is being called in reverse order and compiler
// looks for its definition
// 2) class contains abstract class if it contains any pure virtual functions
// 3) If a base class has pure virtual functions
// in the derived class to implement it
#include <iostream>
using namespace std;
class Base {
public:
virtual ~Base() = 0; // pure virtual destructor
virtual void foo() = 0;
};
// Definition of destructor
// if this is not defined compiler throws
// linker error
Base :: ~Base() {
cout<<"Base destructor called\n";
}
class Derived : public Base {
public:
~Derived() {
cout<<"Derived destructor called\n";
}
void foo() {
cout<<"I am foo\n";
}
};
int main() {
// the below is not possible because Base is abstract
// Derived *d = new Base();
// Base is inaccessible here when it is not public
Base *b = new Derived();
// first calls derived then calls base
delete b;
return 0;
}
|
// http://www.geeksforgeeks.org/pure-virtual-destructor-c/
// 1) C++ Accepts pure virtual destructors, a Base class needs to provide definition for pure
// virtual destructor as well because it is being called in reverse order and compiler
// looks for its definition
// 2) class contains abstract class if it contains any pure virtual functions
// 3) If a base class has pure virtual functions
// in the derived class to implement it
/**
* Virtual destructor are useful when
* a derived class object is destroyed
* only its destructor is called, if the base
* class destructor is made as virtual, then both
* derived and base class destructor is called.
*/
#include <iostream>
using namespace std;
class Base {
public:
virtual ~Base() = 0; // pure virtual destructor
virtual void foo() = 0;
};
// Definition of destructor
// if this is not defined compiler throws
// linker error
Base :: ~Base() {
cout<<"Base destructor called\n";
}
class Derived : public Base {
public:
~Derived() {
cout<<"Derived destructor called\n";
}
void foo() {
cout<<"I am foo\n";
}
};
int main() {
// the below is not possible because Base is abstract
// Derived *d = new Base();
// Base is inaccessible here when it is not public
Base *b = new Derived();
// first calls derived then calls base
delete b;
return 0;
}
|
Add explaination to why virtual destructors are needed
|
Add explaination to why virtual destructors are needed
Signed-off-by: WajahatSiddiqui <[email protected]>
|
C++
|
apache-2.0
|
WajahatSiddiqui/Workspace,WajahatSiddiqui/Workspace,WajahatSiddiqui/Workspace
|
c++
|
## Code Before:
// http://www.geeksforgeeks.org/pure-virtual-destructor-c/
// 1) C++ Accepts pure virtual destructors, a Base class needs to provide definition for pure
// virtual destructor as well because it is being called in reverse order and compiler
// looks for its definition
// 2) class contains abstract class if it contains any pure virtual functions
// 3) If a base class has pure virtual functions
// in the derived class to implement it
#include <iostream>
using namespace std;
class Base {
public:
virtual ~Base() = 0; // pure virtual destructor
virtual void foo() = 0;
};
// Definition of destructor
// if this is not defined compiler throws
// linker error
Base :: ~Base() {
cout<<"Base destructor called\n";
}
class Derived : public Base {
public:
~Derived() {
cout<<"Derived destructor called\n";
}
void foo() {
cout<<"I am foo\n";
}
};
int main() {
// the below is not possible because Base is abstract
// Derived *d = new Base();
// Base is inaccessible here when it is not public
Base *b = new Derived();
// first calls derived then calls base
delete b;
return 0;
}
## Instruction:
Add explaination to why virtual destructors are needed
Signed-off-by: WajahatSiddiqui <[email protected]>
## Code After:
// http://www.geeksforgeeks.org/pure-virtual-destructor-c/
// 1) C++ Accepts pure virtual destructors, a Base class needs to provide definition for pure
// virtual destructor as well because it is being called in reverse order and compiler
// looks for its definition
// 2) class contains abstract class if it contains any pure virtual functions
// 3) If a base class has pure virtual functions
// in the derived class to implement it
/**
* Virtual destructor are useful when
* a derived class object is destroyed
* only its destructor is called, if the base
* class destructor is made as virtual, then both
* derived and base class destructor is called.
*/
#include <iostream>
using namespace std;
class Base {
public:
virtual ~Base() = 0; // pure virtual destructor
virtual void foo() = 0;
};
// Definition of destructor
// if this is not defined compiler throws
// linker error
Base :: ~Base() {
cout<<"Base destructor called\n";
}
class Derived : public Base {
public:
~Derived() {
cout<<"Derived destructor called\n";
}
void foo() {
cout<<"I am foo\n";
}
};
int main() {
// the below is not possible because Base is abstract
// Derived *d = new Base();
// Base is inaccessible here when it is not public
Base *b = new Derived();
// first calls derived then calls base
delete b;
return 0;
}
|
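The commit above adds a comment explaining why virtual destructors matter. As an editorial aside (not part of the dataset row), a minimal standalone C++ sketch makes that explanation concrete by contrasting a non-virtual base destructor with a virtual one; the class names `PlainBase`, `PlainDerived`, `VirtualBase`, and `VirtualDerived` are made up for illustration only.

```cpp
// Illustrative sketch: what happens when a derived object is deleted
// through a base pointer, with and without a virtual base destructor.
#include <iostream>

struct PlainBase {
    ~PlainBase() { std::cout << "PlainBase destructor\n"; }   // not virtual
};
struct PlainDerived : PlainBase {
    ~PlainDerived() { std::cout << "PlainDerived destructor\n"; }
};

struct VirtualBase {
    virtual ~VirtualBase() { std::cout << "VirtualBase destructor\n"; }
};
struct VirtualDerived : VirtualBase {
    ~VirtualDerived() override { std::cout << "VirtualDerived destructor\n"; }
};

int main() {
    // Deleting through a base pointer whose destructor is not virtual is
    // undefined behaviour; in practice only ~PlainBase() runs here, so the
    // derived part is never cleaned up.
    PlainBase *p = new PlainDerived();
    delete p;

    // With a virtual destructor both destructors run, derived first, then
    // base -- the behaviour the commit's added comment describes.
    VirtualBase *v = new VirtualDerived();
    delete v;
    return 0;
}
```

Any C++11 compiler will build this; the output shows only the base destructor firing in the first case and both firing (derived before base) in the second.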
036c7b37229f9dc8fca639d92225e2ed43aadf38
|
product/varna/catalog/native/stack.yaml
|
product/varna/catalog/native/stack.yaml
|
resolver: nightly-2016-10-05
compiler: ghcjs-0.2.1.820161005_ghc-8.0.1
compiler-check: match-exact
setup-info:
ghcjs:
source:
ghcjs-0.2.1.820161005_ghc-8.0.1:
url: "http://ghcjs.tolysz.org/ghc-8.0-2016-10-05-nightly-2016-10-05-820161005.tar.gz"
sha1: d5eaa1fd34080c3102bbad5343d01fd066d387b6
allow-newer: true
packages:
- .
- ../../shared
- ../../../../pkg/hs/nauva
- ../../../../pkg/hs/nauva-css
- ../../../../pkg/hs/nauva-catalog
- ../../../../pkg/hs/nauva-native
extra-deps:
- ghcjs-dom-0.5.0.2
- ghcjs-dom-jsffi-0.5.0.2
|
resolver: lts-7.19
compiler: ghcjs-0.2.1.9007019_ghc-8.0.1
compiler-check: match-exact
setup-info:
ghcjs:
source:
ghcjs-0.2.1.9007019_ghc-8.0.1:
url: http://ghcjs.tolysz.org/ghc-8.0-2017-02-05-lts-7.19-9007019.tar.gz
sha1: d2cfc25f9cda32a25a87d9af68891b2186ee52f9
allow-newer: true
packages:
- .
- ../../shared
- ../../../../pkg/hs/nauva
- ../../../../pkg/hs/nauva-css
- ../../../../pkg/hs/nauva-catalog
- ../../../../pkg/hs/nauva-native
extra-deps:
- blaze-html-0.8.1.2
- blaze-markup-0.7.1.0
- blaze-builder-0.4.0.2
- random-1.1
- data-default-0.7.1.1
- data-default-class-0.1.2.0
- data-default-instances-containers-0.0.1
- data-default-instances-dlist-0.0.1
- data-default-instances-old-locale-0.0.1
- safe-0.3.9
|
Update to newest ghcjs compiler
|
Update to newest ghcjs compiler
|
YAML
|
mit
|
wereHamster/nauva,wereHamster/nauva,wereHamster/nauva
|
yaml
|
## Code Before:
resolver: nightly-2016-10-05
compiler: ghcjs-0.2.1.820161005_ghc-8.0.1
compiler-check: match-exact
setup-info:
ghcjs:
source:
ghcjs-0.2.1.820161005_ghc-8.0.1:
url: "http://ghcjs.tolysz.org/ghc-8.0-2016-10-05-nightly-2016-10-05-820161005.tar.gz"
sha1: d5eaa1fd34080c3102bbad5343d01fd066d387b6
allow-newer: true
packages:
- .
- ../../shared
- ../../../../pkg/hs/nauva
- ../../../../pkg/hs/nauva-css
- ../../../../pkg/hs/nauva-catalog
- ../../../../pkg/hs/nauva-native
extra-deps:
- ghcjs-dom-0.5.0.2
- ghcjs-dom-jsffi-0.5.0.2
## Instruction:
Update to newest ghcjs compiler
## Code After:
resolver: lts-7.19
compiler: ghcjs-0.2.1.9007019_ghc-8.0.1
compiler-check: match-exact
setup-info:
ghcjs:
source:
ghcjs-0.2.1.9007019_ghc-8.0.1:
url: http://ghcjs.tolysz.org/ghc-8.0-2017-02-05-lts-7.19-9007019.tar.gz
sha1: d2cfc25f9cda32a25a87d9af68891b2186ee52f9
allow-newer: true
packages:
- .
- ../../shared
- ../../../../pkg/hs/nauva
- ../../../../pkg/hs/nauva-css
- ../../../../pkg/hs/nauva-catalog
- ../../../../pkg/hs/nauva-native
extra-deps:
- blaze-html-0.8.1.2
- blaze-markup-0.7.1.0
- blaze-builder-0.4.0.2
- random-1.1
- data-default-0.7.1.1
- data-default-class-0.1.2.0
- data-default-instances-containers-0.0.1
- data-default-instances-dlist-0.0.1
- data-default-instances-old-locale-0.0.1
- safe-0.3.9
|
3c064a8962cd35dda2963bf6fc88a0d387b7c49a
|
.travis-ci.sh
|
.travis-ci.sh
|
if [ "$TRAVIS_OS_NAME" = "linux" ]; then
z3_version="z3-4.7.1-x64-ubuntu-14.04"
install_dir="/usr/bin/z3"
elif [ "$TRAVIS_OS_NAME" = "osx" ]; then
z3_version="z3-4.7.1-x64-osx-10.11.6"
install_dir="/usr/local/bin/z3"
fi
wget "https://github.com/Z3Prover/z3/releases/download/z3-4.7.1/${z3_version}.zip"
unzip "${z3_version}.zip"
sudo cp "${z3_version}/bin/z3" $install_dir
# Retrieve opam.
wget -qq https://github.com/ocaml/opam/releases/download/2.0.6/opam-2.0.6-x86_64-linux
sudo mv opam-2.0.6-x86_64-linux /usr/local/bin/opam
sudo chmod a+x /usr/local/bin/opam
opam init --disable-sandboxing --yes --comp 4.04.0 && eval $(opam env)
# Install ocaml packages needed for Kind 2.
opam install --yes ocamlbuild ocamlfind menhir yojson
# Build the PR's Kind 2.
./autogen.sh
./build.sh --prefix=$(pwd) # prefix installs the binary into the working directory for Travis
# Checking regression test.
make test
|
if [ "$TRAVIS_OS_NAME" = "linux" ]; then
z3_version="z3-4.7.1-x64-ubuntu-14.04"
install_dir="/usr/bin/z3"
elif [ "$TRAVIS_OS_NAME" = "osx" ]; then
z3_version="z3-4.7.1-x64-osx-10.11.6"
install_dir="/usr/local/bin/z3"
fi
wget "https://github.com/Z3Prover/z3/releases/download/z3-4.7.1/${z3_version}.zip"
unzip "${z3_version}.zip"
sudo cp "${z3_version}/bin/z3" $install_dir
# Retrieve opam.
wget -qq https://raw.github.com/ocaml/opam/master/shell/opam_installer.sh -O - | sh -s /usr/local/bin 4.04.0
export OPAMYES=1
eval $(opam config env)
# Install ocaml packages needed for Kind 2.
opam install ocamlbuild ocamlfind menhir yojson
# Build the PR's Kind 2.
./autogen.sh
./build.sh --prefix=$(pwd) # prefix installs the binary into the working directory for Travis
# Checking regression test.
make test
|
Revert last changes in Travis script
|
Revert last changes in Travis script
|
Shell
|
apache-2.0
|
daniel-larraz/kind2,kind2-mc/kind2
|
shell
|
## Code Before:
if [ "$TRAVIS_OS_NAME" = "linux" ]; then
z3_version="z3-4.7.1-x64-ubuntu-14.04"
install_dir="/usr/bin/z3"
elif [ "$TRAVIS_OS_NAME" = "osx" ]; then
z3_version="z3-4.7.1-x64-osx-10.11.6"
install_dir="/usr/local/bin/z3"
fi
wget "https://github.com/Z3Prover/z3/releases/download/z3-4.7.1/${z3_version}.zip"
unzip "${z3_version}.zip"
sudo cp "${z3_version}/bin/z3" $install_dir
# Retrieve opam.
wget -qq https://github.com/ocaml/opam/releases/download/2.0.6/opam-2.0.6-x86_64-linux
sudo mv opam-2.0.6-x86_64-linux /usr/local/bin/opam
sudo chmod a+x /usr/local/bin/opam
opam init --disable-sandboxing --yes --comp 4.04.0 && eval $(opam env)
# Install ocaml packages needed for Kind 2.
opam install --yes ocamlbuild ocamlfind menhir yojson
# Build the PR's Kind 2.
./autogen.sh
./build.sh --prefix=$(pwd) # prefix installs the binary into the working directory for Travis
# Checking regression test.
make test
## Instruction:
Revert last changes in Travis script
## Code After:
if [ "$TRAVIS_OS_NAME" = "linux" ]; then
z3_version="z3-4.7.1-x64-ubuntu-14.04"
install_dir="/usr/bin/z3"
elif [ "$TRAVIS_OS_NAME" = "osx" ]; then
z3_version="z3-4.7.1-x64-osx-10.11.6"
install_dir="/usr/local/bin/z3"
fi
wget "https://github.com/Z3Prover/z3/releases/download/z3-4.7.1/${z3_version}.zip"
unzip "${z3_version}.zip"
sudo cp "${z3_version}/bin/z3" $install_dir
# Retrieve opam.
wget -qq https://raw.github.com/ocaml/opam/master/shell/opam_installer.sh -O - | sh -s /usr/local/bin 4.04.0
export OPAMYES=1
eval $(opam config env)
# Install ocaml packages needed for Kind 2.
opam install ocamlbuild ocamlfind menhir yojson
# Build the PR's Kind 2.
./autogen.sh
./build.sh --prefix=$(pwd) # prefix installs the binary into the working directory for Travis
# Checking regression test.
make test
|
91bd1bb7e0c2e3d4cdc32a5c13e870c08b026338
|
.circleci/config.yml
|
.circleci/config.yml
|
version: 2
jobs:
build:
machine: true
steps:
- checkout
- run:
name: Build Environment
command: docker build -t build_image .
- run:
name: Start Build Environment
command: docker run --volume "$(pwd):$(pwd)" --workdir "$(pwd)" --tty --detach build_image bash > container_id
- run:
name: Build
command: docker exec $(cat container_id) make
- run:
name: Test
command: docker exec $(cat container_id) make check
- run:
command: docker stop $(cat container_id)
when: always
- store_artifacts:
path: output.pdf
- store_artifacts:
path: pdfgen.h
- store_artifacts:
path: pdfgen.o
|
version: 2
jobs:
build:
machine: true
steps:
- checkout
- run:
name: Construct Build Environment
command: docker build -t build_image .
- run:
name: Start Build Environment
command: docker run --volume "$(pwd):$(pwd)" --workdir "$(pwd)" --tty --detach build_image bash > container_id
- run:
name: Build
command: docker exec $(cat container_id) make
- run:
name: Test
command: docker exec $(cat container_id) make check
- run:
name: Stop Build Environment
command: docker stop $(cat container_id)
when: always
- store_artifacts:
path: output.pdf
- store_artifacts:
path: pdfgen.h
- store_artifacts:
path: pdfgen.o
|
Fix up build step names
|
circleci: Fix up build step names
|
YAML
|
unlicense
|
AndreRenaud/PDFGen,AndreRenaud/PDFGen,AndreRenaud/PDFGen
|
yaml
|
## Code Before:
version: 2
jobs:
build:
machine: true
steps:
- checkout
- run:
name: Build Environment
command: docker build -t build_image .
- run:
name: Start Build Environment
command: docker run --volume "$(pwd):$(pwd)" --workdir "$(pwd)" --tty --detach build_image bash > container_id
- run:
name: Build
command: docker exec $(cat container_id) make
- run:
name: Test
command: docker exec $(cat container_id) make check
- run:
command: docker stop $(cat container_id)
when: always
- store_artifacts:
path: output.pdf
- store_artifacts:
path: pdfgen.h
- store_artifacts:
path: pdfgen.o
## Instruction:
circleci: Fix up build step names
## Code After:
version: 2
jobs:
build:
machine: true
steps:
- checkout
- run:
name: Construct Build Environment
command: docker build -t build_image .
- run:
name: Start Build Environment
command: docker run --volume "$(pwd):$(pwd)" --workdir "$(pwd)" --tty --detach build_image bash > container_id
- run:
name: Build
command: docker exec $(cat container_id) make
- run:
name: Test
command: docker exec $(cat container_id) make check
- run:
name: Stop Build Environment
command: docker stop $(cat container_id)
when: always
- store_artifacts:
path: output.pdf
- store_artifacts:
path: pdfgen.h
- store_artifacts:
path: pdfgen.o
|
b94d6a9d60781f0eeb58122151867468952a4d78
|
models/SignupForm.php
|
models/SignupForm.php
|
<?php
namespace app\models;
use yii\base\Model;
/**
* Signup form
*/
class SignupForm extends Model
{
public $email;
public $signup_token;
public $password;
/**
* @inheritdoc
*/
public function rules()
{
return [
['signup_token', 'trim'],
['email', 'trim'],
['email', 'required'],
['email', 'email'],
['email', 'string', 'max' => 255],
['email', 'unique', 'targetClass' => '\app\models\User', 'message' => 'You are already an existing user. Proceed to login'],
['password', 'required'],
['password', 'string', 'min' => 6],
];
}
/**
* Signs user up.
*
* @return User|null the saved model or null if saving fails
*/
public function signup()
{
if (!$this->validate()) {
return null;
}
$user = new User();
$user->email = $this->email;
$user->setPassword($this->password);
$user->generateAuthKey();
return $user->save() ? $user : null;
}
}
|
<?php
namespace app\models;
use yii\base\Model;
/**
* Signup form
*/
class SignupForm extends Model
{
public $email;
public $signup_token;
public $password;
/**
* @inheritdoc
*/
public function rules()
{
return [
['signup_token', 'trim'],
['email', 'trim'],
['email', 'required'],
['email', 'email'],
['email', 'string', 'max' => 255],
['email', 'unique', 'targetClass' => '\app\models\User', 'message' => 'You are already an existing user. Proceed to login'],
['password', 'required'],
['password', 'string', 'min' => 6],
];
}
/**
* Signs user up.
*
* @return User|null the saved model or null if saving fails
*/
public function signup()
{
if (!$this->validate()) {
return null;
}
$user = new User();
$user->email = $this->email;
$user->role = SignupLinks::findByEmail($this->email)->role;
$user->setPassword($this->password);
$user->generateAuthKey();
return $user->save() ? $user : null;
}
}
|
Save user role when signing up
|
Save user role when signing up
|
PHP
|
bsd-3-clause
|
nkmathew/intern-portal,nkmathew/intern-portal
|
php
|
## Code Before:
<?php
namespace app\models;
use yii\base\Model;
/**
* Signup form
*/
class SignupForm extends Model
{
public $email;
public $signup_token;
public $password;
/**
* @inheritdoc
*/
public function rules()
{
return [
['signup_token', 'trim'],
['email', 'trim'],
['email', 'required'],
['email', 'email'],
['email', 'string', 'max' => 255],
['email', 'unique', 'targetClass' => '\app\models\User', 'message' => 'You are already an existing user. Proceed to login'],
['password', 'required'],
['password', 'string', 'min' => 6],
];
}
/**
* Signs user up.
*
* @return User|null the saved model or null if saving fails
*/
public function signup()
{
if (!$this->validate()) {
return null;
}
$user = new User();
$user->email = $this->email;
$user->setPassword($this->password);
$user->generateAuthKey();
return $user->save() ? $user : null;
}
}
## Instruction:
Save user role when signing up
## Code After:
<?php
namespace app\models;
use yii\base\Model;
/**
* Signup form
*/
class SignupForm extends Model
{
public $email;
public $signup_token;
public $password;
/**
* @inheritdoc
*/
public function rules()
{
return [
['signup_token', 'trim'],
['email', 'trim'],
['email', 'required'],
['email', 'email'],
['email', 'string', 'max' => 255],
['email', 'unique', 'targetClass' => '\app\models\User', 'message' => 'You are already an existing user. Proceed to login'],
['password', 'required'],
['password', 'string', 'min' => 6],
];
}
/**
* Signs user up.
*
* @return User|null the saved model or null if saving fails
*/
public function signup()
{
if (!$this->validate()) {
return null;
}
$user = new User();
$user->email = $this->email;
$user->role = SignupLinks::findByEmail($this->email)->role;
$user->setPassword($this->password);
$user->generateAuthKey();
return $user->save() ? $user : null;
}
}
|
a7ea8927e8645dc27a5006b102e46e8331b0d90e
|
lib/tasks/scan-paths-handler.coffee
|
lib/tasks/scan-paths-handler.coffee
|
async = require 'async'
fs = require 'fs'
VariableScanner = require '../variable-scanner'
class PathScanner
constructor: (@path) ->
@scanner = new VariableScanner
load: (done) ->
currentChunk = ''
currentLine = 0
currentOffset = 0
lastIndex = 0
results = []
readStream = fs.createReadStream(@path)
readStream.on 'data', (chunk) =>
currentChunk += chunk.toString()
index = lastIndex
while result = @scanner.search(currentChunk, lastIndex)
result.range[0] += index
result.range[1] += index
for v in result
v.path = @path
v.range[0] += index
v.range[1] += index
v.definitionRange = result.range
results = results.concat(result)
{lastIndex} = result
if result?
currentChunk = currentChunk[lastIndex..-1]
lastIndex = 0
readStream.on 'end', ->
emit('scan-paths:path-scanned', results)
done()
module.exports = (paths) ->
async.each(
paths,
(path, next) ->
new PathScanner(path).load(next)
@async()
)
|
async = require 'async'
fs = require 'fs'
VariableScanner = require '../variable-scanner'
class PathScanner
constructor: (@path) ->
@scanner = new VariableScanner
load: (done) ->
currentChunk = ''
currentLine = 0
currentOffset = 0
lastIndex = 0
line = 0
results = []
readStream = fs.createReadStream(@path)
readStream.on 'data', (chunk) =>
currentChunk += chunk.toString()
index = lastIndex
while result = @scanner.search(currentChunk, lastIndex)
result.range[0] += index
result.range[1] += index
for v in result
v.path = @path
v.range[0] += index
v.range[1] += index
v.definitionRange = result.range
v.line += line
lastLine = v.line
results = results.concat(result)
{lastIndex} = result
if result?
currentChunk = currentChunk[lastIndex..-1]
line = lastLine
lastIndex = 0
readStream.on 'end', ->
emit('scan-paths:path-scanned', results)
done()
module.exports = (paths) ->
async.each(
paths,
(path, next) ->
new PathScanner(path).load(next)
@async()
)
|
Increment results line when scanning using a stream
|
Increment results line when scanning using a stream
|
CoffeeScript
|
mit
|
peter1000/atom-pigments,peter1000/atom-pigments
|
coffeescript
|
## Code Before:
async = require 'async'
fs = require 'fs'
VariableScanner = require '../variable-scanner'
class PathScanner
constructor: (@path) ->
@scanner = new VariableScanner
load: (done) ->
currentChunk = ''
currentLine = 0
currentOffset = 0
lastIndex = 0
results = []
readStream = fs.createReadStream(@path)
readStream.on 'data', (chunk) =>
currentChunk += chunk.toString()
index = lastIndex
while result = @scanner.search(currentChunk, lastIndex)
result.range[0] += index
result.range[1] += index
for v in result
v.path = @path
v.range[0] += index
v.range[1] += index
v.definitionRange = result.range
results = results.concat(result)
{lastIndex} = result
if result?
currentChunk = currentChunk[lastIndex..-1]
lastIndex = 0
readStream.on 'end', ->
emit('scan-paths:path-scanned', results)
done()
module.exports = (paths) ->
async.each(
paths,
(path, next) ->
new PathScanner(path).load(next)
@async()
)
## Instruction:
Increment results line when scanning using a stream
## Code After:
async = require 'async'
fs = require 'fs'
VariableScanner = require '../variable-scanner'
class PathScanner
constructor: (@path) ->
@scanner = new VariableScanner
load: (done) ->
currentChunk = ''
currentLine = 0
currentOffset = 0
lastIndex = 0
line = 0
results = []
readStream = fs.createReadStream(@path)
readStream.on 'data', (chunk) =>
currentChunk += chunk.toString()
index = lastIndex
while result = @scanner.search(currentChunk, lastIndex)
result.range[0] += index
result.range[1] += index
for v in result
v.path = @path
v.range[0] += index
v.range[1] += index
v.definitionRange = result.range
v.line += line
lastLine = v.line
results = results.concat(result)
{lastIndex} = result
if result?
currentChunk = currentChunk[lastIndex..-1]
line = lastLine
lastIndex = 0
readStream.on 'end', ->
emit('scan-paths:path-scanned', results)
done()
module.exports = (paths) ->
async.each(
paths,
(path, next) ->
new PathScanner(path).load(next)
@async()
)
|
5ef0bb3e218afd96cdbc370a2266be3e7e155687
|
packages/setting-spacings/scss/_settings.scss
|
packages/setting-spacings/scss/_settings.scss
|
////
/// @group Setting: spacings
/// @author Markus Oberlehner
////
/// Spacing sizes.
/// @type Map
$spacings: (
xs: 0.5em,
s: 0.75em,
m: 1em,
l: 1.5em,
xl: 2em,
xxl: 3.5em,
xxxl: 6em,
) !default;
|
////
/// @group Setting: spacings
/// @author Markus Oberlehner
////
/// Spacing sizes.
/// @type Map
$spacings: (
xxs: 0.25em,
xs: 0.5em,
s: 0.75em,
m: 1em,
l: 1.5em,
xl: 2em,
xxl: 3.5em,
xxxl: 6em,
) !default;
|
Add a new xxs spacing setting
|
Add a new xxs spacing setting
|
SCSS
|
mit
|
avalanchesass/avalanche,avalanchesass/avalanche,avalanchesass/avalanche
|
scss
|
## Code Before:
////
/// @group Setting: spacings
/// @author Markus Oberlehner
////
/// Spacing sizes.
/// @type Map
$spacings: (
xs: 0.5em,
s: 0.75em,
m: 1em,
l: 1.5em,
xl: 2em,
xxl: 3.5em,
xxxl: 6em,
) !default;
## Instruction:
Add a new xxs spacing setting
## Code After:
////
/// @group Setting: spacings
/// @author Markus Oberlehner
////
/// Spacing sizes.
/// @type Map
$spacings: (
xxs: 0.25em,
xs: 0.5em,
s: 0.75em,
m: 1em,
l: 1.5em,
xl: 2em,
xxl: 3.5em,
xxxl: 6em,
) !default;
|
54918079efb61bb8232865f83f9d29144e58ab2f
|
bin/url2pdf.sh
|
bin/url2pdf.sh
|
OUTPUTFILE=$1
URL=$2
STDERR=`mktemp`
if [ ! -f "$STDERR" ]; then
echo "Error creating tempfile, aborting"
exit 1
fi
if [ -z $OUTPUTFILE ]; then
echo "Usage: $0 <outputfilename>"
exit 1
else
/usr/local/bin/wkhtmltopdf --title "$URL" "$URL" $OUTPUTFILE 2>"$STDERR"
grep -q 'Failed loading page' "$STDERR"
# If page load failed, try it without javascript
if [ $? -eq 0 ]; then
/usr/local/bin/wkhtmltopdf -n --title "$URL" "$URL" $OUTPUTFILE 2>"$STDERR"
fi
rm -f "$STDERR"
# hacky fix for http://code.google.com/p/wkhtmltopdf/issues/detail?id=463
perl -pi -e 's/(Dests <<.*?)(#00)(.*?>>)/$1$3/s' $OUTPUTFILE
fi
|
OUTPUTFILE=$1
URL=$2
ARGS="$3"
STDERR=`mktemp`
PROXY=""
if [ ! -f "$STDERR" ]; then
echo "Error creating tempfile, aborting"
exit 1
fi
if [ -z $OUTPUTFILE ]; then
echo "Usage: $0 <outputfilename>"
exit 1
else
if [ -z "$ARGS" ]; then
/usr/local/bin/wkhtmltopdf $PROXY --title "$URL" "$URL" $OUTPUTFILE 2>"$STDERR"
/bin/grep -q 'Failed loading page' "$STDERR"
# If page load failed, try it without javascript
if [ $? -eq 0 ]; then
/usr/local/bin/wkhtmltopdf $PROXY -n --title "$URL" "$URL" $OUTPUTFILE 2>"$STDERR"
fi
rm -f "$STDERR"
else
/usr/local/bin/wkhtmltopdf $PROXY $ARGS --title "$URL" "$URL" $OUTPUTFILE
fi
# hacky fix for http://code.google.com/p/wkhtmltopdf/issues/detail?id=463
/usr/bin/perl -pi -e 's/(Dests <<.*?)(#00)(.*?>>)/$1$3/s' $OUTPUTFILE
fi
|
Add support for optional arguments, and handling url requests via a proxy
|
Add support for optional arguments, and handling url requests via a proxy
|
Shell
|
mit
|
thunderkeys/pdfconvertme-public,thunderkeys/pdfconvertme-public,thunderkeys/pdfconvertme-public
|
shell
|
## Code Before:
OUTPUTFILE=$1
URL=$2
STDERR=`mktemp`
if [ ! -f "$STDERR" ]; then
echo "Error creating tempfile, aborting"
exit 1
fi
if [ -z $OUTPUTFILE ]; then
echo "Usage: $0 <outputfilename>"
exit 1
else
/usr/local/bin/wkhtmltopdf --title "$URL" "$URL" $OUTPUTFILE 2>"$STDERR"
grep -q 'Failed loading page' "$STDERR"
# If page load failed, try it without javascript
if [ $? -eq 0 ]; then
/usr/local/bin/wkhtmltopdf -n --title "$URL" "$URL" $OUTPUTFILE 2>"$STDERR"
fi
rm -f "$STDERR"
# hacky fix for http://code.google.com/p/wkhtmltopdf/issues/detail?id=463
perl -pi -e 's/(Dests <<.*?)(#00)(.*?>>)/$1$3/s' $OUTPUTFILE
fi
## Instruction:
Add support for optional arguments, and handling url requests via a proxy
## Code After:
OUTPUTFILE=$1
URL=$2
ARGS="$3"
STDERR=`mktemp`
PROXY=""
if [ ! -f "$STDERR" ]; then
echo "Error creating tempfile, aborting"
exit 1
fi
if [ -z $OUTPUTFILE ]; then
echo "Usage: $0 <outputfilename>"
exit 1
else
if [ -z "$ARGS" ]; then
/usr/local/bin/wkhtmltopdf $PROXY --title "$URL" "$URL" $OUTPUTFILE 2>"$STDERR"
/bin/grep -q 'Failed loading page' "$STDERR"
# If page load failed, try it without javascript
if [ $? -eq 0 ]; then
/usr/local/bin/wkhtmltopdf $PROXY -n --title "$URL" "$URL" $OUTPUTFILE 2>"$STDERR"
fi
rm -f "$STDERR"
else
/usr/local/bin/wkhtmltopdf $PROXY $ARGS --title "$URL" "$URL" $OUTPUTFILE
fi
# hacky fix for http://code.google.com/p/wkhtmltopdf/issues/detail?id=463
/usr/bin/perl -pi -e 's/(Dests <<.*?)(#00)(.*?>>)/$1$3/s' $OUTPUTFILE
fi
|
6163e667f8c7e69af4d0009a29cc7b896700c20b
|
server/collections/contacts.js
|
server/collections/contacts.js
|
Contacts.allow(Server.allow.owner);
Contacts.after.remove(function (userId, doc) {
Server.upload.delete(doc.imageFile);
});
Contacts.before.update(function(userId, doc, fieldNames, modifier, options){
var mset = modifier['$set'];
if (mset && doc.imageFile && (mset.imageFile !== doc.imageFile)){
Server.upload.delete(doc.imageFile);
}
});
|
Contacts.allow(Server.allow.owner);
Contacts.after.remove(function (userId, doc) {
Server.upload.delete(doc._id, doc.imageFile);
});
Contacts.before.update(function(userId, doc, fieldNames, modifier, options){
var mset = modifier['$set'];
if (mset && doc.imageFile && (mset.imageFile !== doc.imageFile)){
Server.upload.delete(doc._id, doc.imageFile);
}
});
|
Fix calls to delete image files
|
Fix calls to delete image files
|
JavaScript
|
mit
|
ManuelDeLeon/phonebook,ManuelDeLeon/phonebook
|
javascript
|
## Code Before:
Contacts.allow(Server.allow.owner);
Contacts.after.remove(function (userId, doc) {
Server.upload.delete(doc.imageFile);
});
Contacts.before.update(function(userId, doc, fieldNames, modifier, options){
var mset = modifier['$set'];
if (mset && doc.imageFile && (mset.imageFile !== doc.imageFile)){
Server.upload.delete(doc.imageFile);
}
});
## Instruction:
Fix calls to delete image files
## Code After:
Contacts.allow(Server.allow.owner);
Contacts.after.remove(function (userId, doc) {
Server.upload.delete(doc._id, doc.imageFile);
});
Contacts.before.update(function(userId, doc, fieldNames, modifier, options){
var mset = modifier['$set'];
if (mset && doc.imageFile && (mset.imageFile !== doc.imageFile)){
Server.upload.delete(doc._id, doc.imageFile);
}
});
|
88934971d5148f9cb7245612c4415e46357f3930
|
CHANGELOG.md
|
CHANGELOG.md
|
All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/)
## [Unreleased]
## [0.3.1]
### Fixed
- Axios version to `0.18.1` due the CVE-2019-10742
- Fix the `subject` attribute unmarshal error: issue
[#32](https://github.com/cloudevents/sdk-javascript/issues/32)
|
All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/)
## [Unreleased]
### Fixed
- Axios version to `0.18.1` due the CVE-2019-10742
- Fix the `subject` attribute unmarshal error: issue
[#32](https://github.com/cloudevents/sdk-javascript/issues/32)
|
Fix changelog semantics for unreleased
|
Fix changelog semantics for unreleased
Signed-off-by: Fabio José <[email protected]>
|
Markdown
|
apache-2.0
|
cloudevents/sdk-javascript,cloudevents/sdk-javascript
|
markdown
|
## Code Before:
All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/)
## [Unreleased]
## [0.3.1]
### Fixed
- Axios version to `0.18.1` due the CVE-2019-10742
- Fix the `subject` attribute unmarshal error: issue
[#32](https://github.com/cloudevents/sdk-javascript/issues/32)
## Instruction:
Fix changelog semantics for unreleased
Signed-off-by: Fabio José <[email protected]>
## Code After:
All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/)
## [Unreleased]
### Fixed
- Axios version to `0.18.1` due the CVE-2019-10742
- Fix the `subject` attribute unmarshal error: issue
[#32](https://github.com/cloudevents/sdk-javascript/issues/32)
|
6c182dfe5d7d2de8e1957370c016fe71d8d1edb4
|
README.md
|
README.md
|
The coolest flask application for your bucket list.
|
[](https://travis-ci.org/JoshuaOndieki/buckylist)
[](https://coveralls.io/github/JoshuaOndieki/buckylist?branch=master)
The coolest flask application for your bucket list.
|
Add travis and coveralls badges
|
Add travis and coveralls badges
|
Markdown
|
mit
|
JoshuaOndieki/buckylist,JoshuaOndieki/buckylist
|
markdown
|
## Code Before:
The coolest flask application for your bucket list.
## Instruction:
Add travis and coveralls badges
## Code After:
[](https://travis-ci.org/JoshuaOndieki/buckylist)
[](https://coveralls.io/github/JoshuaOndieki/buckylist?branch=master)
The coolest flask application for your bucket list.
|
97878d9883ad6f489fb2d0883bd6de522abfd0d9
|
webkit/GHCJS/DOM/EventTargetClosures.hs
|
webkit/GHCJS/DOM/EventTargetClosures.hs
|
{-# LANGUAGE CPP #-}
module GHCJS.DOM.EventTargetClosures
(eventTargetAddEventListener) where
import GHCJS.DOM.Types
eventTargetAddEventListener ::
(GObjectClass self, IsEvent event) =>
self -> String -> Bool -> (self -> event -> IO ()) -> IO Bool
eventTargetAddEventListener self eventName bubble user = undefined
--do
-- sptr <- newStablePtr action
-- gclosurePtr <- gtk2hs_closure_new sptr
-- toBool <$> (
-- withUTFString eventName $ \ eventNamePtr ->
-- {# call webkit_dom_event_target_add_event_listener_closure #}
-- (unsafeCastGObject $ toGObject self)
-- eventNamePtr
-- (GClosure gclosurePtr)
-- (fromBool bubble))
-- where action :: Ptr GObject -> Ptr GObject -> IO ()
-- action obj1 obj2 =
-- failOnGError $
-- makeNewGObject (GObject, objectUnrefFromMainloop) (return obj2) >>= \obj2' ->
-- makeNewGObject (GObject, objectUnrefFromMainloop) (return obj1) >>= \obj1' ->
-- user (unsafeCastGObject obj1') (unsafeCastGObject obj2')
|
module GHCJS.DOM.Events (
module Graphics.UI.Gtk.WebKit.DOM.EventTargetClosures
) where
import Graphics.UI.Gtk.WebKit.DOM.EventTargetClosures
|
Fix missing ConstraintKinds on EventTargetClosure.hs error
|
Fix missing ConstraintKinds on EventTargetClosure.hs error
|
Haskell
|
mit
|
manyoo/ghcjs-dom,plow-technologies/ghcjs-dom,manyoo/ghcjs-dom,plow-technologies/ghcjs-dom
|
haskell
|
## Code Before:
{-# LANGUAGE CPP #-}
module GHCJS.DOM.EventTargetClosures
(eventTargetAddEventListener) where
import GHCJS.DOM.Types
eventTargetAddEventListener ::
(GObjectClass self, IsEvent event) =>
self -> String -> Bool -> (self -> event -> IO ()) -> IO Bool
eventTargetAddEventListener self eventName bubble user = undefined
--do
-- sptr <- newStablePtr action
-- gclosurePtr <- gtk2hs_closure_new sptr
-- toBool <$> (
-- withUTFString eventName $ \ eventNamePtr ->
-- {# call webkit_dom_event_target_add_event_listener_closure #}
-- (unsafeCastGObject $ toGObject self)
-- eventNamePtr
-- (GClosure gclosurePtr)
-- (fromBool bubble))
-- where action :: Ptr GObject -> Ptr GObject -> IO ()
-- action obj1 obj2 =
-- failOnGError $
-- makeNewGObject (GObject, objectUnrefFromMainloop) (return obj2) >>= \obj2' ->
-- makeNewGObject (GObject, objectUnrefFromMainloop) (return obj1) >>= \obj1' ->
-- user (unsafeCastGObject obj1') (unsafeCastGObject obj2')
## Instruction:
Fix missing ConstraintKinds on EventTargetClosure.hs error
## Code After:
module GHCJS.DOM.Events (
module Graphics.UI.Gtk.WebKit.DOM.EventTargetClosures
) where
import Graphics.UI.Gtk.WebKit.DOM.EventTargetClosures
|
1a598aea610d2753e4dee7567efaaab2e07d86fd
|
src/package.json
|
src/package.json
|
{
"name": "axis-discovery-ssdp",
"version": "0.0.1",
"description": "Node module capable of searching for Axis video cameras using SSDP.",
"main": "index.js",
"scripts": {
"build": "tsc -p .",
"prestart": "npm run build",
"start": "node index.js",
"pretest": "npm run build",
"test": "mocha **/*.spec.js"
},
"author": "",
"license": "Apache-2.0",
"dependencies": {
"@types/lodash": "^4.14.36",
"@types/node": "^6.0.41",
"lodash": "^4.16.2"
},
"devDependencies": {
"@types/chai": "^3.4.34",
"@types/mocha": "^2.2.32",
"chai": "^3.5.0",
"mocha": "^3.1.0"
}
}
|
{
"name": "axis-discovery-ssdp",
"version": "0.0.1",
"description": "Node module capable of searching for Axis video cameras using SSDP.",
"main": "index.js",
"scripts": {
"postinstall": "npm install -g typescript",
"build": "tsc -p .",
"prestart": "npm run build",
"start": "node index.js",
"pretest": "npm run build",
"test": "mocha **/*.spec.js"
},
"author": "",
"license": "Apache-2.0",
"dependencies": {
"@types/lodash": "^4.14.36",
"@types/node": "^6.0.41",
"lodash": "^4.16.2"
},
"devDependencies": {
"@types/chai": "^3.4.34",
"@types/mocha": "^2.2.32",
"chai": "^3.5.0",
"mocha": "^3.1.0"
}
}
|
Install TypeScript now part of project.json
|
Install TypeScript now part of project.json
|
JSON
|
apache-2.0
|
FantasticFiasco/axis-discovery-ssdp
|
json
|
## Code Before:
{
"name": "axis-discovery-ssdp",
"version": "0.0.1",
"description": "Node module capable of searching for Axis video cameras using SSDP.",
"main": "index.js",
"scripts": {
"build": "tsc -p .",
"prestart": "npm run build",
"start": "node index.js",
"pretest": "npm run build",
"test": "mocha **/*.spec.js"
},
"author": "",
"license": "Apache-2.0",
"dependencies": {
"@types/lodash": "^4.14.36",
"@types/node": "^6.0.41",
"lodash": "^4.16.2"
},
"devDependencies": {
"@types/chai": "^3.4.34",
"@types/mocha": "^2.2.32",
"chai": "^3.5.0",
"mocha": "^3.1.0"
}
}
## Instruction:
Install TypeScript now part of project.json
## Code After:
{
"name": "axis-discovery-ssdp",
"version": "0.0.1",
"description": "Node module capable of searching for Axis video cameras using SSDP.",
"main": "index.js",
"scripts": {
"postinstall": "npm install -g typescript",
"build": "tsc -p .",
"prestart": "npm run build",
"start": "node index.js",
"pretest": "npm run build",
"test": "mocha **/*.spec.js"
},
"author": "",
"license": "Apache-2.0",
"dependencies": {
"@types/lodash": "^4.14.36",
"@types/node": "^6.0.41",
"lodash": "^4.16.2"
},
"devDependencies": {
"@types/chai": "^3.4.34",
"@types/mocha": "^2.2.32",
"chai": "^3.5.0",
"mocha": "^3.1.0"
}
}
|
d9065387a1414b7f863391d9d6ad02401b826dd8
|
config/introscope_agent.yml
|
config/introscope_agent.yml
|
---
version: +
repository_root: https://ca.bintray.com/apm-agents
default_agent_name: $(jq -r -n "$VCAP_APPLICATION | .application_name")
|
---
version: +
repository_root: https://packages.broadcom.com/artifactory/apm-agents
default_agent_name: $(jq -r -n "$VCAP_APPLICATION | .application_name")
|
Update CA APM artifact location
|
Update CA APM artifact location
Broadcom is migrating CA APM agents from Bintray to JFrog. Bintray will be discontinued May 1 2021
Signed-off-by: Emily Casey <[email protected]>
|
YAML
|
apache-2.0
|
cloudfoundry/java-buildpack,cloudfoundry/java-buildpack
|
yaml
|
## Code Before:
---
version: +
repository_root: https://ca.bintray.com/apm-agents
default_agent_name: $(jq -r -n "$VCAP_APPLICATION | .application_name")
## Instruction:
Update CA APM artifact location
Broadcom is migrating CA APM agents from Bintray to JFrog. Bintray will be discontinued May 1 2021
Signed-off-by: Emily Casey <[email protected]>
## Code After:
---
version: +
repository_root: https://packages.broadcom.com/artifactory/apm-agents
default_agent_name: $(jq -r -n "$VCAP_APPLICATION | .application_name")
|
cc52754ad5a3049f7034a7b876fa6427f5572d71
|
test/process.js
|
test/process.js
|
var Process = require("../lib/Process");
var chai = require("chai");
var chaiAsPromised = require("chai-as-promised");
chai.use(chaiAsPromised);
var expect = chai.expect;
describe("Testing Process class", function () {
it("echo hello", function () {
var process = new Process({
command: "echo",
args: [ "hello" ]
});
return expect(process.getCode()).to.eventually.equals(0);
});
it("large output", function () {
var process = new Process({
command: "head",
args: [ "-c", "2048000", "/dev/zero" ]
});
return expect(process.getCode().then(function () {
return process.getStdout().length;
})).to.eventually.equals(1024 * 1024);
});
});
|
var Process = require("../lib/Process");
var chai = require("chai");
var chaiAsPromised = require("chai-as-promised");
chai.use(chaiAsPromised);
var expect = chai.expect;
describe("Testing Process class", function () {
this.timeout(5000);
it("echo hello", function () {
var process = new Process({
command: "echo",
args: [ "hello" ]
});
return expect(process.getCode()).to.eventually.equals(0);
});
it("large output", function () {
var process = new Process({
command: "head",
args: [ "-c", "2048000", "/dev/zero" ]
});
return expect(process.getCode().then(function () {
return process.getStdout().length;
})).to.eventually.equals(1024 * 1024);
});
});
|
Increase timeout so that the large stdout test won't fail on slow machine.
|
Increase timeout so that the large stdout test won't fail on slow machine.
|
JavaScript
|
mit
|
sarosia/proceger,sarosia/proceger
|
javascript
|
## Code Before:
var Process = require("../lib/Process");
var chai = require("chai");
var chaiAsPromised = require("chai-as-promised");
chai.use(chaiAsPromised);
var expect = chai.expect;
describe("Testing Process class", function () {
it("echo hello", function () {
var process = new Process({
command: "echo",
args: [ "hello" ]
});
return expect(process.getCode()).to.eventually.equals(0);
});
it("large output", function () {
var process = new Process({
command: "head",
args: [ "-c", "2048000", "/dev/zero" ]
});
return expect(process.getCode().then(function () {
return process.getStdout().length;
})).to.eventually.equals(1024 * 1024);
});
});
## Instruction:
Increase timeout so that the large stdout test won't fail on slow machine.
## Code After:
var Process = require("../lib/Process");
var chai = require("chai");
var chaiAsPromised = require("chai-as-promised");
chai.use(chaiAsPromised);
var expect = chai.expect;
describe("Testing Process class", function () {
this.timeout(5000);
it("echo hello", function () {
var process = new Process({
command: "echo",
args: [ "hello" ]
});
return expect(process.getCode()).to.eventually.equals(0);
});
it("large output", function () {
var process = new Process({
command: "head",
args: [ "-c", "2048000", "/dev/zero" ]
});
return expect(process.getCode().then(function () {
return process.getStdout().length;
})).to.eventually.equals(1024 * 1024);
});
});
|
8bdc07bb700fd2577a1c171a579b94dc0278b7d1
|
README.md
|
README.md
|
[](https://travis-ci.org/deverton/rarathon)
Client crate for the [Marathon](https://github.com/mesosphere/marathon) scheduler written in [Rust](http://www.rust-lang.org/).
|
[](https://travis-ci.org/deverton/rarathon)
Client crate for the [Marathon](https://github.com/mesosphere/marathon) scheduler written in [Rust](http://www.rust-lang.org/).
Includes a command line tool with similar functions to [marathon_client](https://github.com/mesosphere/marathon_client/).
|
Include link to original marathon_client
|
Include link to original marathon_client
|
Markdown
|
mit
|
deverton/rarathon
|
markdown
|
## Code Before:
[](https://travis-ci.org/deverton/rarathon)
Client crate for the [Marathon](https://github.com/mesosphere/marathon) scheduler written in [Rust](http://www.rust-lang.org/).
## Instruction:
Include link to original marathon_client
## Code After:
[](https://travis-ci.org/deverton/rarathon)
Client crate for the [Marathon](https://github.com/mesosphere/marathon) scheduler written in [Rust](http://www.rust-lang.org/).
Includes a command line tool with similar functions to [marathon_client](https://github.com/mesosphere/marathon_client/).
|
0a2475655fc8f11a848b7a2c948a9bffad1c4c91
|
src/browser/shared/workspace.service.ts
|
src/browser/shared/workspace.service.ts
|
import { Inject, Injectable, InjectionToken, OnDestroy, Optional } from '@angular/core';
import { from, Observable } from 'rxjs';
import { GEEKS_DIARY_DIR_PATH, NOTES_DIR_PATH, WORKSPACE_DIR_PATH } from '../../core/workspace';
import { IpcActionClient } from '../../libs/ipc';
export class WorkspaceConfig {
rootDirPath?: string = WORKSPACE_DIR_PATH;
geeksDiaryDirPath?: string = GEEKS_DIARY_DIR_PATH;
notesDirPath?: string = NOTES_DIR_PATH;
}
export const WORKSPACE_DEFAULT_CONFIG = new InjectionToken<WorkspaceConfig>('WorkspaceConfig');
@Injectable()
export class WorkspaceService implements OnDestroy {
readonly configs: WorkspaceConfig;
private ipcClient = new IpcActionClient('workspace');
constructor(
@Optional() @Inject(WORKSPACE_DEFAULT_CONFIG) config: WorkspaceConfig,
) {
this.configs = {
...(new WorkspaceConfig()),
...config,
};
}
ngOnDestroy(): void {
this.ipcClient.destroy();
}
initWorkspace(): Observable<void> {
return from(this.ipcClient.performAction('initWorkspace'));
}
}
|
import { Inject, Injectable, InjectionToken, OnDestroy, Optional } from '@angular/core';
import { from, Observable } from 'rxjs';
import { ASSETS_DIR_PATH, GEEKS_DIARY_DIR_PATH, NOTES_DIR_PATH, WORKSPACE_DIR_PATH } from '../../core/workspace';
import { IpcActionClient } from '../../libs/ipc';
export class WorkspaceConfig {
rootDirPath?: string = WORKSPACE_DIR_PATH;
geeksDiaryDirPath?: string = GEEKS_DIARY_DIR_PATH;
notesDirPath?: string = NOTES_DIR_PATH;
assetsDirPath?: string = ASSETS_DIR_PATH;
}
export const WORKSPACE_DEFAULT_CONFIG = new InjectionToken<WorkspaceConfig>('WorkspaceConfig');
@Injectable()
export class WorkspaceService implements OnDestroy {
readonly configs: WorkspaceConfig;
private ipcClient = new IpcActionClient('workspace');
constructor(
@Optional() @Inject(WORKSPACE_DEFAULT_CONFIG) config: WorkspaceConfig,
) {
this.configs = {
...(new WorkspaceConfig()),
...config,
};
}
ngOnDestroy(): void {
this.ipcClient.destroy();
}
initWorkspace(): Observable<void> {
return from(this.ipcClient.performAction('initWorkspace'));
}
}
|
Add assets directory path config option
|
Add assets directory path config option
|
TypeScript
|
mit
|
seokju-na/geeks-diary,seokju-na/geeks-diary,seokju-na/geeks-diary
|
typescript
|
## Code Before:
import { Inject, Injectable, InjectionToken, OnDestroy, Optional } from '@angular/core';
import { from, Observable } from 'rxjs';
import { GEEKS_DIARY_DIR_PATH, NOTES_DIR_PATH, WORKSPACE_DIR_PATH } from '../../core/workspace';
import { IpcActionClient } from '../../libs/ipc';
export class WorkspaceConfig {
rootDirPath?: string = WORKSPACE_DIR_PATH;
geeksDiaryDirPath?: string = GEEKS_DIARY_DIR_PATH;
notesDirPath?: string = NOTES_DIR_PATH;
}
export const WORKSPACE_DEFAULT_CONFIG = new InjectionToken<WorkspaceConfig>('WorkspaceConfig');
@Injectable()
export class WorkspaceService implements OnDestroy {
readonly configs: WorkspaceConfig;
private ipcClient = new IpcActionClient('workspace');
constructor(
@Optional() @Inject(WORKSPACE_DEFAULT_CONFIG) config: WorkspaceConfig,
) {
this.configs = {
...(new WorkspaceConfig()),
...config,
};
}
ngOnDestroy(): void {
this.ipcClient.destroy();
}
initWorkspace(): Observable<void> {
return from(this.ipcClient.performAction('initWorkspace'));
}
}
## Instruction:
Add assets directory path config option
## Code After:
import { Inject, Injectable, InjectionToken, OnDestroy, Optional } from '@angular/core';
import { from, Observable } from 'rxjs';
import { ASSETS_DIR_PATH, GEEKS_DIARY_DIR_PATH, NOTES_DIR_PATH, WORKSPACE_DIR_PATH } from '../../core/workspace';
import { IpcActionClient } from '../../libs/ipc';
export class WorkspaceConfig {
rootDirPath?: string = WORKSPACE_DIR_PATH;
geeksDiaryDirPath?: string = GEEKS_DIARY_DIR_PATH;
notesDirPath?: string = NOTES_DIR_PATH;
assetsDirPath?: string = ASSETS_DIR_PATH;
}
export const WORKSPACE_DEFAULT_CONFIG = new InjectionToken<WorkspaceConfig>('WorkspaceConfig');
@Injectable()
export class WorkspaceService implements OnDestroy {
readonly configs: WorkspaceConfig;
private ipcClient = new IpcActionClient('workspace');
constructor(
@Optional() @Inject(WORKSPACE_DEFAULT_CONFIG) config: WorkspaceConfig,
) {
this.configs = {
...(new WorkspaceConfig()),
...config,
};
}
ngOnDestroy(): void {
this.ipcClient.destroy();
}
initWorkspace(): Observable<void> {
return from(this.ipcClient.performAction('initWorkspace'));
}
}
|
6faea4325868edbba42f8d80b34cd048648d174a
|
core/lib/generators/spree/dummy/templates/rails/database.yml
|
core/lib/generators/spree/dummy/templates/rails/database.yml
|
login: &login
<% if database == 'mysql' %>
adapter: mysql2
encoding: utf8
reconnect: false
pool: 5
username: root
password:
#socket: /tmp/mysql.sock
<% else %>
adapter: sqlite3
pool: 5
timeout: 5000
<% end %>
development:
<<: *login
database: db/dummy_dev
test:
<<: *login
database: db/dummy_test
cucumber:
<<: *login
database: db/dummy_test
production:
<<: *login
database: db/dummy_prod
|
login: &login
<% if database == 'mysql' %>
adapter: mysql2
encoding: utf8
reconnect: false
pool: 5
username: root
password:
#socket: /tmp/mysql.sock
<% else %>
adapter: sqlite3
pool: 5
timeout: 5000
<% end %>
development:
<<: *login
database: db/dummy_dev
test:
<<: *login
database: db/dummy_test
production:
<<: *login
database: db/dummy_prod
|
Remove cucumber from dummy test config
|
Remove cucumber from dummy test config
|
YAML
|
bsd-3-clause
|
pervino/spree,bjornlinder/Spree,lyzxsc/spree,vmatekole/spree,vinayvinsol/spree,vulk/spree,grzlus/spree,TimurTarasenko/spree,TrialGuides/spree,camelmasa/spree,Engeltj/spree,surfdome/spree,radarseesradar/spree,vinayvinsol/spree,DynamoMTL/spree,moneyspyder/spree,nooysters/spree,Hawaiideveloper/shoppingcart,kewaunited/spree,delphsoft/spree-store-ballchair,mleglise/spree,athal7/solidus,pulkit21/spree,siddharth28/spree,joanblake/spree,mleglise/spree,knuepwebdev/FloatTubeRodHolders,cutefrank/spree,abhishekjain16/spree,jspizziri/spree,joanblake/spree,degica/spree,firman/spree,archSeer/spree,judaro13/spree-fork,fahidnasir/spree,assembledbrands/spree,cutefrank/spree,softr8/spree,brchristian/spree,jsurdilla/solidus,RatioClothing/spree,wolfieorama/spree,Nevensoft/spree,tesserakt/clean_spree,brchristian/spree,pjmj777/spree,shekibobo/spree,berkes/spree,JuandGirald/spree,ayb/spree,priyank-gupta/spree,softr8/spree,karlitxo/spree,Antdesk/karpal-spree,bonobos/solidus,reinaris/spree,moneyspyder/spree,codesavvy/sandbox,tancnle/spree,Boomkat/spree,builtbybuffalo/spree,orenf/spree,abhishekjain16/spree,radarseesradar/spree,mindvolt/spree,vmatekole/spree,groundctrl/spree,trigrass2/spree,agient/agientstorefront,AgilTec/spree,Kagetsuki/spree,codesavvy/sandbox,athal7/solidus,sunny2601/spree,jeffboulet/spree,lsirivong/solidus,LBRapid/spree,jsurdilla/solidus,wolfieorama/spree,SadTreeFriends/spree,jsurdilla/solidus,builtbybuffalo/spree,odk211/spree,Nevensoft/spree,jeffboulet/spree,bjornlinder/Spree,pjmj777/spree,caiqinghua/spree,jasonfb/spree,adaddeo/spree,quentinuys/spree,locomotivapro/spree,scottcrawford03/solidus,scottcrawford03/solidus,pulkit21/spree,jhawthorn/spree,builtbybuffalo/spree,shaywood2/spree,robodisco/spree,lzcabrera/spree-1-3-stable,JDutil/spree,alvinjean/spree,Arpsara/solidus,beni55/spree,useiichi/spree,watg/spree,madetech/spree,codesavvy/sandbox,Machpowersystems/spree_mach,raow/spree,yushine/spree,piousbox/spree,zamiang/spree,moneyspyder/spree,alepore/spree,lsirivong/spree,forkata/solidus,vinsol/spree,AgilTec/spree,Senjai/spree,robodisco/spree,CiscoCloud/spree,dandanwei/spree,woboinc/spree,codesavvy/sandbox,grzlus/spree,Migweld/spree,shioyama/spree,KMikhaylovCTG/spree,CJMrozek/spree,yushine/spree,devilcoders/solidus,bjornlinder/Spree,project-eutopia/spree,lzcabrera/spree-1-3-stable,KMikhaylovCTG/spree,richardnuno/solidus,rajeevriitm/spree,shekibobo/spree,Antdesk/karpal-spree,surfdome/spree,ckk-scratch/solidus,vinsol/spree,maybii/spree,trigrass2/spree,adaddeo/spree,fahidnasir/spree,bonobos/solidus,net2b/spree,richardnuno/solidus,trigrass2/spree,DynamoMTL/spree,gregoryrikson/spree-sample,odk211/spree,progsri/spree,groundctrl/spree,Nevensoft/spree,azranel/spree,Senjai/solidus,grzlus/spree,CiscoCloud/spree,gautamsawhney/spree,shaywood2/spree,alvinjean/spree,lsirivong/solidus,Ropeney/spree,ahmetabdi/spree,radarseesradar/spree,NerdsvilleCEO/spree,agient/agientstorefront,moneyspyder/spree,yiqing95/spree,azclick/spree,imella/spree,pulkit21/spree,derekluo/spree,biagidp/spree,njerrywerry/spree,ahmetabdi/spree,net2b/spree,yushine/spree,hifly/spree,hoanghiep90/spree,edgward/spree,tesserakt/clean_spree,jparr/spree,urimikhli/spree,kewaunited/spree,Antdesk/karpal-spree,jaspreet21anand/spree,dandanwei/spree,jasonfb/spree,welitonfreitas/spree,shioyama/spree,archSeer/spree,caiqinghua/spree,madetech/spree,hifly/spree,patdec/spree,keatonrow/spree,gregoryrikson/spree-sample,caiqinghua/spree,madetech/spree,yomishra/pce,shioyama/spree,ramkumar-kr/spree,archSeer/spree,watg/spree,scottcrawford03/solidus,jordan-brough/solidus,y
omishra/pce,freerunningtech/spree,ahmetabdi/spree,jspizziri/spree,Senjai/solidus,groundctrl/spree,dafontaine/spree,jsurdilla/solidus,mleglise/spree,AgilTec/spree,tomash/spree,caiqinghua/spree,JuandGirald/spree,camelmasa/spree,keatonrow/spree,forkata/solidus,priyank-gupta/spree,rbngzlv/spree,Senjai/spree,CJMrozek/spree,locomotivapro/spree,DarkoP/spree,FadliKun/spree,Lostmyname/spree,ckk-scratch/solidus,karlitxo/spree,welitonfreitas/spree,vinsol/spree,lyzxsc/spree,robodisco/spree,RatioClothing/spree,jimblesm/spree,TimurTarasenko/spree,JDutil/spree,dandanwei/spree,keatonrow/spree,sideci-sample/sideci-sample-spree,sideci-sample/sideci-sample-spree,gregoryrikson/spree-sample,TimurTarasenko/spree,project-eutopia/spree,alepore/spree,jasonfb/spree,jparr/spree,HealthWave/spree,RatioClothing/spree,dotandbo/spree,yiqing95/spree,vinsol/spree,tesserakt/clean_spree,volpejoaquin/spree,rbngzlv/spree,TrialGuides/spree,mindvolt/spree,urimikhli/spree,grzlus/solidus,sunny2601/spree,grzlus/solidus,woboinc/spree,nooysters/spree,ramkumar-kr/spree,carlesjove/spree,gautamsawhney/spree,jaspreet21anand/spree,Boomkat/spree,NerdsvilleCEO/spree,athal7/solidus,joanblake/spree,vcavallo/spree,calvinl/spree,rajeevriitm/spree,CiscoCloud/spree,freerunningtech/spree,useiichi/spree,tomash/spree,gautamsawhney/spree,ujai/spree,watg/spree,LBRapid/spree,quentinuys/spree,carlesjove/spree,Hates/spree,athal7/solidus,PhoenixTeam/spree_phoenix,piousbox/spree,FadliKun/spree,joanblake/spree,carlesjove/spree,omarsar/spree,azranel/spree,radarseesradar/spree,vulk/spree,sfcgeorge/spree,dafontaine/spree,gregoryrikson/spree-sample,piousbox/spree,reinaris/spree,patdec/spree,grzlus/solidus,firman/spree,SadTreeFriends/spree,mleglise/spree,reinaris/spree,degica/spree,camelmasa/spree,net2b/spree,Hawaiideveloper/shoppingcart,volpejoaquin/spree,njerrywerry/spree,jspizziri/spree,reidblomquist/spree,dafontaine/spree,hoanghiep90/spree,alepore/spree,miyazawatomoka/spree,Machpowersystems/spree_mach,Migweld/spree,bonobos/solidus,tailic/spree,xuewenfei/solidus,quentinuys/spree,vmatekole/spree,vinayvinsol/spree,abhishekjain16/spree,rajeevriitm/spree,vcavallo/spree,vcavallo/spree,maybii/spree,useiichi/spree,JuandGirald/spree,grzlus/solidus,TimurTarasenko/spree,judaro13/spree-fork,edgward/spree,robodisco/spree,jordan-brough/spree,berkes/spree,tailic/spree,imella/spree,berkes/spree,surfdome/spree,tomash/spree,Mayvenn/spree,ayb/spree,TrialGuides/spree,karlitxo/spree,yiqing95/spree,pervino/solidus,kewaunited/spree,ujai/spree,APohio/spree,pervino/spree,APohio/spree,Engeltj/spree,quentinuys/spree,azclick/spree,piousbox/spree,kitwalker12/spree,wolfieorama/spree,thogg4/spree,shaywood2/spree,priyank-gupta/spree,calvinl/spree,edgward/spree,njerrywerry/spree,StemboltHQ/spree,rajeevriitm/spree,HealthWave/spree,azclick/spree,Arpsara/solidus,njerrywerry/spree,trigrass2/spree,judaro13/spree-fork,vulk/spree,ujai/spree,wolfieorama/spree,madetech/spree,ckk-scratch/solidus,Mayvenn/spree,bonobos/solidus,raow/spree,pulkit21/spree,siddharth28/spree,yushine/spree,builtbybuffalo/spree,jimblesm/spree,priyank-gupta/spree,rakibulislam/spree,DarkoP/spree,derekluo/spree,ramkumar-kr/spree,richardnuno/solidus,Lostmyname/spree,APohio/spree,volpejoaquin/spree,orenf/spree,vulk/spree,Lostmyname/spree,welitonfreitas/spree,SadTreeFriends/spree,mindvolt/spree,lyzxsc/spree,StemboltHQ/spree,patdec/spree,azranel/spree,alejandromangione/spree,volpejoaquin/spree,carlesjove/spree,yiqing95/spree,Migweld/spree,jspizziri/spree,degica/spree,useiichi/spree,thogg4/spree,forkata/solidus,CJMrozek/spree,reidblom
quist/spree,Ropeney/spree,DynamoMTL/spree,cutefrank/spree,assembledbrands/spree,shekibobo/spree,APohio/spree,sfcgeorge/spree,Senjai/solidus,assembledbrands/spree,progsri/spree,ayb/spree,adaddeo/spree,agient/agientstorefront,TrialGuides/spree,camelmasa/spree,DynamoMTL/spree,Senjai/spree,xuewenfei/solidus,bricesanchez/spree,calvinl/spree,dafontaine/spree,biagidp/spree,azclick/spree,jimblesm/spree,CJMrozek/spree,rakibulislam/spree,sunny2601/spree,edgward/spree,zaeznet/spree,nooysters/spree,softr8/spree,miyazawatomoka/spree,vmatekole/spree,freerunningtech/spree,KMikhaylovCTG/spree,sliaquat/spree,locomotivapro/spree,lsirivong/spree,firman/spree,alvinjean/spree,omarsar/spree,NerdsvilleCEO/spree,net2b/spree,maybii/spree,sfcgeorge/spree,jeffboulet/spree,siddharth28/spree,StemboltHQ/spree,Boomkat/spree,ayb/spree,kewaunited/spree,lzcabrera/spree-1-3-stable,derekluo/spree,jordan-brough/spree,thogg4/spree,adaddeo/spree,project-eutopia/spree,pervino/solidus,delphsoft/spree-store-ballchair,NerdsvilleCEO/spree,ckk-scratch/solidus,Nevensoft/spree,zamiang/spree,groundctrl/spree,Lostmyname/spree,lsirivong/spree,Kagetsuki/spree,rakibulislam/spree,orenf/spree,delphsoft/spree-store-ballchair,LBRapid/spree,devilcoders/solidus,Ropeney/spree,xuewenfei/solidus,dandanwei/spree,Hates/spree,PhoenixTeam/spree_phoenix,scottcrawford03/solidus,progsri/spree,jaspreet21anand/spree,derekluo/spree,pjmj777/spree,FadliKun/spree,alejandromangione/spree,bricesanchez/spree,karlitxo/spree,kitwalker12/spree,Hates/spree,JDutil/spree,Ropeney/spree,jparr/spree,DarkoP/spree,zamiang/spree,knuepwebdev/FloatTubeRodHolders,hifly/spree,dotandbo/spree,Boomkat/spree,mindvolt/spree,raow/spree,delphsoft/spree-store-ballchair,abhishekjain16/spree,bricesanchez/spree,welitonfreitas/spree,zaeznet/spree,zamiang/spree,beni55/spree,jhawthorn/spree,dotandbo/spree,Arpsara/solidus,Engeltj/spree,brchristian/spree,imella/spree,alejandromangione/spree,archSeer/spree,omarsar/spree,HealthWave/spree,maybii/spree,raow/spree,zaeznet/spree,Arpsara/solidus,rbngzlv/spree,firman/spree,PhoenixTeam/spree_phoenix,rakibulislam/spree,devilcoders/solidus,hoanghiep90/spree,fahidnasir/spree,sideci-sample/sideci-sample-spree,reidblomquist/spree,Migweld/spree,richardnuno/solidus,Machpowersystems/spree_mach,miyazawatomoka/spree,nooysters/spree,forkata/solidus,jparr/spree,tesserakt/clean_spree,dotandbo/spree,jordan-brough/solidus,ramkumar-kr/spree,brchristian/spree,surfdome/spree,keatonrow/spree,shekibobo/spree,calvinl/spree,lyzxsc/spree,CiscoCloud/spree,sfcgeorge/spree,AgilTec/spree,miyazawatomoka/spree,tailic/spree,berkes/spree,jimblesm/spree,FadliKun/spree,kitwalker12/spree,tancnle/spree,Senjai/solidus,progsri/spree,beni55/spree,Mayvenn/spree,jhawthorn/spree,sliaquat/spree,jordan-brough/solidus,vcavallo/spree,patdec/spree,lsirivong/spree,lsirivong/solidus,thogg4/spree,hifly/spree,sliaquat/spree,jasonfb/spree,SadTreeFriends/spree,locomotivapro/spree,sliaquat/spree,DarkoP/spree,agient/agientstorefront,cutefrank/spree,reidblomquist/spree,Kagetsuki/spree,fahidnasir/spree,gautamsawhney/spree,zaeznet/spree,KMikhaylovCTG/spree,odk211/spree,reinaris/spree,jaspreet21anand/spree,yomishra/pce,Kagetsuki/spree,JDutil/spree,pervino/spree,sunny2601/spree,pervino/spree,alvinjean/spree,pervino/solidus,alejandromangione/spree,omarsar/spree,tancnle/spree,Hates/spree,tomash/spree,grzlus/spree,Mayvenn/spree,odk211/spree,jeffboulet/spree,JuandGirald/spree,jordan-brough/solidus,biagidp/spree,Hawaiideveloper/shoppingcart,ahmetabdi/spree,jordan-brough/spree,pervino/solidus,knuepwebdev/FloatTubeRodHol
ders,woboinc/spree,Engeltj/spree,Hawaiideveloper/shoppingcart,beni55/spree,siddharth28/spree,lsirivong/solidus,PhoenixTeam/spree_phoenix,rbngzlv/spree,project-eutopia/spree,hoanghiep90/spree,azranel/spree,xuewenfei/solidus,shaywood2/spree,tancnle/spree,orenf/spree,softr8/spree,devilcoders/solidus,vinayvinsol/spree,urimikhli/spree
|
yaml
|
## Code Before:
login: &login
<% if database == 'mysql' %>
adapter: mysql2
encoding: utf8
reconnect: false
pool: 5
username: root
password:
#socket: /tmp/mysql.sock
<% else %>
adapter: sqlite3
pool: 5
timeout: 5000
<% end %>
development:
<<: *login
database: db/dummy_dev
test:
<<: *login
database: db/dummy_test
cucumber:
<<: *login
database: db/dummy_test
production:
<<: *login
database: db/dummy_prod
## Instruction:
Remove cucumber from dummy test config
## Code After:
login: &login
<% if database == 'mysql' %>
adapter: mysql2
encoding: utf8
reconnect: false
pool: 5
username: root
password:
#socket: /tmp/mysql.sock
<% else %>
adapter: sqlite3
pool: 5
timeout: 5000
<% end %>
development:
<<: *login
database: db/dummy_dev
test:
<<: *login
database: db/dummy_test
production:
<<: *login
database: db/dummy_prod
|
ef0e099d20ddbc352fe5b1dfb5770fba0a08f5b1
|
metadata/com.nagopy.android.disablemanager2.txt
|
metadata/com.nagopy.android.disablemanager2.txt
|
Categories:System
License:Apache2
Web Site:http://blog.nagopy.com/
Source Code:https://github.com/75py/DisableManager/
Issue Tracker:https://github.com/75py/DisableManager/issues
Auto Name:Disable Manager
Summary:Assists the disabling of pre-installed apps
Description:
Assists in the disabling of pre-installed apps. You can show lists of
installed apps and can disable/enable them. Please do at your own risk.
.
Repo Type:git
Repo:https://github.com/75py/DisableManager.git
Build:2.0.2,20002
commit=2.0.2
subdir=app
gradle=yes
rm=uiautomator,libs
prebuild=echo sdk.dir=$$SDK$$ >> ../ViewPagerIndicator/local.properties && \
sed -i -e "/include ':uiautomator'/d" ../settings.gradle
Build:2.0.3,20003
commit=2.0.3
subdir=app
gradle=yes
rm=uiautomator,libs
prebuild=echo sdk.dir=$$SDK$$ >> ../ViewPagerIndicator/local.properties && \
sed -i -e "/include ':uiautomator'/d" ../settings.gradle
Auto Update Mode:Version %v
Update Check Mode:Tags
Current Version:2.0.3
Current Version Code:20003
|
Categories:System
License:Apache2
Web Site:http://blog.nagopy.com/
Source Code:https://github.com/75py/DisableManager/
Issue Tracker:https://github.com/75py/DisableManager/issues
Auto Name:Disable Manager
Summary:Assists the disabling of pre-installed apps
Description:
Assists in the disabling of pre-installed apps. You can show lists of
installed apps and can disable/enable them. Please do at your own risk.
.
Repo Type:git
Repo:https://github.com/75py/DisableManager.git
Build:2.0.2,20002
commit=e7ffb0ffaabdcfd98eed404af34639cc21e3f567
subdir=app
gradle=yes
rm=uiautomator,libs
prebuild=echo sdk.dir=$$SDK$$ >> ../ViewPagerIndicator/local.properties && \
sed -i -e "/include ':uiautomator'/d" ../settings.gradle
Build:2.0.3,20003
commit=2.0.3
subdir=app
gradle=yes
rm=uiautomator,libs
prebuild=echo sdk.dir=$$SDK$$ >> ../ViewPagerIndicator/local.properties && \
sed -i -e "/include ':uiautomator'/d" ../settings.gradle
Auto Update Mode:Version %v
Update Check Mode:Tags
Current Version:2.0.3
Current Version Code:20003
|
Disable Manager: Don't change deployed builds.
|
Disable Manager: Don't change deployed builds.
|
Text
|
agpl-3.0
|
f-droid/fdroid-data,f-droid/fdroiddata,f-droid/fdroiddata
|
text
|
## Code Before:
Categories:System
License:Apache2
Web Site:http://blog.nagopy.com/
Source Code:https://github.com/75py/DisableManager/
Issue Tracker:https://github.com/75py/DisableManager/issues
Auto Name:Disable Manager
Summary:Assists the disabling of pre-installed apps
Description:
Assists in the disabling of pre-installed apps. You can show lists of
installed apps and can disable/enable them. Please do at your own risk.
.
Repo Type:git
Repo:https://github.com/75py/DisableManager.git
Build:2.0.2,20002
commit=2.0.2
subdir=app
gradle=yes
rm=uiautomator,libs
prebuild=echo sdk.dir=$$SDK$$ >> ../ViewPagerIndicator/local.properties && \
sed -i -e "/include ':uiautomator'/d" ../settings.gradle
Build:2.0.3,20003
commit=2.0.3
subdir=app
gradle=yes
rm=uiautomator,libs
prebuild=echo sdk.dir=$$SDK$$ >> ../ViewPagerIndicator/local.properties && \
sed -i -e "/include ':uiautomator'/d" ../settings.gradle
Auto Update Mode:Version %v
Update Check Mode:Tags
Current Version:2.0.3
Current Version Code:20003
## Instruction:
Disable Manager: Don't change deployed builds.
## Code After:
Categories:System
License:Apache2
Web Site:http://blog.nagopy.com/
Source Code:https://github.com/75py/DisableManager/
Issue Tracker:https://github.com/75py/DisableManager/issues
Auto Name:Disable Manager
Summary:Assists the disabling of pre-installed apps
Description:
Assists in the disabling of pre-installed apps. You can show lists of
installed apps and can disable/enable them. Please do at your own risk.
.
Repo Type:git
Repo:https://github.com/75py/DisableManager.git
Build:2.0.2,20002
commit=e7ffb0ffaabdcfd98eed404af34639cc21e3f567
subdir=app
gradle=yes
rm=uiautomator,libs
prebuild=echo sdk.dir=$$SDK$$ >> ../ViewPagerIndicator/local.properties && \
sed -i -e "/include ':uiautomator'/d" ../settings.gradle
Build:2.0.3,20003
commit=2.0.3
subdir=app
gradle=yes
rm=uiautomator,libs
prebuild=echo sdk.dir=$$SDK$$ >> ../ViewPagerIndicator/local.properties && \
sed -i -e "/include ':uiautomator'/d" ../settings.gradle
Auto Update Mode:Version %v
Update Check Mode:Tags
Current Version:2.0.3
Current Version Code:20003
|
9c7fd609d041a75c578e180c2fd46251f8b81dc4
|
packages/core/src/classes/http-request.ts
|
packages/core/src/classes/http-request.ts
|
export class HttpRequest {
params: { [key: string]: any } = {};
body: any = undefined;
query: { [key: string]: any } = {};
constructor(private expressRequest?) {
if (expressRequest) {
this.query = expressRequest.query;
this.params = expressRequest.params;
this.body = expressRequest.body;
}
}
getHeader(field: string): string {
if (this.expressRequest) {
return this.expressRequest.getHeader(field);
}
return field;
}
}
|
import { HttpMethod } from '../interfaces';
export class HttpRequest {
params: { [key: string]: any } = {};
body: any = undefined;
query: { [key: string]: any } = {};
method: HttpMethod = 'GET';
path: string = '';
constructor(private expressRequest?) {
if (expressRequest) {
this.query = expressRequest.query;
this.params = expressRequest.params;
this.body = expressRequest.body;
this.method = expressRequest.method;
this.path = expressRequest.path;
}
}
getHeader(field: string): string {
if (this.expressRequest) {
return this.expressRequest.getHeader(field);
}
return field;
}
}
|
Add path and method to HttpRequest.
|
Add path and method to HttpRequest.
|
TypeScript
|
mit
|
FoalTS/foal,FoalTS/foal,FoalTS/foal,FoalTS/foal
|
typescript
|
## Code Before:
export class HttpRequest {
params: { [key: string]: any } = {};
body: any = undefined;
query: { [key: string]: any } = {};
constructor(private expressRequest?) {
if (expressRequest) {
this.query = expressRequest.query;
this.params = expressRequest.params;
this.body = expressRequest.body;
}
}
getHeader(field: string): string {
if (this.expressRequest) {
return this.expressRequest.getHeader(field);
}
return field;
}
}
## Instruction:
Add path and method to HttpRequest.
## Code After:
import { HttpMethod } from '../interfaces';
export class HttpRequest {
params: { [key: string]: any } = {};
body: any = undefined;
query: { [key: string]: any } = {};
method: HttpMethod = 'GET';
path: string = '';
constructor(private expressRequest?) {
if (expressRequest) {
this.query = expressRequest.query;
this.params = expressRequest.params;
this.body = expressRequest.body;
this.method = expressRequest.method;
this.path = expressRequest.path;
}
}
getHeader(field: string): string {
if (this.expressRequest) {
return this.expressRequest.getHeader(field);
}
return field;
}
}
|
106833059bc2dad8a284de50e153bf673d2e3b4b
|
premis_event_service/urls.py
|
premis_event_service/urls.py
|
from django.conf.urls.defaults import *
urlpatterns = patterns(
'premis_event_service.views',
# begin CODA Family url structure >
(r'^APP/$', 'app'),
# node urls
# (r'^APP/node/$', 'node'),
# (r'^APP/node/(?P<identifier>.+?)/$', 'node'),
# event urls
(r'^APP/event/$', 'app_event'),
(r'^APP/event/(?P<identifier>.+?)/$', 'app_event'),
# agent urls
(r'^APP/agent/$', 'app_agent'),
(r'^APP/agent/(?P<identifier>.+?)/$', 'app_agent'),
# html view urls
(r'^event/$', 'recent_event_list'),
(r'^event/search/$', 'event_search'),
(r'^event/search.json$', 'json_event_search'),
(r'^event/find/(?P<linked_identifier>.+?)/(?P<event_type>.+?)?/$', 'findEvent'),
(r'^event/(?P<identifier>.+?)/$', 'humanEvent'),
(r'^agent/$', 'humanAgent'),
(r'^agent/(?P<identifier>.+?).xml$', 'agentXML'),
(r'^agent/(?P<identifier>.+?).premis.xml$', 'agentXML'),
(r'^agent/(?P<identifier>.+?).json$', 'json_agent'),
(r'^agent/(?P<identifier>.+?)/$', 'humanAgent'),
)
|
try:
from django.conf.urls import patterns, url
except ImportError:
from django.conf.urls.defaults import * # In case of Django<=1.3
urlpatterns = patterns(
'premis_event_service.views',
# begin CODA Family url structure >
(r'^APP/$', 'app'),
# node urls
# (r'^APP/node/$', 'node'),
# (r'^APP/node/(?P<identifier>.+?)/$', 'node'),
# event urls
(r'^APP/event/$', 'app_event'),
(r'^APP/event/(?P<identifier>.+?)/$', 'app_event'),
# agent urls
(r'^APP/agent/$', 'app_agent'),
(r'^APP/agent/(?P<identifier>.+?)/$', 'app_agent'),
# html view urls
(r'^event/$', 'recent_event_list'),
(r'^event/search/$', 'event_search'),
(r'^event/search.json$', 'json_event_search'),
(r'^event/find/(?P<linked_identifier>.+?)/(?P<event_type>.+?)?/$', 'findEvent'),
(r'^event/(?P<identifier>.+?)/$', 'humanEvent'),
(r'^agent/$', 'humanAgent'),
(r'^agent/(?P<identifier>.+?).xml$', 'agentXML'),
(r'^agent/(?P<identifier>.+?).premis.xml$', 'agentXML'),
(r'^agent/(?P<identifier>.+?).json$', 'json_agent'),
(r'^agent/(?P<identifier>.+?)/$', 'humanAgent'),
)
|
Support new and old Django urlconf imports
|
Support new and old Django urlconf imports
|
Python
|
bsd-3-clause
|
unt-libraries/django-premis-event-service,unt-libraries/django-premis-event-service,unt-libraries/django-premis-event-service
|
python
|
## Code Before:
from django.conf.urls.defaults import *
urlpatterns = patterns(
'premis_event_service.views',
# begin CODA Family url structure >
(r'^APP/$', 'app'),
# node urls
# (r'^APP/node/$', 'node'),
# (r'^APP/node/(?P<identifier>.+?)/$', 'node'),
# event urls
(r'^APP/event/$', 'app_event'),
(r'^APP/event/(?P<identifier>.+?)/$', 'app_event'),
# agent urls
(r'^APP/agent/$', 'app_agent'),
(r'^APP/agent/(?P<identifier>.+?)/$', 'app_agent'),
# html view urls
(r'^event/$', 'recent_event_list'),
(r'^event/search/$', 'event_search'),
(r'^event/search.json$', 'json_event_search'),
(r'^event/find/(?P<linked_identifier>.+?)/(?P<event_type>.+?)?/$', 'findEvent'),
(r'^event/(?P<identifier>.+?)/$', 'humanEvent'),
(r'^agent/$', 'humanAgent'),
(r'^agent/(?P<identifier>.+?).xml$', 'agentXML'),
(r'^agent/(?P<identifier>.+?).premis.xml$', 'agentXML'),
(r'^agent/(?P<identifier>.+?).json$', 'json_agent'),
(r'^agent/(?P<identifier>.+?)/$', 'humanAgent'),
)
## Instruction:
Support new and old Django urlconf imports
## Code After:
try:
from django.conf.urls import patterns, url
except ImportError:
from django.conf.urls.defaults import * # In case of Django<=1.3
urlpatterns = patterns(
'premis_event_service.views',
# begin CODA Family url structure >
(r'^APP/$', 'app'),
# node urls
# (r'^APP/node/$', 'node'),
# (r'^APP/node/(?P<identifier>.+?)/$', 'node'),
# event urls
(r'^APP/event/$', 'app_event'),
(r'^APP/event/(?P<identifier>.+?)/$', 'app_event'),
# agent urls
(r'^APP/agent/$', 'app_agent'),
(r'^APP/agent/(?P<identifier>.+?)/$', 'app_agent'),
# html view urls
(r'^event/$', 'recent_event_list'),
(r'^event/search/$', 'event_search'),
(r'^event/search.json$', 'json_event_search'),
(r'^event/find/(?P<linked_identifier>.+?)/(?P<event_type>.+?)?/$', 'findEvent'),
(r'^event/(?P<identifier>.+?)/$', 'humanEvent'),
(r'^agent/$', 'humanAgent'),
(r'^agent/(?P<identifier>.+?).xml$', 'agentXML'),
(r'^agent/(?P<identifier>.+?).premis.xml$', 'agentXML'),
(r'^agent/(?P<identifier>.+?).json$', 'json_agent'),
(r'^agent/(?P<identifier>.+?)/$', 'humanAgent'),
)
|
9ba6b4bff661ebed70e93b7c08d448c17c7f74da
|
README.md
|
README.md
|
This is a PyTorch implementation of Asynchronous Advantage Actor Critic (A3C) from ["Asynchronous Methods for Deep Reinforcement Learning"](https://arxiv.org/pdf/1602.01783v1.pdf).
This implementation is inspired by [Universe Starter Agent](https://github.com/openai/universe-starter-agent).
In contrast to the starter agent, it uses an optimizer with shared statistics as in the original paper.
## Contributions
Contributions are very welcome. If you know how to make this code better, don't hesitate to send a pull request.
## Usage
```
OMP_NUM_THREADS=1 python main.py --env-name "PongDeterministic-v3" --num-processes 16
```
This code runs evaluation in a separate thread in addition to 16 processes.
## Results
With 16 processes it converges for PongDeterministic-v3 in 15 minutes.

For BreakoutDeterministic-v3 it takes more than several hours.
|
This is a PyTorch implementation of Asynchronous Advantage Actor Critic (A3C) from ["Asynchronous Methods for Deep Reinforcement Learning"](https://arxiv.org/pdf/1602.01783v1.pdf).
This implementation is inspired by [Universe Starter Agent](https://github.com/openai/universe-starter-agent).
In contrast to the starter agent, it uses an optimizer with shared statistics as in the original paper.
## Contributions
Contributions are very welcome. If you know how to make this code better, don't hesitate to send a pull request.
## Usage
```
OMP_NUM_THREADS=1 python main.py --env-name "PongDeterministic-v3" --num-processes 16
```
This code runs evaluation in a separate thread in addition to 16 processes.
Note:
Install most recent nightly build (version '0.1.10+2fd4d08' or later) of PyTorch via this command to prevent memory leaks:
`
pip install git+https://github.com/pytorch/pytorch
`
## Results
With 16 processes it converges for PongDeterministic-v3 in 15 minutes.

For BreakoutDeterministic-v3 it takes more than several hours.
|
Install recent pytorch version to reduce leak
|
Install recent pytorch version to reduce leak
|
Markdown
|
mit
|
ikostrikov/pytorch-a3c
|
markdown
|
## Code Before:
This is a PyTorch implementation of Asynchronous Advantage Actor Critic (A3C) from ["Asynchronous Methods for Deep Reinforcement Learning"](https://arxiv.org/pdf/1602.01783v1.pdf).
This implementation is inspired by [Universe Starter Agent](https://github.com/openai/universe-starter-agent).
In contrast to the starter agent, it uses an optimizer with shared statistics as in the original paper.
## Contributions
Contributions are very welcome. If you know how to make this code better, don't hesitate to send a pull request.
## Usage
```
OMP_NUM_THREADS=1 python main.py --env-name "PongDeterministic-v3" --num-processes 16
```
This code runs evaluation in a separate thread in addition to 16 processes.
## Results
With 16 processes it converges for PongDeterministic-v3 in 15 minutes.

For BreakoutDeterministic-v3 it takes more than several hours.
## Instruction:
Install recent pytorch version to reduce leak
## Code After:
This is a PyTorch implementation of Asynchronous Advantage Actor Critic (A3C) from ["Asynchronous Methods for Deep Reinforcement Learning"](https://arxiv.org/pdf/1602.01783v1.pdf).
This implementation is inspired by [Universe Starter Agent](https://github.com/openai/universe-starter-agent).
In contrast to the starter agent, it uses an optimizer with shared statistics as in the original paper.
## Contributions
Contributions are very welcome. If you know how to make this code better, don't hesitate to send a pull request.
## Usage
```
OMP_NUM_THREADS=1 python main.py --env-name "PongDeterministic-v3" --num-processes 16
```
This code runs evaluation in a separate thread in addition to 16 processes.
Note:
Install most recent nightly build (version '0.1.10+2fd4d08' or later) of PyTorch via this command to prevent memory leaks:
`
pip install git+https://github.com/pytorch/pytorch
`
## Results
With 16 processes it converges for PongDeterministic-v3 in 15 minutes.

For BreakoutDeterministic-v3 it takes more than several hours.
|
56f15da64edcc1e9b6ef8e5c2b0dbc55a3e9c3a9
|
settings.gradle.kts
|
settings.gradle.kts
|
rootProject.name = "SpongeAPI"
pluginManagement {
repositories {
mavenLocal()
mavenCentral()
gradlePluginPortal()
maven("https://repo-new.spongepowered.org/repository/maven-public")
maven("https://repo.spongepowered.org/maven")
}
resolutionStrategy {
eachPlugin {
if (requested.id.id.startsWith("org.spongepowered.gradle.")) {
val version = requested.version ?: "0.11.7-SNAPSHOT"
useModule("org.spongepowered:SpongeGradle:$version")
}
if (requested.id.id.startsWith("net.minecrell.licenser")) {
val vresion = requested.version ?: "0.4.1"
useModule("net.minecrell.licenser:licenser:$version")
}
}
}
}
|
rootProject.name = "SpongeAPI"
pluginManagement {
repositories {
mavenLocal()
mavenCentral()
gradlePluginPortal()
maven("https://repo-new.spongepowered.org/repository/maven-public")
maven("https://repo.spongepowered.org/maven")
}
resolutionStrategy {
eachPlugin {
if (requested.id.id.startsWith("org.spongepowered.gradle.")) {
val version = requested.version ?: "0.11.7-SNAPSHOT"
useModule("org.spongepowered:SpongeGradle:$version")
}
if (requested.id.id == "net.minecrell.licenser") {
val version = requested.version ?: "0.4.1"
useModule("gradle.plugin.net.minecrell:licenser:$version")
}
}
}
}
|
Fix resolving the licenser plugin.
|
Fix resolving the licenser plugin.
|
Kotlin
|
mit
|
SpongePowered/SpongeAPI,SpongePowered/SpongeAPI,SpongePowered/SpongeAPI
|
kotlin
|
## Code Before:
rootProject.name = "SpongeAPI"
pluginManagement {
repositories {
mavenLocal()
mavenCentral()
gradlePluginPortal()
maven("https://repo-new.spongepowered.org/repository/maven-public")
maven("https://repo.spongepowered.org/maven")
}
resolutionStrategy {
eachPlugin {
if (requested.id.id.startsWith("org.spongepowered.gradle.")) {
val version = requested.version ?: "0.11.7-SNAPSHOT"
useModule("org.spongepowered:SpongeGradle:$version")
}
if (requested.id.id.startsWith("net.minecrell.licenser")) {
val vresion = requested.version ?: "0.4.1"
useModule("net.minecrell.licenser:licenser:$version")
}
}
}
}
## Instruction:
Fix resolving the licenser plugin.
## Code After:
rootProject.name = "SpongeAPI"
pluginManagement {
repositories {
mavenLocal()
mavenCentral()
gradlePluginPortal()
maven("https://repo-new.spongepowered.org/repository/maven-public")
maven("https://repo.spongepowered.org/maven")
}
resolutionStrategy {
eachPlugin {
if (requested.id.id.startsWith("org.spongepowered.gradle.")) {
val version = requested.version ?: "0.11.7-SNAPSHOT"
useModule("org.spongepowered:SpongeGradle:$version")
}
if (requested.id.id == "net.minecrell.licenser") {
val version = requested.version ?: "0.4.1"
useModule("gradle.plugin.net.minecrell:licenser:$version")
}
}
}
}
|
34f0ace4f781059be100d74e5f99f905df322e89
|
demo/protected/controllers/SiteController.php
|
demo/protected/controllers/SiteController.php
|
<?php
class SiteController extends Controller
{
public $layout = 'column1';
/**
* Displays the front page.
*/
public function actionIndex()
{
$this->redirect(array('/auth/assignment/index'));
}
/**
* Resets the database for the demo application.
*/
public function actionReset()
{
/* @var $db CDbConnection */
$db = Yii::app()->getComponent('db');
$filename = __DIR__ . DIRECTORY_SEPARATOR . '..' . DIRECTORY_SEPARATOR . 'data' . DIRECTORY_SEPARATOR . 'schema.sql';
if (file_exists($filename))
{
$schema = file_get_contents($filename);
$schema = preg_split("/;\s+/", trim($schema, ';'));
foreach ($schema as $sql)
$db->createCommand($sql)->execute();
}
Yii::app()->user->setFlash('success', 'Demo reset.');
$this->redirect(array('index'));
}
/**
* This is the action to handle external exceptions.
*/
public function actionError()
{
if ($error = Yii::app()->errorHandler->error)
{
if (Yii::app()->request->isAjaxRequest)
echo $error['message'];
else
$this->render('error', $error);
}
}
}
|
<?php
class SiteController extends Controller
{
public $layout = 'column1';
/**
* Displays the front page.
*/
public function actionIndex()
{
$this->redirect(array('/auth/assignment/index'));
}
/**
* Resets the database for the demo application.
*/
public function actionReset()
{
/* @var $db CDbConnection */
$db = Yii::app()->getComponent('db');
$filename = dirname(__FILE__) . DIRECTORY_SEPARATOR . '..' . DIRECTORY_SEPARATOR . 'data' . DIRECTORY_SEPARATOR . 'schema.sql';
if (file_exists($filename))
{
$schema = file_get_contents($filename);
$schema = preg_split("/;\s+/", trim($schema, ';'));
foreach ($schema as $sql)
$db->createCommand($sql)->execute();
}
Yii::app()->user->setFlash('success', 'Demo reset.');
$this->redirect(array('index'));
}
/**
* This is the action to handle external exceptions.
*/
public function actionError()
{
if ($error = Yii::app()->errorHandler->error)
{
if (Yii::app()->request->isAjaxRequest)
echo $error['message'];
else
$this->render('error', $error);
}
}
}
|
Change to use dirname instead of __DIR__
|
Change to use dirname instead of __DIR__
|
PHP
|
bsd-3-clause
|
smartapps-fr/yii-auth,forex-formula/yii-auth,crisu83/yii-auth
|
php
|
## Code Before:
<?php
class SiteController extends Controller
{
public $layout = 'column1';
/**
* Displays the front page.
*/
public function actionIndex()
{
$this->redirect(array('/auth/assignment/index'));
}
/**
* Resets the database for the demo application.
*/
public function actionReset()
{
/* @var $db CDbConnection */
$db = Yii::app()->getComponent('db');
$filename = __DIR__ . DIRECTORY_SEPARATOR . '..' . DIRECTORY_SEPARATOR . 'data' . DIRECTORY_SEPARATOR . 'schema.sql';
if (file_exists($filename))
{
$schema = file_get_contents($filename);
$schema = preg_split("/;\s+/", trim($schema, ';'));
foreach ($schema as $sql)
$db->createCommand($sql)->execute();
}
Yii::app()->user->setFlash('success', 'Demo reset.');
$this->redirect(array('index'));
}
/**
* This is the action to handle external exceptions.
*/
public function actionError()
{
if ($error = Yii::app()->errorHandler->error)
{
if (Yii::app()->request->isAjaxRequest)
echo $error['message'];
else
$this->render('error', $error);
}
}
}
## Instruction:
Change to use dirname instead of __DIR__
## Code After:
<?php
class SiteController extends Controller
{
public $layout = 'column1';
/**
* Displays the front page.
*/
public function actionIndex()
{
$this->redirect(array('/auth/assignment/index'));
}
/**
* Resets the database for the demo application.
*/
public function actionReset()
{
/* @var $db CDbConnection */
$db = Yii::app()->getComponent('db');
$filename = dirname(__FILE__) . DIRECTORY_SEPARATOR . '..' . DIRECTORY_SEPARATOR . 'data' . DIRECTORY_SEPARATOR . 'schema.sql';
if (file_exists($filename))
{
$schema = file_get_contents($filename);
$schema = preg_split("/;\s+/", trim($schema, ';'));
foreach ($schema as $sql)
$db->createCommand($sql)->execute();
}
Yii::app()->user->setFlash('success', 'Demo reset.');
$this->redirect(array('index'));
}
/**
* This is the action to handle external exceptions.
*/
public function actionError()
{
if ($error = Yii::app()->errorHandler->error)
{
if (Yii::app()->request->isAjaxRequest)
echo $error['message'];
else
$this->render('error', $error);
}
}
}
|
75d66dfa4ddbb8bdd7903194feff1b69805f8d0d
|
index.html
|
index.html
|
---
layout: default
---
<div class="jumbotron">
<h1>{{ site.data.config.title | escape }}</h1>
<p>Ride Dallas.</p>
<a class="btn btn-primary btn-lg" href="https://www.facebook.com/fixed.touring" role="button">
<i class="fa fa-facebook"></i> Fixed Touring
</a>
<a class="btn btn-info btn-lg" href="https://twitter.com/fixedtouring" role="button">
<i class="fa fa-twitter"></i> Fixed Touring
</a>
</div>
<div class="list-group">
{% for post in site.posts %}
<a href="{{ post.url | escape }}" class="list-group-item">
<h2>
{% if post.categories contains 'bikes' %}
<i class="fa fa-bicycle"></i>
{% elsif post.categories contains 'rides' %}
<i class="fa fa-map-marker"></i>
{% endif %}
{{ post.title | escape }}
</h2>
<p>
<mark>
{{ post.date | date: '%Y-%m-%d' }}
</mark>
{{ post.content | split: '<!---->' | last | strip_html | strip_newlines | split: ' ' | join: ' ' | truncate: 200 }}
</p>
</a>
{% endfor %}
</div>
|
---
layout: default
---
<div class="jumbotron">
<h1>{{ site.data.config.title | escape }}</h1>
<p>Ride Dallas.</p>
<a class="btn btn-primary btn-lg" href="https://www.facebook.com/fixed.touring" role="button">
<i class="fa fa-facebook"></i> Fixed Touring
</a>
<a class="btn btn-info btn-lg" href="https://twitter.com/fixedtouring" role="button">
<i class="fa fa-twitter"></i> Fixed Touring
</a>
</div>
<div class="list-group">
{% for post in site.posts %}
<a href="{{ post.url | escape }}" class="list-group-item">
<h2>
{{ post.title | escape }}
<small>
{% if post.categories contains 'bikes' %}
<i class="fa fa-bicycle"></i>
{% elsif post.categories contains 'rides' %}
<i class="fa fa-map-marker"></i>
{% endif %}
</small>
</h2>
<p>
<mark>
{{ post.date | date: '%Y-%m-%d' }}
</mark>
{{ post.content | split: '<!---->' | last | strip_html | strip_newlines | split: ' ' | join: ' ' | truncate: 200 }}
</p>
</a>
{% endfor %}
</div>
|
Move icons to other side
|
Move icons to other side
|
HTML
|
mit
|
fixedtouring/fixedtouring.github.io,fixedtouring/fixedtouring.github.io
|
html
|
## Code Before:
---
layout: default
---
<div class="jumbotron">
<h1>{{ site.data.config.title | escape }}</h1>
<p>Ride Dallas.</p>
<a class="btn btn-primary btn-lg" href="https://www.facebook.com/fixed.touring" role="button">
<i class="fa fa-facebook"></i> Fixed Touring
</a>
<a class="btn btn-info btn-lg" href="https://twitter.com/fixedtouring" role="button">
<i class="fa fa-twitter"></i> Fixed Touring
</a>
</div>
<div class="list-group">
{% for post in site.posts %}
<a href="{{ post.url | escape }}" class="list-group-item">
<h2>
{% if post.categories contains 'bikes' %}
<i class="fa fa-bicycle"></i>
{% elsif post.categories contains 'rides' %}
<i class="fa fa-map-marker"></i>
{% endif %}
{{ post.title | escape }}
</h2>
<p>
<mark>
{{ post.date | date: '%Y-%m-%d' }}
</mark>
{{ post.content | split: '<!---->' | last | strip_html | strip_newlines | split: ' ' | join: ' ' | truncate: 200 }}
</p>
</a>
{% endfor %}
</div>
## Instruction:
Move icons to other side
## Code After:
---
layout: default
---
<div class="jumbotron">
<h1>{{ site.data.config.title | escape }}</h1>
<p>Ride Dallas.</p>
<a class="btn btn-primary btn-lg" href="https://www.facebook.com/fixed.touring" role="button">
<i class="fa fa-facebook"></i> Fixed Touring
</a>
<a class="btn btn-info btn-lg" href="https://twitter.com/fixedtouring" role="button">
<i class="fa fa-twitter"></i> Fixed Touring
</a>
</div>
<div class="list-group">
{% for post in site.posts %}
<a href="{{ post.url | escape }}" class="list-group-item">
<h2>
{{ post.title | escape }}
<small>
{% if post.categories contains 'bikes' %}
<i class="fa fa-bicycle"></i>
{% elsif post.categories contains 'rides' %}
<i class="fa fa-map-marker"></i>
{% endif %}
</small>
</h2>
<p>
<mark>
{{ post.date | date: '%Y-%m-%d' }}
</mark>
{{ post.content | split: '<!---->' | last | strip_html | strip_newlines | split: ' ' | join: ' ' | truncate: 200 }}
</p>
</a>
{% endfor %}
</div>
|
7c85e2b278667e7340c7c6bf57c3c0c91210c471
|
coolfig/__init__.py
|
coolfig/__init__.py
|
from .schema import Value, Settings
__version__ = '0.2.0'
__url__ = 'https://github.com/GaretJax/coolfig'
__all__ = ['Value', 'Settings']
|
from .schema import Value, Settings
from .providers import EnvConfig, DictConfig
from .django import load_django_settings
__version__ = '0.2.0'
__url__ = 'https://github.com/GaretJax/coolfig'
__all__ = ['Value', 'Settings', 'EnvConfig', 'DictConfig',
'load_django_settings']
|
Add some more importing shortcuts
|
Add some more importing shortcuts
|
Python
|
mit
|
GaretJax/coolfig
|
python
|
## Code Before:
from .schema import Value, Settings
__version__ = '0.2.0'
__url__ = 'https://github.com/GaretJax/coolfig'
__all__ = ['Value', 'Settings']
## Instruction:
Add some more importing shortcuts
## Code After:
from .schema import Value, Settings
from .providers import EnvConfig, DictConfig
from .django import load_django_settings
__version__ = '0.2.0'
__url__ = 'https://github.com/GaretJax/coolfig'
__all__ = ['Value', 'Settings', 'EnvConfig', 'DictConfig',
'load_django_settings']
|
e9e090d4c79a63f2b27a6d981f71ebec05dab80f
|
packages/gitignore/src/test/ls.test.js
|
packages/gitignore/src/test/ls.test.js
|
import ls from '../ls'
const dir = process.cwd()
describe('ls', () => {
it('lists all files', async () => {
const files = await ls(dir)
expect(files).not.toEqual([])
})
it('ignores given patterns', async () => {
const files = await ls(dir, ['package.json'])
expect(files.indexOf('node_modules/jest')).toEqual(-1)
expect(files.indexOf('package.json')).toEqual(-1)
})
})
|
import ls from '../ls'
import { join } from 'path'
const dir = join(__dirname, '..', '..')
describe('ls', () => {
it('lists all files', async () => {
const files = await ls(dir)
expect(files).not.toEqual([])
})
it('ignores given patterns', async () => {
const files = await ls(dir, ['package.json'])
expect(files.indexOf('node_modules/jest')).toEqual(-1)
expect(files.indexOf('package.json')).toEqual(-1)
})
})
|
Use __dirname instead of process.cwd()
|
[core] Use __dirname instead of process.cwd()
|
JavaScript
|
mit
|
ahmed-taj/handy-gi
|
javascript
|
## Code Before:
import ls from '../ls'
const dir = process.cwd()
describe('ls', () => {
it('lists all files', async () => {
const files = await ls(dir)
expect(files).not.toEqual([])
})
it('ignores given patterns', async () => {
const files = await ls(dir, ['package.json'])
expect(files.indexOf('node_modules/jest')).toEqual(-1)
expect(files.indexOf('package.json')).toEqual(-1)
})
})
## Instruction:
[core] Use __dirname instead of process.cwd()
## Code After:
import ls from '../ls'
import { join } from 'path'
const dir = join(__dirname, '..', '..')
describe('ls', () => {
it('lists all files', async () => {
const files = await ls(dir)
expect(files).not.toEqual([])
})
it('ignores given patterns', async () => {
const files = await ls(dir, ['package.json'])
expect(files.indexOf('node_modules/jest')).toEqual(-1)
expect(files.indexOf('package.json')).toEqual(-1)
})
})
|
bcd76779699d9f18244739db4c86596045e39ff8
|
lib/travis/api/v3/services/repository/activate.rb
|
lib/travis/api/v3/services/repository/activate.rb
|
require 'travis/api/v3/services/repository/deactivate'
module Travis::API::V3
class Services::Repository::Activate < Service
def run!
repository = check_login_and_find(:repository)
check_access(repository)
check_repo_key(repository)
return repo_migrated if migrated?(repository)
admin = access_control.admin_for(repository)
if Travis::Features.user_active?(:use_vcs, admin)
remote_vcs_repository.set_hook(
repository_id: repository.id,
user_id: admin.id
)
else
github(admin).set_hook(repository, true)
end
repository.update_attributes(active: true)
if repository.private? || access_control.enterprise?
if Travis::Features.user_active?(:use_vcs, access_control.user)
remote_vcs_repository.upload_key(
repository_id: repository.id,
user_id: admin.id,
read_only: !Travis::Features.owner_active?(:read_write_github_keys, repository.owner)
)
else
github(admin).upload_key(repository)
end
end
query.sync(access_control.user || access_control.admin_for(repository))
result repository
end
def check_access(repository)
access_control.permissions(repository).activate!
end
def check_repo_key(repository)
raise RepoSshKeyMissing if repository.key.nil?
end
end
end
|
require 'travis/api/v3/services/repository/deactivate'
module Travis::API::V3
class Services::Repository::Activate < Service
def run!
repository = check_login_and_find(:repository)
check_access(repository)
check_repo_key(repository)
return repo_migrated if migrated?(repository)
admin = access_control.admin_for(repository)
if Travis::Features.user_active?(:use_vcs, admin)
remote_vcs_repository.set_hook(
repository_id: repository.id,
user_id: admin.id
)
else
github(admin).set_hook(repository, true)
end
repository.update_attributes(active: true)
if repository.private? || access_control.enterprise?
if Travis::Features.user_active?(:use_vcs, admin)
remote_vcs_repository.upload_key(
repository_id: repository.id,
user_id: admin.id,
read_only: !Travis::Features.owner_active?(:read_write_github_keys, repository.owner)
)
else
github(admin).upload_key(repository)
end
end
query.sync(access_control.user || access_control.admin_for(repository))
result repository
end
def check_access(repository)
access_control.permissions(repository).activate!
end
def check_repo_key(repository)
raise RepoSshKeyMissing if repository.key.nil?
end
end
end
|
Add user vcs_type query param
|
Add user vcs_type query param
|
Ruby
|
mit
|
travis-ci/travis-api,travis-ci/travis-api,travis-ci/travis-api
|
ruby
|
## Code Before:
require 'travis/api/v3/services/repository/deactivate'
module Travis::API::V3
class Services::Repository::Activate < Service
def run!
repository = check_login_and_find(:repository)
check_access(repository)
check_repo_key(repository)
return repo_migrated if migrated?(repository)
admin = access_control.admin_for(repository)
if Travis::Features.user_active?(:use_vcs, admin)
remote_vcs_repository.set_hook(
repository_id: repository.id,
user_id: admin.id
)
else
github(admin).set_hook(repository, true)
end
repository.update_attributes(active: true)
if repository.private? || access_control.enterprise?
if Travis::Features.user_active?(:use_vcs, access_control.user)
remote_vcs_repository.upload_key(
repository_id: repository.id,
user_id: admin.id,
read_only: !Travis::Features.owner_active?(:read_write_github_keys, repository.owner)
)
else
github(admin).upload_key(repository)
end
end
query.sync(access_control.user || access_control.admin_for(repository))
result repository
end
def check_access(repository)
access_control.permissions(repository).activate!
end
def check_repo_key(repository)
raise RepoSshKeyMissing if repository.key.nil?
end
end
end
## Instruction:
Add user vcs_type query param
## Code After:
require 'travis/api/v3/services/repository/deactivate'
module Travis::API::V3
class Services::Repository::Activate < Service
def run!
repository = check_login_and_find(:repository)
check_access(repository)
check_repo_key(repository)
return repo_migrated if migrated?(repository)
admin = access_control.admin_for(repository)
if Travis::Features.user_active?(:use_vcs, admin)
remote_vcs_repository.set_hook(
repository_id: repository.id,
user_id: admin.id
)
else
github(admin).set_hook(repository, true)
end
repository.update_attributes(active: true)
if repository.private? || access_control.enterprise?
if Travis::Features.user_active?(:use_vcs, admin)
remote_vcs_repository.upload_key(
repository_id: repository.id,
user_id: admin.id,
read_only: !Travis::Features.owner_active?(:read_write_github_keys, repository.owner)
)
else
github(admin).upload_key(repository)
end
end
query.sync(access_control.user || access_control.admin_for(repository))
result repository
end
def check_access(repository)
access_control.permissions(repository).activate!
end
def check_repo_key(repository)
raise RepoSshKeyMissing if repository.key.nil?
end
end
end
|
6d1b39acbc439ab38e6a9e18603b5303bcf5d07b
|
functions/man.fish
|
functions/man.fish
|
function man --description 'Format and display manual pages'
set -lx MANPATH $__fish_datadir/man $MANPATH ""
set -q man_blink; and set blink (set_color $man_blink); or set blink (set_color -o red)
set -q man_bold; and set bold (set_color $man_bold); or set bold (set_color -o 5fafd7)
set -q man_standout; and set standout (set_color $man_standout); or set standout (set_color 949494)
set -q man_underline; and set underline (set_color $man_underline); or set underline (set_color -u afafd7)
set end (printf "\e[0m")
set -lx LESS_TERMCAP_mb $blink
set -lx LESS_TERMCAP_md $bold
set -lx LESS_TERMCAP_me $end
set -lx LESS_TERMCAP_so $standout
set -lx LESS_TERMCAP_se $end
set -lx LESS_TERMCAP_us $underline
set -lx LESS_TERMCAP_ue $end
set -lx LESS '-R -s'
set -lx GROFF_NO_SGR yes # fedora
command man $argv
end
|
function man --description 'Format and display manual pages'
set -lx MANPATH $__fish_datadir/man $MANPATH ""
set -q man_blink; and set -l blink (set_color $man_blink); or set -l blink (set_color -o red)
set -q man_bold; and set -l bold (set_color $man_bold); or set -l bold (set_color -o 5fafd7)
set -q man_standout; and set -l standout (set_color $man_standout); or set -l standout (set_color 949494)
set -q man_underline; and set -l underline (set_color $man_underline); or set -l underline (set_color -u afafd7)
set -l end (printf "\e[0m")
set -lx LESS_TERMCAP_mb $blink
set -lx LESS_TERMCAP_md $bold
set -lx LESS_TERMCAP_me $end
set -lx LESS_TERMCAP_so $standout
set -lx LESS_TERMCAP_se $end
set -lx LESS_TERMCAP_us $underline
set -lx LESS_TERMCAP_ue $end
set -lx LESS '-R -s'
set -lx GROFF_NO_SGR yes # fedora
command man $argv
end
|
Add `-l` option to `set`
|
Add `-l` option to `set`
|
fish
|
mit
|
decors/fish-colored-man-pages
|
fish
|
## Code Before:
function man --description 'Format and display manual pages'
set -lx MANPATH $__fish_datadir/man $MANPATH ""
set -q man_blink; and set blink (set_color $man_blink); or set blink (set_color -o red)
set -q man_bold; and set bold (set_color $man_bold); or set bold (set_color -o 5fafd7)
set -q man_standout; and set standout (set_color $man_standout); or set standout (set_color 949494)
set -q man_underline; and set underline (set_color $man_underline); or set underline (set_color -u afafd7)
set end (printf "\e[0m")
set -lx LESS_TERMCAP_mb $blink
set -lx LESS_TERMCAP_md $bold
set -lx LESS_TERMCAP_me $end
set -lx LESS_TERMCAP_so $standout
set -lx LESS_TERMCAP_se $end
set -lx LESS_TERMCAP_us $underline
set -lx LESS_TERMCAP_ue $end
set -lx LESS '-R -s'
set -lx GROFF_NO_SGR yes # fedora
command man $argv
end
## Instruction:
Add `-l` option to `set`
## Code After:
function man --description 'Format and display manual pages'
set -lx MANPATH $__fish_datadir/man $MANPATH ""
set -q man_blink; and set -l blink (set_color $man_blink); or set -l blink (set_color -o red)
set -q man_bold; and set -l bold (set_color $man_bold); or set -l bold (set_color -o 5fafd7)
set -q man_standout; and set -l standout (set_color $man_standout); or set -l standout (set_color 949494)
set -q man_underline; and set -l underline (set_color $man_underline); or set -l underline (set_color -u afafd7)
set -l end (printf "\e[0m")
set -lx LESS_TERMCAP_mb $blink
set -lx LESS_TERMCAP_md $bold
set -lx LESS_TERMCAP_me $end
set -lx LESS_TERMCAP_so $standout
set -lx LESS_TERMCAP_se $end
set -lx LESS_TERMCAP_us $underline
set -lx LESS_TERMCAP_ue $end
set -lx LESS '-R -s'
set -lx GROFF_NO_SGR yes # fedora
command man $argv
end
|
cf0b42464f87240ca6826b70d71a9b70acc2f8ef
|
layouts/partials/article/contribute.html
|
layouts/partials/article/contribute.html
|
{{ if and .IsSection (ne .Page.Params.generated true) }}
<br/>
<footer class="well">
<i class="fa fa-heart text-primary"></i>
Help expand the docs! Add <input id="github-add-name" type="text" value="Your Content" size=18> as a
{{ $File := .File }}
{{ $Site := .Site }}
{{with $File.Path }}
<button class="github-link github-add" data-url="{{ $Site.Params.newURL }}?filename={{ replace $File.Dir "\\" "/" }}TEMPLATE_FILE.md&value={{ partial "template-page" | safeHTMLAttr }}&message=Add page TEMPLATE_NAME under {{ $.Title }}&description=Briefly describe your new page">
<i class="fa fa-file-o"></i>
New Page
</button>
or
<button class="github-link github-add" data-url="{{ $Site.Params.newURL }}TEMPLATE_FILE?filename={{ replace $File.Dir "\\" "/" }}_index.md&value={{ partial "template-section" | safeHTMLAttr }}&message=Add section TEMPLATE_NAME under {{ $.Title }}&description=Briefly describe your new section">
<i class="fa fa-folder-open"></i>
New Section
</button>
{{- end }}
</footer>
{{ end }}
|
{{ if and .IsSection (ne .Page.Params.generated true) }}
<br/>
<footer class="well">
<i class="fa fa-heart text-primary"></i>
Help expand the docs! Add <input id="github-add-name" type="text" value="Your Content" size=18> as a
{{ $File := .File }}
{{ $Site := .Site }}
{{with $File.Path }}
<button class="github-link github-add" data-url="{{- $Site.Params.newURL -}}?filename=content/{{ replace $File.Dir "\\" "/" }}TEMPLATE_FILE.md&value={{ partial "template-page" | safeHTMLAttr }}&message=Add page TEMPLATE_NAME under {{ $.Title }}&description=Briefly describe your new page">
<i class="fa fa-file-o"></i>
New Page
</button>
or
<button class="github-link github-add" data-url="{{- $Site.Params.newURL -}}?filename=content/{{ replace $File.Dir "\\" "/" }}/TEMPLATE_FILE/_index.md&value={{ partial "template-section" | safeHTMLAttr }}&message=Add section TEMPLATE_NAME under {{ $.Title }}&description=Briefly describe your new section">
<i class="fa fa-folder-open"></i>
New Section
</button>
{{- end }}
</footer>
{{ end }}
|
Fix GitHub new page and new section links in Contribute partial
|
Fix GitHub new page and new section links in Contribute partial
|
HTML
|
mit
|
SKuipers/hugo-theme-gibbon,SKuipers/hugo-theme-gibbon
|
html
|
## Code Before:
{{ if and .IsSection (ne .Page.Params.generated true) }}
<br/>
<footer class="well">
<i class="fa fa-heart text-primary"></i>
Help expand the docs! Add <input id="github-add-name" type="text" value="Your Content" size=18> as a
{{ $File := .File }}
{{ $Site := .Site }}
{{with $File.Path }}
<button class="github-link github-add" data-url="{{ $Site.Params.newURL }}?filename={{ replace $File.Dir "\\" "/" }}TEMPLATE_FILE.md&value={{ partial "template-page" | safeHTMLAttr }}&message=Add page TEMPLATE_NAME under {{ $.Title }}&description=Briefly describe your new page">
<i class="fa fa-file-o"></i>
New Page
</button>
or
<button class="github-link github-add" data-url="{{ $Site.Params.newURL }}TEMPLATE_FILE?filename={{ replace $File.Dir "\\" "/" }}_index.md&value={{ partial "template-section" | safeHTMLAttr }}&message=Add section TEMPLATE_NAME under {{ $.Title }}&description=Briefly describe your new section">
<i class="fa fa-folder-open"></i>
New Section
</button>
{{- end }}
</footer>
{{ end }}
## Instruction:
Fix GitHub new page and new section links in Contribute partial
## Code After:
{{ if and .IsSection (ne .Page.Params.generated true) }}
<br/>
<footer class="well">
<i class="fa fa-heart text-primary"></i>
Help expand the docs! Add <input id="github-add-name" type="text" value="Your Content" size=18> as a
{{ $File := .File }}
{{ $Site := .Site }}
{{with $File.Path }}
<button class="github-link github-add" data-url="{{- $Site.Params.newURL -}}?filename=content/{{ replace $File.Dir "\\" "/" }}TEMPLATE_FILE.md&value={{ partial "template-page" | safeHTMLAttr }}&message=Add page TEMPLATE_NAME under {{ $.Title }}&description=Briefly describe your new page">
<i class="fa fa-file-o"></i>
New Page
</button>
or
<button class="github-link github-add" data-url="{{- $Site.Params.newURL -}}?filename=content/{{ replace $File.Dir "\\" "/" }}/TEMPLATE_FILE/_index.md&value={{ partial "template-section" | safeHTMLAttr }}&message=Add section TEMPLATE_NAME under {{ $.Title }}&description=Briefly describe your new section">
<i class="fa fa-folder-open"></i>
New Section
</button>
{{- end }}
</footer>
{{ end }}
|
de7687b4ebcfc9760a13f0b191e3b56857ed6ec9
|
icons/sox_search.svg
|
icons/sox_search.svg
|
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24"><path d="M15.5 14h-.79l-.28-.27C15.41 12.59 16 11.11 16 9.5 16 5.91 13.09 3 9.5 3S3 5.91 3 9.5 5.91 16 9.5 16c1.61 0 3.09-.59 4.23-1.57l.27.28v.79l5 4.99L20.49 19l-4.99-5zm-6 0C7.01 14 5 11.99 5 9.5S7.01 5 9.5 5 14 7.01 14 9.5 11.99 14 9.5 14z"/><path d="M0 0h24v24H0z" fill="none"/></svg>
|
<svg xmlns="http://www.w3.org/2000/svg" class="svg-icon s-input-icon s-input-icon__search iconSearch" aria-hidden="true" viewBox="0 0 18 18" width="18" height="18"><path d="M 18 16.5 l -5.14 -5.18 h -0.35 a 7 7 0 1 0 -1.19 1.19 v 0.35 L 16.5 18 l 1.5 -1.5 Z M 12 7 A 5 5 0 1 1 2 7 a 5 5 0 0 1 10 0 Z" /></svg>
|
Update to SE search icon
|
Update to SE search icon
|
SVG
|
mit
|
soscripted/sox,soscripted/sox
|
svg
|
## Code Before:
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24"><path d="M15.5 14h-.79l-.28-.27C15.41 12.59 16 11.11 16 9.5 16 5.91 13.09 3 9.5 3S3 5.91 3 9.5 5.91 16 9.5 16c1.61 0 3.09-.59 4.23-1.57l.27.28v.79l5 4.99L20.49 19l-4.99-5zm-6 0C7.01 14 5 11.99 5 9.5S7.01 5 9.5 5 14 7.01 14 9.5 11.99 14 9.5 14z"/><path d="M0 0h24v24H0z" fill="none"/></svg>
## Instruction:
Update to SE search icon
## Code After:
<svg xmlns="http://www.w3.org/2000/svg" class="svg-icon s-input-icon s-input-icon__search iconSearch" aria-hidden="true" viewBox="0 0 18 18" width="18" height="18"><path d="M 18 16.5 l -5.14 -5.18 h -0.35 a 7 7 0 1 0 -1.19 1.19 v 0.35 L 16.5 18 l 1.5 -1.5 Z M 12 7 A 5 5 0 1 1 2 7 a 5 5 0 0 1 10 0 Z" /></svg>
|
192def661baa17b4a0ee21ec82bb4efb8f228e42
|
tests/suites/BasicObject/RelationsTest.php
|
tests/suites/BasicObject/RelationsTest.php
|
<?php
class RelationTest extends DatabaseTestCase {
public function testGetOtherModel() {
}
public function testSetOtherModel() {
}
}
|
<?php
class RelationTest extends DatabaseTestCase {
public function testGetOtherModel() {
$m1 = Blueprint::make('Model1');
$m2 = Blueprint::make('Model2');
$m1->model2_id = $m2->id;
$m1->commit();
$m2_ret = $m1->Model2();
$this->assertEquals($m2, $m2_ret);
}
/**
* @depends testGetOtherModel
*/
public function testSetOtherModel() {
$m1 = Blueprint::make('Model1');
$m2 = Blueprint::make('Model2');
$m1->Model2 = $m2;
$this->assertEquals($m2->id, $m1->model2_id);
$this->assertEquals($m2, $m1->Model2());
$m1->commit();
}
}
|
Add test for relation options
|
Add test for relation options
|
PHP
|
mit
|
NitroXy/BasicObject,NitroXy/BasicObject
|
php
|
## Code Before:
<?php
class RelationTest extends DatabaseTestCase {
public function testGetOtherModel() {
}
public function testSetOtherModel() {
}
}
## Instruction:
Add test for relation options
## Code After:
<?php
class RelationTest extends DatabaseTestCase {
public function testGetOtherModel() {
$m1 = Blueprint::make('Model1');
$m2 = Blueprint::make('Model2');
$m1->model2_id = $m2->id;
$m1->commit();
$m2_ret = $m1->Model2();
$this->assertEquals($m2, $m2_ret);
}
/**
* @depends testGetOtherModel
*/
public function testSetOtherModel() {
$m1 = Blueprint::make('Model1');
$m2 = Blueprint::make('Model2');
$m1->Model2 = $m2;
$this->assertEquals($m2->id, $m1->model2_id);
$this->assertEquals($m2, $m1->Model2());
$m1->commit();
}
}
|
7ba8d4fe43f2c2739c632258be76399865cdb7c4
|
app/controllers/styleguide_controller.rb
|
app/controllers/styleguide_controller.rb
|
class StyleguideController < ApplicationController
def show
end
def css
@sections ||= Styleguide.new.sections.inject({}) do |h, (k, v)|
h[k] = StyleguideSectionDecorator.new(v); h
end
end
end
|
class StyleguideController < ApplicationController
def css
@sections ||= Styleguide.new.sections.inject({}) do |h, (k, v)|
h[k] = StyleguideSectionDecorator.new(v); h
end
end
end
|
Remove empty styleguide controller action
|
Remove empty styleguide controller action
|
Ruby
|
mit
|
moneyadviceservice/frontend,moneyadviceservice/frontend,moneyadviceservice/frontend,moneyadviceservice/frontend
|
ruby
|
## Code Before:
class StyleguideController < ApplicationController
def show
end
def css
@sections ||= Styleguide.new.sections.inject({}) do |h, (k, v)|
h[k] = StyleguideSectionDecorator.new(v); h
end
end
end
## Instruction:
Remove empty styleguide controller action
## Code After:
class StyleguideController < ApplicationController
def css
@sections ||= Styleguide.new.sections.inject({}) do |h, (k, v)|
h[k] = StyleguideSectionDecorator.new(v); h
end
end
end
|
53306b3183a306c3c99be99e389ef0377e7ccf1d
|
app/controllers/registrations_controller.rb
|
app/controllers/registrations_controller.rb
|
class RegistrationsController < Devise::RegistrationsController
def after_update_path_for(_resource)
edit_user_registration_path
end
def update_resource(resource, params)
if current_user.provider == 'google_oauth2'
params.delete('current_password')
resource.update_without_password(params)
else
resource.update_with_password(params)
end
end
end
|
class RegistrationsController < Devise::RegistrationsController
protected
def after_update_path_for(_resource)
edit_user_registration_path
end
def update_resource(resource, params)
if current_user.provider == 'google_oauth2'
params.delete('current_password')
resource.update_without_password(params)
else
resource.update_with_password(params)
end
end
end
|
Make methods in RegistrationsController protected
|
Make methods in RegistrationsController protected
|
Ruby
|
agpl-3.0
|
julianguyen/ifme,julianguyen/ifme,julianguyen/ifme,cartothemax/ifme,cartothemax/ifme,cartothemax/ifme,cartothemax/ifme,julianguyen/ifme
|
ruby
|
## Code Before:
class RegistrationsController < Devise::RegistrationsController
def after_update_path_for(_resource)
edit_user_registration_path
end
def update_resource(resource, params)
if current_user.provider == 'google_oauth2'
params.delete('current_password')
resource.update_without_password(params)
else
resource.update_with_password(params)
end
end
end
## Instruction:
Make methods in RegistrationsController protected
## Code After:
class RegistrationsController < Devise::RegistrationsController
protected
def after_update_path_for(_resource)
edit_user_registration_path
end
def update_resource(resource, params)
if current_user.provider == 'google_oauth2'
params.delete('current_password')
resource.update_without_password(params)
else
resource.update_with_password(params)
end
end
end
|
8b0df1c72381b314090935410cf6aa75eb7e2c47
|
setup/create_symlinks.sh
|
setup/create_symlinks.sh
|
source ./setup/header.sh
echo "Creating symlinks..."
# Symlink Atom configuration
mkdir -p ~/.atom
ln -sf "$PWD"/atom/* ~/.atom
# Symlink Bash configuration
ln -snf "$PWD"/terminal/.bashrc ~/.bashrc
ln -snf "$PWD"/terminal/.bashrc ~/.bash_profile
# Symlink custom completions and remove overridden completions
ln -snf "$PWD"/terminal/bash/completions.sh /usr/local/etc/bash_completion.d/dotfiles-completions.sh
rm -f /usr/local/etc/bash_completion.d/brew
rm -f /usr/local/etc/bash_completion.d/npm
# Symlink miscellaneous configuration
ln -snf "$PWD"/terminal/.vimrc ~/.vimrc
ln -snf "$PWD"/git/.gitconfig ~/.gitconfig
# Symlink SSH configuration
mkdir -p ~/.ssh
mkdir -p ~/.ssh/sockets
ln -snf "$PWD"/ssh/ssh_config ~/.ssh/config
mkdir -p ~/.gnupg
ln -snf "$PWD"/gpg/gpg.conf ~/.gnupg/gpg.conf
ln -snf "$PWD"/gpg/gpg-agent.conf ~/.gnupg/gpg-agent.conf
# Disable Bash Sessions feature in OS X El Capitan
touch ~/.bash_sessions_disable
|
source ./setup/header.sh
echo "Creating symlinks..."
# Symlink Atom configuration
mkdir -p ~/.atom
ln -sf "$PWD"/atom/* ~/.atom
# Symlink Bash configuration
ln -snf "$PWD"/terminal/.bashrc ~/.bashrc
ln -snf "$PWD"/terminal/.bashrc ~/.bash_profile
# Symlink custom completions and remove overridden completions
ln -snf "$PWD"/terminal/bash/completions.sh /usr/local/etc/bash_completion.d/dotfiles-completions.sh
rm -f /usr/local/etc/bash_completion.d/brew
rm -f /usr/local/etc/bash_completion.d/npm
# Symlink miscellaneous configuration
ln -snf "$PWD"/terminal/.vimrc ~/.vimrc
ln -snf "$PWD"/git/.gitconfig ~/.gitconfig
# Symlink SSH configuration
mkdir -p ~/.ssh
mkdir -p ~/.ssh/sockets
ln -snf "$PWD"/ssh/ssh_config ~/.ssh/config
mkdir -p ~/.gnupg
ln -sf "$PWD"/gpg/* ~/.gnupg
# Disable Bash Sessions feature in OS X El Capitan
touch ~/.bash_sessions_disable
|
Reduce GPG config symlink creation to one line
|
Reduce GPG config symlink creation to one line
|
Shell
|
mit
|
caleb531/dotfiles,caleb531/dotfiles,caleb531/dotfiles,caleb531/dotfiles
|
shell
|
## Code Before:
source ./setup/header.sh
echo "Creating symlinks..."
# Symlink Atom configuration
mkdir -p ~/.atom
ln -sf "$PWD"/atom/* ~/.atom
# Symlink Bash configuration
ln -snf "$PWD"/terminal/.bashrc ~/.bashrc
ln -snf "$PWD"/terminal/.bashrc ~/.bash_profile
# Symlink custom completions and remove overridden completions
ln -snf "$PWD"/terminal/bash/completions.sh /usr/local/etc/bash_completion.d/dotfiles-completions.sh
rm -f /usr/local/etc/bash_completion.d/brew
rm -f /usr/local/etc/bash_completion.d/npm
# Symlink miscellaneous configuration
ln -snf "$PWD"/terminal/.vimrc ~/.vimrc
ln -snf "$PWD"/git/.gitconfig ~/.gitconfig
# Symlink SSH configuration
mkdir -p ~/.ssh
mkdir -p ~/.ssh/sockets
ln -snf "$PWD"/ssh/ssh_config ~/.ssh/config
mkdir -p ~/.gnupg
ln -snf "$PWD"/gpg/gpg.conf ~/.gnupg/gpg.conf
ln -snf "$PWD"/gpg/gpg-agent.conf ~/.gnupg/gpg-agent.conf
# Disable Bash Sessions feature in OS X El Capitan
touch ~/.bash_sessions_disable
## Instruction:
Reduce GPG config symlink creation to one line
## Code After:
source ./setup/header.sh
echo "Creating symlinks..."
# Symlink Atom configuration
mkdir -p ~/.atom
ln -sf "$PWD"/atom/* ~/.atom
# Symlink Bash configuration
ln -snf "$PWD"/terminal/.bashrc ~/.bashrc
ln -snf "$PWD"/terminal/.bashrc ~/.bash_profile
# Symlink custom completions and remove overridden completions
ln -snf "$PWD"/terminal/bash/completions.sh /usr/local/etc/bash_completion.d/dotfiles-completions.sh
rm -f /usr/local/etc/bash_completion.d/brew
rm -f /usr/local/etc/bash_completion.d/npm
# Symlink miscellaneous configuration
ln -snf "$PWD"/terminal/.vimrc ~/.vimrc
ln -snf "$PWD"/git/.gitconfig ~/.gitconfig
# Symlink SSH configuration
mkdir -p ~/.ssh
mkdir -p ~/.ssh/sockets
ln -snf "$PWD"/ssh/ssh_config ~/.ssh/config
mkdir -p ~/.gnupg
ln -sf "$PWD"/gpg/* ~/.gnupg
# Disable Bash Sessions feature in OS X El Capitan
touch ~/.bash_sessions_disable
|
6ae09e1ca7567bab291b9b114593e7f201c4dfb5
|
terraform/ecs/task-definition.tf
|
terraform/ecs/task-definition.tf
|
resource "aws_ecs_task_definition" "ecs-pace-task-definition" {
family = "pace-task-definition"
container_definitions = <<EOF
[{
"name": "pace",
"image": "lplotni/pace-app",
"cpu": 1024,
"memory": 512,
"logConfiguration": {
"logDriver": "awslogs",
"options": {
"awslogs-region": "eu-central-1",
"awslogs-create-group": "true",
"awslogs-group": "pace-logs"
}
},
"environment": [{
"name": "REDISHOST",
"value": "${var.redis-ip}:6379"
},
{
"name":"DATABASE_URL",
"value": "postgres://root:${var.postgres-password}@${var.postgres-ip}/pacedb"
}],
"portMappings": [{
"containerPort": 3000,
"hostPort": 3000
}],
"essential": true,
"command": ["./node_modules/db-migrate/bin/db-migrate up && ./node_modules//usr/local/bin/npm", "start"]
}
]
EOF
}
|
resource "aws_ecs_task_definition" "ecs-pace-task-definition" {
family = "pace-task-definition"
container_definitions = <<EOF
[{
"name": "pace",
"image": "lplotni/pace-app",
"cpu": 1024,
"memory": 512,
"logConfiguration": {
"logDriver": "awslogs",
"options": {
"awslogs-region": "eu-central-1",
"awslogs-create-group": "true",
"awslogs-group": "pace-logs"
}
},
"environment": [{
"name": "REDISHOST",
"value": "${var.redis-ip}:6379"
},
{
"name":"DATABASE_URL",
"value": "postgres://root:${var.postgres-password}@${var.postgres-ip}/pacedb"
}],
"portMappings": [{
"containerPort": 3000,
"hostPort": 3000
}],
"essential": true,
"command": ["/usr/local/bin/npm", "start"]
}
]
EOF
}
|
Revert "migrate db on startup"
|
Revert "migrate db on startup"
This reverts commit bf73a6690c42ab81d020b6db85b51c456f329b4d.
|
HCL
|
apache-2.0
|
lplotni/pace,lplotni/pace,cz8s/pace,cz8s/pace,lplotni/pace,cz8s/pace
|
hcl
|
## Code Before:
resource "aws_ecs_task_definition" "ecs-pace-task-definition" {
family = "pace-task-definition"
container_definitions = <<EOF
[{
"name": "pace",
"image": "lplotni/pace-app",
"cpu": 1024,
"memory": 512,
"logConfiguration": {
"logDriver": "awslogs",
"options": {
"awslogs-region": "eu-central-1",
"awslogs-create-group": "true",
"awslogs-group": "pace-logs"
}
},
"environment": [{
"name": "REDISHOST",
"value": "${var.redis-ip}:6379"
},
{
"name":"DATABASE_URL",
"value": "postgres://root:${var.postgres-password}@${var.postgres-ip}/pacedb"
}],
"portMappings": [{
"containerPort": 3000,
"hostPort": 3000
}],
"essential": true,
"command": ["./node_modules/db-migrate/bin/db-migrate up && ./node_modules//usr/local/bin/npm", "start"]
}
]
EOF
}
## Instruction:
Revert "migrate db on startup"
This reverts commit bf73a6690c42ab81d020b6db85b51c456f329b4d.
## Code After:
resource "aws_ecs_task_definition" "ecs-pace-task-definition" {
family = "pace-task-definition"
container_definitions = <<EOF
[{
"name": "pace",
"image": "lplotni/pace-app",
"cpu": 1024,
"memory": 512,
"logConfiguration": {
"logDriver": "awslogs",
"options": {
"awslogs-region": "eu-central-1",
"awslogs-create-group": "true",
"awslogs-group": "pace-logs"
}
},
"environment": [{
"name": "REDISHOST",
"value": "${var.redis-ip}:6379"
},
{
"name":"DATABASE_URL",
"value": "postgres://root:${var.postgres-password}@${var.postgres-ip}/pacedb"
}],
"portMappings": [{
"containerPort": 3000,
"hostPort": 3000
}],
"essential": true,
"command": ["/usr/local/bin/npm", "start"]
}
]
EOF
}
|
0cb48b8b7fe8b44e369abf213ffc2909bf978159
|
lib/json/api.rb
|
lib/json/api.rb
|
require "json/api/version"
require 'json'
require 'json-schema'
module JSON
module Api
def self.parse(source, opts={})
json = JSON.parse(source, opts)
validate(json, opts)
json
# rescue
# nil
end
def self.validate(source, opts={})
source = JSON.parse(source) if source.kind_of?(String)
JSON::Validator.validate!('lib/json/api/schema.json', source)
end
end
end
|
require "json/api/version"
require 'json'
require 'json-schema'
module JSON
module Api
def self.parse(source, opts={})
json = JSON.parse(source, opts)
validate(json, opts)
json
# rescue
# nil
end
def self.validate(source, opts={})
source = JSON.parse(source) if source.kind_of?(String)
JSON::Validator.validate!(File.expand_path('api/schema.json', File.dirname(__FILE__)), source)
end
end
end
|
Fix file paths outside gem
|
Fix file paths outside gem
|
Ruby
|
mit
|
json-api/parser-ruby,davidtrogers/parser-ruby,janusnic/parser-ruby
|
ruby
|
## Code Before:
require "json/api/version"
require 'json'
require 'json-schema'
module JSON
module Api
def self.parse(source, opts={})
json = JSON.parse(source, opts)
validate(json, opts)
json
# rescue
# nil
end
def self.validate(source, opts={})
source = JSON.parse(source) if source.kind_of?(String)
JSON::Validator.validate!('lib/json/api/schema.json', source)
end
end
end
## Instruction:
Fix file paths outside gem
## Code After:
require "json/api/version"
require 'json'
require 'json-schema'
module JSON
module Api
def self.parse(source, opts={})
json = JSON.parse(source, opts)
validate(json, opts)
json
# rescue
# nil
end
def self.validate(source, opts={})
source = JSON.parse(source) if source.kind_of?(String)
JSON::Validator.validate!(File.expand_path('api/schema.json', File.dirname(__FILE__)), source)
end
end
end
|
ea30c74f6049849510b8722da4cfbf6e6894e3cb
|
spec/factories/systems.rb
|
spec/factories/systems.rb
|
FactoryGirl.define do
factory :system, class: System do
name 'stack-12345678'
template_parameters '{}'
parameters '{}'
pattern { create(:pattern) }
after(:build) do
System.skip_callback :save, :before, :create_stack
end
after(:create) do
System.set_callback :save, :before, :create_stack, if: -> { status == :NOT_CREATED }
end
before(:create) do |system|
system.add_cloud create(:cloud_aws), 1
end
end
end
|
FactoryGirl.define do
factory :system, class: System do
sequence(:name) { |n| "stack-#{n}" }
template_parameters '{}'
parameters '{}'
pattern { create(:pattern) }
after(:build) do
System.skip_callback :save, :before, :create_stack
end
after(:create) do
System.set_callback :save, :before, :create_stack, if: -> { status == :NOT_CREATED }
end
before(:create) do |system|
system.add_cloud create(:cloud_aws), 1
end
end
end
|
Change the system name to be unique
|
Change the system name to be unique
|
Ruby
|
apache-2.0
|
cloudconductor/cloud_conductor,cloudconductor/cloud_conductor,cloudconductor/cloud_conductor
|
ruby
|
## Code Before:
FactoryGirl.define do
factory :system, class: System do
name 'stack-12345678'
template_parameters '{}'
parameters '{}'
pattern { create(:pattern) }
after(:build) do
System.skip_callback :save, :before, :create_stack
end
after(:create) do
System.set_callback :save, :before, :create_stack, if: -> { status == :NOT_CREATED }
end
before(:create) do |system|
system.add_cloud create(:cloud_aws), 1
end
end
end
## Instruction:
Change the system name to be unique
## Code After:
FactoryGirl.define do
factory :system, class: System do
sequence(:name) { |n| "stack-#{n}" }
template_parameters '{}'
parameters '{}'
pattern { create(:pattern) }
after(:build) do
System.skip_callback :save, :before, :create_stack
end
after(:create) do
System.set_callback :save, :before, :create_stack, if: -> { status == :NOT_CREATED }
end
before(:create) do |system|
system.add_cloud create(:cloud_aws), 1
end
end
end
|
077b35eac4fca4535dc84e9bc636e8d9098c1781
|
scripts/install_sccache.sh
|
scripts/install_sccache.sh
|
set -ex
# https://stackoverflow.com/a/34676160/2489366
# the directory of the script
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
# the temp directory used, within $DIR
# omit the -p parameter to create a temporal directory in the default location
WORK_DIR=`mktemp -d -p "$DIR"`
# check if tmp dir was created
if [[ ! "$WORK_DIR" || ! -d "$WORK_DIR" ]]; then
echo "Could not create temp dir"
exit 1
fi
# deletes the temp directory
function cleanup {
rm -rf "$WORK_DIR"
echo "Deleted temp working directory $WORK_DIR"
}
# register the cleanup function to be called on the EXIT signal
trap cleanup EXIT
if [[ $1 == *"apple"* ]]; then
TARGET=$1
else
TARGET='x86_64-unknown-linux-musl'
fi
VERSION="0.2.10"
SCCACHE_VERSION="sccache-${VERSION}-${TARGET}"
pushd ${WORK_DIR}
curl -L "https://github.com/mozilla/sccache/releases/download/${VERSION}/$SCCACHE_VERSION.tar.gz" | tar -xvz
mv $SCCACHE_VERSION/sccache .
chmod +x ./sccache
mv ./sccache $HOME/bin/
popd
|
set -ex
# https://stackoverflow.com/a/34676160/2489366
# the directory of the script
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
# the temp directory used, within $DIR
# omit the -p parameter to create a temporal directory in the default location
WORK_DIR=`mktemp -d "$DIR.XXXXXXXX"`
# check if tmp dir was created
if [[ ! "$WORK_DIR" || ! -d "$WORK_DIR" ]]; then
echo "Could not create temp dir"
exit 1
fi
# deletes the temp directory
function cleanup {
rm -rf "$WORK_DIR"
echo "Deleted temp working directory $WORK_DIR"
}
# register the cleanup function to be called on the EXIT signal
trap cleanup EXIT
if [[ $1 == *"apple"* ]]; then
TARGET=$1
else
TARGET='x86_64-unknown-linux-musl'
fi
VERSION="0.2.10"
SCCACHE_VERSION="sccache-${VERSION}-${TARGET}"
pushd ${WORK_DIR}
curl -L "https://github.com/mozilla/sccache/releases/download/${VERSION}/$SCCACHE_VERSION.tar.gz" | tar -xvz
mv $SCCACHE_VERSION/sccache .
chmod +x ./sccache
mv ./sccache $HOME/bin/
popd
|
Fix sccache installation on OSX
|
Fix sccache installation on OSX
|
Shell
|
mit
|
gluon-lang/gluon,Marwes/embed_lang,gluon-lang/gluon,Marwes/embed_lang
|
shell
|
## Code Before:
set -ex
# https://stackoverflow.com/a/34676160/2489366
# the directory of the script
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
# the temp directory used, within $DIR
# omit the -p parameter to create a temporal directory in the default location
WORK_DIR=`mktemp -d -p "$DIR"`
# check if tmp dir was created
if [[ ! "$WORK_DIR" || ! -d "$WORK_DIR" ]]; then
echo "Could not create temp dir"
exit 1
fi
# deletes the temp directory
function cleanup {
rm -rf "$WORK_DIR"
echo "Deleted temp working directory $WORK_DIR"
}
# register the cleanup function to be called on the EXIT signal
trap cleanup EXIT
if [[ $1 == *"apple"* ]]; then
TARGET=$1
else
TARGET='x86_64-unknown-linux-musl'
fi
VERSION="0.2.10"
SCCACHE_VERSION="sccache-${VERSION}-${TARGET}"
pushd ${WORK_DIR}
curl -L "https://github.com/mozilla/sccache/releases/download/${VERSION}/$SCCACHE_VERSION.tar.gz" | tar -xvz
mv $SCCACHE_VERSION/sccache .
chmod +x ./sccache
mv ./sccache $HOME/bin/
popd
## Instruction:
Fix sccache installation on OSX
## Code After:
set -ex
# https://stackoverflow.com/a/34676160/2489366
# the directory of the script
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
# the temp directory used, within $DIR
# omit the -p parameter to create a temporal directory in the default location
WORK_DIR=`mktemp -d "$DIR.XXXXXXXX"`
# check if tmp dir was created
if [[ ! "$WORK_DIR" || ! -d "$WORK_DIR" ]]; then
echo "Could not create temp dir"
exit 1
fi
# deletes the temp directory
function cleanup {
rm -rf "$WORK_DIR"
echo "Deleted temp working directory $WORK_DIR"
}
# register the cleanup function to be called on the EXIT signal
trap cleanup EXIT
if [[ $1 == *"apple"* ]]; then
TARGET=$1
else
TARGET='x86_64-unknown-linux-musl'
fi
VERSION="0.2.10"
SCCACHE_VERSION="sccache-${VERSION}-${TARGET}"
pushd ${WORK_DIR}
curl -L "https://github.com/mozilla/sccache/releases/download/${VERSION}/$SCCACHE_VERSION.tar.gz" | tar -xvz
mv $SCCACHE_VERSION/sccache .
chmod +x ./sccache
mv ./sccache $HOME/bin/
popd
|
55112fd0169b941a0d6fab21d4a11ddfcdc0c577
|
unittests/mixer_test.cpp
|
unittests/mixer_test.cpp
|
int main(int argc, char *argv[]) {
warnx("Host execution started");
char* args[] = {argv[0], "../../ROMFS/px4fmu_common/mixers/IO_pass.mix",
"../../ROMFS/px4fmu_common/mixers/FMU_quad_w.mix"};
test_mixer(3, args);
test_conv(1, args);
}
|
int main(int argc, char *argv[]) {
int ret;
warnx("Host execution started");
char* args[] = {argv[0], "../ROMFS/px4fmu_common/mixers/IO_pass.mix",
"../ROMFS/px4fmu_common/mixers/FMU_quad_w.mix"};
if (ret = test_mixer(3, args));
test_conv(1, args);
return 0;
}
|
Improve mixer test, no firm checks yet
|
Improve mixer test, no firm checks yet
|
C++
|
bsd-3-clause
|
jlecoeur/Firmware,PX4/Firmware,jlecoeur/Firmware,mje-nz/PX4-Firmware,PX4/Firmware,jlecoeur/Firmware,mje-nz/PX4-Firmware,acfloria/Firmware,PX4/Firmware,dagar/Firmware,Aerotenna/Firmware,krbeverx/Firmware,acfloria/Firmware,Aerotenna/Firmware,krbeverx/Firmware,mcgill-robotics/Firmware,mcgill-robotics/Firmware,dagar/Firmware,Aerotenna/Firmware,darknight-007/Firmware,krbeverx/Firmware,dagar/Firmware,mje-nz/PX4-Firmware,dagar/Firmware,acfloria/Firmware,mje-nz/PX4-Firmware,mje-nz/PX4-Firmware,jlecoeur/Firmware,darknight-007/Firmware,PX4/Firmware,darknight-007/Firmware,dagar/Firmware,mcgill-robotics/Firmware,acfloria/Firmware,krbeverx/Firmware,krbeverx/Firmware,Aerotenna/Firmware,jlecoeur/Firmware,jlecoeur/Firmware,Aerotenna/Firmware,krbeverx/Firmware,PX4/Firmware,PX4/Firmware,acfloria/Firmware,krbeverx/Firmware,acfloria/Firmware,acfloria/Firmware,mcgill-robotics/Firmware,jlecoeur/Firmware,darknight-007/Firmware,dagar/Firmware,mcgill-robotics/Firmware,Aerotenna/Firmware,mje-nz/PX4-Firmware,mcgill-robotics/Firmware,jlecoeur/Firmware,PX4/Firmware,darknight-007/Firmware,dagar/Firmware,Aerotenna/Firmware,mcgill-robotics/Firmware,mje-nz/PX4-Firmware
|
c++
|
## Code Before:
int main(int argc, char *argv[]) {
warnx("Host execution started");
char* args[] = {argv[0], "../../ROMFS/px4fmu_common/mixers/IO_pass.mix",
"../../ROMFS/px4fmu_common/mixers/FMU_quad_w.mix"};
test_mixer(3, args);
test_conv(1, args);
}
## Instruction:
Improve mixer test, no firm checks yet
## Code After:
int main(int argc, char *argv[]) {
int ret;
warnx("Host execution started");
char* args[] = {argv[0], "../ROMFS/px4fmu_common/mixers/IO_pass.mix",
"../ROMFS/px4fmu_common/mixers/FMU_quad_w.mix"};
if (ret = test_mixer(3, args));
test_conv(1, args);
return 0;
}
|
aa5e3418ffa359197eeea0334982925b4bf38218
|
commands.js
|
commands.js
|
// Commands are called in the following manner:
// commands[command](message, config, msg, ...parameters)
// msg is the message content without the prefix or command
module.exports = {
'ping': (message) => {
message.edit("pong!");
},
// _ denotes arguments we don't care about
'selfbot_off': (message) => {
message.edit("Selfbot logging off.").then(() => {
console.log("Forced to disconnect.");
process.exit(0);
});
},
'reply': (message, _, msg) => {
let arr = msg.split(/\.s\.(.+)/)
message.edit(
`\`\`\`css\n> ${arr[0]}\`\`\`${arr[1]}`
);
},
'shrug': (message, _, msg) => {
message.edit(msg + "¯\\_(ツ)_\/¯");
},
'hideyourshame': (message, config, msg, n=0) => {
msg = msg.slice(n.length + 1),
n = parseInt(n) + 1;
message.channel.fetchMessages( {limit: 99} )
.then((messages) => {
messages.array()
.filter(m => m.author.id === config.soupmaster)
.slice(0,n)
.forEach(message => message.delete());
});
}
}
|
// Commands are called in the following manner:
// commands[command](message, config, msg, ...parameters)
// msg is the message content without the prefix or command
module.exports = {
'ping': (message) => {
message.edit("pong!");
},
// _ denotes arguments we don't care about
'selfbot_off': (message) => {
message.edit("Selfbot logging off.").then(() => {
console.log("Forced to disconnect.");
process.exit(0);
});
},
'reply': (message, _, msg) => {
let arr = msg.split(/\.s\.(.+)/)
message.edit(
`\`\`\`css\n> ${arr[0]}\`\`\`${arr[1]}`
);
},
'shrug': (message, _, msg) => {
message.edit(msg + "¯\\_(ツ)_\/¯");
},
'hideyourshame': (message, config, _, n=0) => {
n = parseInt(n) + 1;
message.channel.fetchMessages( {limit: 99} )
.then((messages) => {
messages.array()
.filter(m => m.author.id === config.soupmaster)
.slice(0,n)
.forEach(message => message.delete());
});
}
}
|
Remove unnecessary msg use in hideyourshame
|
Remove unnecessary msg use in hideyourshame
|
JavaScript
|
unlicense
|
Rafer45/Warhol
|
javascript
|
## Code Before:
// Commands are called in the following manner:
// commands[command](message, config, msg, ...parameters)
// msg is the message content without the prefix or command
module.exports = {
'ping': (message) => {
message.edit("pong!");
},
// _ denotes arguments we don't care about
'selfbot_off': (message) => {
message.edit("Selfbot logging off.").then(() => {
console.log("Forced to disconnect.");
process.exit(0);
});
},
'reply': (message, _, msg) => {
let arr = msg.split(/\.s\.(.+)/)
message.edit(
`\`\`\`css\n> ${arr[0]}\`\`\`${arr[1]}`
);
},
'shrug': (message, _, msg) => {
message.edit(msg + "¯\\_(ツ)_\/¯");
},
'hideyourshame': (message, config, msg, n=0) => {
msg = msg.slice(n.length + 1),
n = parseInt(n) + 1;
message.channel.fetchMessages( {limit: 99} )
.then((messages) => {
messages.array()
.filter(m => m.author.id === config.soupmaster)
.slice(0,n)
.forEach(message => message.delete());
});
}
}
## Instruction:
Remove unnecessary msg use in hideyourshame
## Code After:
// Commands are called in the following manner:
// commands[command](message, config, msg, ...parameters)
// msg is the message content without the prefix or command
module.exports = {
'ping': (message) => {
message.edit("pong!");
},
// _ denotes arguments we don't care about
'selfbot_off': (message) => {
message.edit("Selfbot logging off.").then(() => {
console.log("Forced to disconnect.");
process.exit(0);
});
},
'reply': (message, _, msg) => {
let arr = msg.split(/\.s\.(.+)/)
message.edit(
`\`\`\`css\n> ${arr[0]}\`\`\`${arr[1]}`
);
},
'shrug': (message, _, msg) => {
message.edit(msg + "¯\\_(ツ)_\/¯");
},
'hideyourshame': (message, config, _, n=0) => {
n = parseInt(n) + 1;
message.channel.fetchMessages( {limit: 99} )
.then((messages) => {
messages.array()
.filter(m => m.author.id === config.soupmaster)
.slice(0,n)
.forEach(message => message.delete());
});
}
}
|
cab7f80534c70cbd3afe77af8868bd656112a2f6
|
.travis.yml
|
.travis.yml
|
language: node_js
node_js:
- 0.8
|
language: node_js
node_js:
- 0.8
language: ruby
rvm:
- 1.8.7
install:
- gem update --system
- gem install sass
|
Update Travis file with Ruby and Sass info
|
Update Travis file with Ruby and Sass info
|
YAML
|
mit
|
freeyiyi1993/grunt-contrib-sass,kentendo/grunt-contrib-sass,cvrebert/grunt-contrib-sass,gruntjs/grunt-contrib-sass,konder/grunt-contrib-sass,linemanjs/grunt-contrib-sass,liquidmetal/grunt-contrib-sass,vladikoff/grunt-contrib-sass,craigweston/grunt-contrib-sass,cvrebert/grunt-contrib-sass,matsumos/grunt-slim
|
yaml
|
## Code Before:
language: node_js
node_js:
- 0.8
## Instruction:
Update Travis file with Ruby and Sass info
## Code After:
language: node_js
node_js:
- 0.8
language: ruby
rvm:
- 1.8.7
install:
- gem update --system
- gem install sass
|
e58853dfdef416ee98ff7a032415f47799be7f6d
|
install-salt.sh
|
install-salt.sh
|
apt-get update
apt-get install -y python-software-properties
add-apt-repository -y ppa:saltstack/salt
apt-get update
apt-get install -y salt-minion
sed -i 's/#master: salt/master: '$MASTERIP'/g' /etc/salt/minion
service salt-minion restart
exit 0
|
apt-get update
echo 'deb http://repo.saltstack.com/apt/ubuntu/14.04/amd64/latest trusty main' > /etc/apt/sources.list.d/saltstack.list
curl https://repo.saltstack.com/apt/ubuntu/14.04/amd64/latest/SALTSTACK-GPG-KEY.pub | sudo apt-key add -
apt-get update
apt-get install -y salt-minion
sed -i 's/#master: salt/master: '$MASTERIP'/g' /etc/salt/minion
service salt-minion restart
exit 0
|
Update the minion installer to use the real repository.
|
Update the minion installer to use the real repository.
|
Shell
|
mit
|
ZacharyDuBois/Random-Scripts
|
shell
|
## Code Before:
apt-get update
apt-get install -y python-software-properties
add-apt-repository -y ppa:saltstack/salt
apt-get update
apt-get install -y salt-minion
sed -i 's/#master: salt/master: '$MASTERIP'/g' /etc/salt/minion
service salt-minion restart
exit 0
## Instruction:
Update the minion installer to use the real repository.
## Code After:
apt-get update
echo 'deb http://repo.saltstack.com/apt/ubuntu/14.04/amd64/latest trusty main' > /etc/apt/sources.list.d/saltstack.list
curl https://repo.saltstack.com/apt/ubuntu/14.04/amd64/latest/SALTSTACK-GPG-KEY.pub | sudo apt-key add -
apt-get update
apt-get install -y salt-minion
sed -i 's/#master: salt/master: '$MASTERIP'/g' /etc/salt/minion
service salt-minion restart
exit 0
|
f6fedc4284a2b85b9acfdbe3b0d7b48293ac0df5
|
README.md
|
README.md
|
SheetMusicViewer
================
Simple sheet music PDF viewer implemented in Qt (and thus for Linux, Embedded Linux, Android, Windows, probably Mac...)
Build instructions
------------------
Clone the repository using the git command line utility (or a tool of your choice). Make sure the submodules are (recursively) cloned as well:
git clone https://github.com/felixhaedicke/SheetMusicViewer.git
git submodule update --init --recursive
For the mupdf subproject, some files need to be generated:
cd mupdf
make generate
cd ..
Generate icon png files:
rsvg-convert icon.svg -w 48 -h 48 -o icon-48.png
rsvg-convert icon.svg -w 72 -h 72 -o icon-72.png
rsvg-convert icon.svg -w 96 -h 96 -o icon-96.png
Build SheetMusicViewer using qmake (or an IDE of your choice):
qmake
At least when compiling for Android, you can get errors like this:
mupdf/fitz/memento.h:182:1: error: unknown type name 'size_t'
To fix it, you need to apply a patch on mupdf:
cd mupdf
patch -Np1 -i ../mupdf-stddef.diff
cd ..
|
SheetMusicViewer
================
Simple sheet music PDF viewer implemented in Qt (and thus for Linux, Embedded Linux, Android, Windows, probably Mac...)
Build instructions
------------------
Clone the repository using the git command line utility (or a tool of your choice). Make sure the submodules are (recursively) cloned as well:
git clone https://github.com/felixhaedicke/SheetMusicViewer.git
git submodule update --init --recursive
For the mupdf subproject, some files need to be generated:
cd mupdf
make generate
cd ..
or alternatively, when building with MSVC on Windows:
cd mupdf/win32
generate.bat
cd ../..
Generate icon png files:
rsvg-convert icon.svg -w 48 -h 48 -o icon-48.png
rsvg-convert icon.svg -w 72 -h 72 -o icon-72.png
rsvg-convert icon.svg -w 96 -h 96 -o icon-96.png
Build SheetMusicViewer using qmake (or an IDE of your choice):
qmake
At least when compiling for Android, you can get errors like this:
mupdf/fitz/memento.h:182:1: error: unknown type name 'size_t'
To fix it, you need to apply a patch on mupdf:
cd mupdf
patch -Np1 -i ../mupdf-stddef.diff
cd ..
|
Add mupdf file generation hint for Windows / MSVC
|
Add mupdf file generation hint for Windows / MSVC
|
Markdown
|
agpl-3.0
|
felixhaedicke/SheetMusicViewer
|
markdown
|
## Code Before:
SheetMusicViewer
================
Simple sheet music PDF viewer implemented in Qt (and thus for Linux, Embedded Linux, Android, Windows, probably Mac...)
Build instructions
------------------
Clone the repository using the git command line utility (or a tool of your choice). Make sure the submodules are (recursively) cloned as well:
git clone https://github.com/felixhaedicke/SheetMusicViewer.git
git submodule update --init --recursive
For the mupdf subproject, some files need to be generated:
cd mupdf
make generate
cd ..
Generate icon png files:
rsvg-convert icon.svg -w 48 -h 48 -o icon-48.png
rsvg-convert icon.svg -w 72 -h 72 -o icon-72.png
rsvg-convert icon.svg -w 96 -h 96 -o icon-96.png
Build SheetMusicViewer using qmake (or an IDE of your choice):
qmake
At least when compiling for Android, you can get errors like this:
mupdf/fitz/memento.h:182:1: error: unknown type name 'size_t'
To fix it, you need to apply a patch on mupdf:
cd mupdf
patch -Np1 -i ../mupdf-stddef.diff
cd ..
## Instruction:
Add mupdf file generation hint for Windows / MSVC
## Code After:
SheetMusicViewer
================
Simple sheet music PDF viewer implemented in Qt (and thus for Linux, Embedded Linux, Android, Windows, probably Mac...)
Build instructions
------------------
Clone the repository using the git command line utility (or a tool of your choice). Make sure the submodules are (recursively) cloned as well:
git clone https://github.com/felixhaedicke/SheetMusicViewer.git
git submodule update --init --recursive
For the mupdf subproject, some files need to be generated:
cd mupdf
make generate
cd ..
or alternatively, when building with MSVC on Windows:
cd mupdf/win32
generate.bat
cd ../..
Generate icon png files:
rsvg-convert icon.svg -w 48 -h 48 -o icon-48.png
rsvg-convert icon.svg -w 72 -h 72 -o icon-72.png
rsvg-convert icon.svg -w 96 -h 96 -o icon-96.png
Build SheetMusicViewer using qmake (or an IDE of your choice):
qmake
At least when compiling for Android, you can get errors like this:
mupdf/fitz/memento.h:182:1: error: unknown type name 'size_t'
To fix it, you need to apply a patch on mupdf:
cd mupdf
patch -Np1 -i ../mupdf-stddef.diff
cd ..
|
81fecc378da0a39ea87b75be0323b028349de792
|
app/views/downloads/download_linux.html.erb
|
app/views/downloads/download_linux.html.erb
|
<%- @section = "downloads" %>
<%- @subsection = "" %>
<div id="main">
<h1>Download for Linux and Unix</h1>
<p>It is easiest to install Git on Linux using the preferred package manager of your Linux distribution.</p>
<h3>Debian/Ubuntu</h3>
<code>$ apt-get install git</code>
<h3>Fedora</h3>
<code>$ yum install git</code> (up to Fedora 21)<br>
<code>$ dnf install git</code> (Fedora 22 and later)
<h3>Gentoo</h3>
<code>$ emerge --ask --verbose dev-vcs/git</code>
<h3>Arch Linux</h3>
<code>$ pacman -S git</code>
<h3>openSUSE</h3>
<code>$ zypper install git</code>
<h3>FreeBSD</h3>
<code>$ cd /usr/ports/devel/git</code><br><code>$ make install</code>
<h3>Solaris 11 Express</h3>
<code>$ pkg install developer/versioning/git</code>
<h3>OpenBSD</h3>
<code>$ pkg_add git</code>
</div>
|
<%- @section = "downloads" %>
<%- @subsection = "" %>
<div id="main">
<h1>Download for Linux and Unix</h1>
<p>It is easiest to install Git on Linux using the preferred package manager of your Linux distribution.</p>
<h3>Debian/Ubuntu</h3>
<code>$ apt-get install git</code>
<h3>Fedora</h3>
<code>$ yum install git</code> (up to Fedora 21)<br>
<code>$ dnf install git</code> (Fedora 22 and later)
<h3>Gentoo</h3>
<code>$ emerge --ask --verbose dev-vcs/git</code>
<h3>Arch Linux</h3>
<code>$ pacman -S git</code>
<h3>openSUSE</h3>
<code>$ zypper install git</code>
<h3>FreeBSD</h3>
<code>$ cd /usr/ports/devel/git</code><br><code>$ make install</code>
<h3>Solaris 11 Express</h3>
<code>$ pkg install developer/versioning/git</code>
<h3>OpenBSD</h3>
<code>$ pkg_add git</code>
<h3>Slitaz</h3>
<code>$ tazpkg get-install git</code>
</div>
|
Add install instructions for Slitaz
|
Add install instructions for Slitaz
|
HTML+ERB
|
mit
|
Mokolea/git-scm.com,mosoft521/gitscm-next,jasonlong/git-scm.com,mosoft521/gitscm-next,git/git-scm.com,git/git-scm.com,jasonlong/git-scm.com,jasonlong/git-scm.com,Mokolea/git-scm.com,Mokolea/git-scm.com,git/git-scm.com,git/git-scm.com,mosoft521/gitscm-next,mosoft521/gitscm-next,Mokolea/git-scm.com,jasonlong/git-scm.com
|
html+erb
|
## Code Before:
<%- @section = "downloads" %>
<%- @subsection = "" %>
<div id="main">
<h1>Download for Linux and Unix</h1>
<p>It is easiest to install Git on Linux using the preferred package manager of your Linux distribution.</p>
<h3>Debian/Ubuntu</h3>
<code>$ apt-get install git</code>
<h3>Fedora</h3>
<code>$ yum install git</code> (up to Fedora 21)<br>
<code>$ dnf install git</code> (Fedora 22 and later)
<h3>Gentoo</h3>
<code>$ emerge --ask --verbose dev-vcs/git</code>
<h3>Arch Linux</h3>
<code>$ pacman -S git</code>
<h3>openSUSE</h3>
<code>$ zypper install git</code>
<h3>FreeBSD</h3>
<code>$ cd /usr/ports/devel/git</code><br><code>$ make install</code>
<h3>Solaris 11 Express</h3>
<code>$ pkg install developer/versioning/git</code>
<h3>OpenBSD</h3>
<code>$ pkg_add git</code>
</div>
## Instruction:
Add install instructions for Slitaz
## Code After:
<%- @section = "downloads" %>
<%- @subsection = "" %>
<div id="main">
<h1>Download for Linux and Unix</h1>
<p>It is easiest to install Git on Linux using the preferred package manager of your Linux distribution.</p>
<h3>Debian/Ubuntu</h3>
<code>$ apt-get install git</code>
<h3>Fedora</h3>
<code>$ yum install git</code> (up to Fedora 21)<br>
<code>$ dnf install git</code> (Fedora 22 and later)
<h3>Gentoo</h3>
<code>$ emerge --ask --verbose dev-vcs/git</code>
<h3>Arch Linux</h3>
<code>$ pacman -S git</code>
<h3>openSUSE</h3>
<code>$ zypper install git</code>
<h3>FreeBSD</h3>
<code>$ cd /usr/ports/devel/git</code><br><code>$ make install</code>
<h3>Solaris 11 Express</h3>
<code>$ pkg install developer/versioning/git</code>
<h3>OpenBSD</h3>
<code>$ pkg_add git</code>
<h3>Slitaz</h3>
<code>$ tazpkg get-install git</code>
</div>
|
4cb96edfec89f243aea063bb09ebdfb8b173fc3c
|
src/app/core/errorhandler/error-handler.service.ts
|
src/app/core/errorhandler/error-handler.service.ts
|
import {Injectable, ErrorHandler} from '@angular/core';
import {Response} from "@angular/http";
import {Observable} from "rxjs";
@Injectable()
export class ErrorHandlerService implements ErrorHandler {
constructor() { }
/*
* @description: handler for http-request catch-clauses
*/
handleError(error: Response | any) {
let errorMessage: string;
if (error instanceof Response) {
const body = error.json() || '';
const err = body.error || JSON.stringify(body);
errorMessage = `${error.status} - ${error.statusText || ''} ${err}`;
} else {
errorMessage = error.message ? error.message : error.toString();
}
return Observable.throw(errorMessage);
}
}
|
import {Injectable, ErrorHandler} from '@angular/core';
import {Response} from "@angular/http";
import {Observable} from "rxjs";
@Injectable()
export class ErrorHandlerService implements ErrorHandler {
constructor() { }
/*
* @description: handler for http-request catch-clauses
*/
handleError(error: Response | any) {
let errorMessage: string;
if (error instanceof Response) {
//const body = error.json() || '';
//const err = body.error || JSON.stringify(body);
const err = error.text() || '';
errorMessage = `${error.status} - ${error.statusText || ''} (${err})`;
} else {
errorMessage = error.message ? error.message : error.toString();
}
return Observable.throw(errorMessage);
}
}
|
Handle errors as plain text
|
Handle errors as plain text
|
TypeScript
|
mit
|
chipster/chipster-web,chipster/chipster-web,chipster/chipster-web
|
typescript
|
## Code Before:
import {Injectable, ErrorHandler} from '@angular/core';
import {Response} from "@angular/http";
import {Observable} from "rxjs";
@Injectable()
export class ErrorHandlerService implements ErrorHandler {
constructor() { }
/*
* @description: handler for http-request catch-clauses
*/
handleError(error: Response | any) {
let errorMessage: string;
if (error instanceof Response) {
const body = error.json() || '';
const err = body.error || JSON.stringify(body);
errorMessage = `${error.status} - ${error.statusText || ''} ${err}`;
} else {
errorMessage = error.message ? error.message : error.toString();
}
return Observable.throw(errorMessage);
}
}
## Instruction:
Handle errors as plain text
## Code After:
import {Injectable, ErrorHandler} from '@angular/core';
import {Response} from "@angular/http";
import {Observable} from "rxjs";
@Injectable()
export class ErrorHandlerService implements ErrorHandler {
constructor() { }
/*
* @description: handler for http-request catch-clauses
*/
handleError(error: Response | any) {
let errorMessage: string;
if (error instanceof Response) {
//const body = error.json() || '';
//const err = body.error || JSON.stringify(body);
const err = error.text() || '';
errorMessage = `${error.status} - ${error.statusText || ''} (${err})`;
} else {
errorMessage = error.message ? error.message : error.toString();
}
return Observable.throw(errorMessage);
}
}
|
e95097a7580946db105802ed283d821b1c61186a
|
templates/blocks/home_slider.html
|
templates/blocks/home_slider.html
|
<dl class="tabs">
<dd><a href="#simple1">Geographical Map</a></dd>
<dd><a href="#simple2">List of committees</a></dd>
<dd><a href="#simple3">European Parliament hemicycle</a></dd>
</dl>
<ul class="tabs-content">
<li id="simple1Tab">
{% include "map-eu.html" %}
</li>
<li id="simple2Tab">
{% include "hemicycle-eu.html" %}
</li>
<li id="simple3Tab">
{% include "committees-eu.html" %}
</li>
</ul>
|
<dl class="tabs">
<dd class="active"><a href="#simple1">Geographical Map</a></dd>
<dd><a href="#simple2">List of committees</a></dd>
<dd><a href="#simple3">European Parliament hemicycle</a></dd>
</dl>
<ul class="tabs-content">
<li class="active" id="simple1Tab">
{% include "map-eu.html" %}
</li>
<li id="simple2Tab">
{% include "committees-eu.html" %}
</li>
<li id="simple3Tab">
{% include "hemicycle-eu.html" %}
</li>
</ul>
|
Add a default tab on home page (still a bug on the third tab...)
|
[fix] Add a default tab on home page (still a bug on the third tab...)
|
HTML
|
agpl-3.0
|
yohanboniface/memopol-core,yohanboniface/memopol-core,yohanboniface/memopol-core
|
html
|
## Code Before:
<dl class="tabs">
<dd><a href="#simple1">Geographical Map</a></dd>
<dd><a href="#simple2">List of committees</a></dd>
<dd><a href="#simple3">European Parliament hemicycle</a></dd>
</dl>
<ul class="tabs-content">
<li id="simple1Tab">
{% include "map-eu.html" %}
</li>
<li id="simple2Tab">
{% include "hemicycle-eu.html" %}
</li>
<li id="simple3Tab">
{% include "committees-eu.html" %}
</li>
</ul>
## Instruction:
[fix] Add a default tab on home page (still a bug on the third tab...)
## Code After:
<dl class="tabs">
<dd class="active"><a href="#simple1">Geographical Map</a></dd>
<dd><a href="#simple2">List of committees</a></dd>
<dd><a href="#simple3">European Parliament hemicycle</a></dd>
</dl>
<ul class="tabs-content">
<li class="active" id="simple1Tab">
{% include "map-eu.html" %}
</li>
<li id="simple2Tab">
{% include "committees-eu.html" %}
</li>
<li id="simple3Tab">
{% include "hemicycle-eu.html" %}
</li>
</ul>
|
b04d0e863c4387c147ef8e9e7d067f3b0e302148
|
src/app/views/sessions/session/session.component.less
|
src/app/views/sessions/session/session.component.less
|
.session-view {
position: fixed;
width: 100%;
height: calc(~"100vh - 85px");
}
.close-well-button {
position: relative;
right: -10px;
top: -15px;
}
.split-1 {
padding-left: 15px;
}
.split-2 {
padding-right: 15px;
padding-bottom: 15px;
}
:host ::ng-deep split-gutter {
background-color: white !important;
}
|
.session-view {
position: fixed;
width: 100%;
height: calc(~"100vh - 85px");
}
.close-well-button {
position: relative;
right: -10px;
top: -15px;
}
.split-1 {
padding-left: 15px;
}
.split-2 {
padding-right: 15px;
}
:host ::ng-deep split-gutter {
background-color: white !important;
}
|
Remove bottom padding from split2
|
Remove bottom padding from split2
|
Less
|
mit
|
chipster/chipster-web,chipster/chipster-web,chipster/chipster-web
|
less
|
## Code Before:
.session-view {
position: fixed;
width: 100%;
height: calc(~"100vh - 85px");
}
.close-well-button {
position: relative;
right: -10px;
top: -15px;
}
.split-1 {
padding-left: 15px;
}
.split-2 {
padding-right: 15px;
padding-bottom: 15px;
}
:host ::ng-deep split-gutter {
background-color: white !important;
}
## Instruction:
Remove bottom padding from split2
## Code After:
.session-view {
position: fixed;
width: 100%;
height: calc(~"100vh - 85px");
}
.close-well-button {
position: relative;
right: -10px;
top: -15px;
}
.split-1 {
padding-left: 15px;
}
.split-2 {
padding-right: 15px;
}
:host ::ng-deep split-gutter {
background-color: white !important;
}
|
30a4281f2602bd6b9d90d89375785a2645854a0d
|
enthought/enable2/pyglet_backend/pyglet_app.py
|
enthought/enable2/pyglet_backend/pyglet_app.py
|
from enthought.enable.pyglet_backend.pyglet_app import *
|
__all__ = ["get_app", "PygletApp"]
from enthought.enable.pyglet_backend.pyglet_app import *
# Import the objects which are not declared in __all__,
# but are still defined in the real module, such that people
# can import them explicitly when needed, just as they could
# with the real module.
#
# It is unlikely that someone will import these objects, since
# they start with '_'. However, the proxy's job is to mimic the
# behavior of the real module as closely as possible.
# The proxy's job is not to define or change the API.
#
from enthought.enable.pyglet_backend.pyglet_app import _CurrentApp, _PygletApp
|
Improve the proxy module which maps to a module which uses __all__.
|
Improve the proxy module which maps to a module which uses __all__.
The notes I made in the code apply to all proxy modules which map
to a module which uses __all__.
|
Python
|
bsd-3-clause
|
tommy-u/enable,tommy-u/enable,tommy-u/enable,tommy-u/enable
|
python
|
## Code Before:
from enthought.enable.pyglet_backend.pyglet_app import *
## Instruction:
Improve the proxy module which maps to a module which uses __all__.
The notes I made in the code apply to all proxy modules which map
to a module which uses __all__.
## Code After:
__all__ = ["get_app", "PygletApp"]
from enthought.enable.pyglet_backend.pyglet_app import *
# Import the objects which are not declared in __all__,
# but are still defined in the real module, such that people
# can import them explicitly when needed, just as they could
# with the real module.
#
# It is unlikely that someone will import these objects, since
# they start with '_'. However, the proxy's job is to mimic the
# behavior of the real module as closely as possible.
# The proxy's job is not to define or change the API.
#
from enthought.enable.pyglet_backend.pyglet_app import _CurrentApp, _PygletApp
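The comment block above turns on a detail of Python's star-import machinery: when a module defines `__all__`, `from module import *` binds only the listed names, and underscore-prefixed names are never picked up by a star import, so a proxy module has to re-import them explicitly to mirror the real module. A minimal, self-contained sketch of that behaviour (the module name `fake_backend` is invented purely for this example):

```python
# Write a tiny throwaway module to disk so the real import machinery is exercised.
import pathlib
import sys

pathlib.Path("fake_backend.py").write_text(
    '__all__ = ["get_app"]\n'
    "def get_app():\n"
    "    return 'app'\n"
    "class _PygletApp:\n"
    "    pass\n"
)
sys.path.insert(0, ".")

from fake_backend import *             # binds get_app only; _PygletApp is skipped
print("get_app" in globals())          # True
print("_PygletApp" in globals())       # False
from fake_backend import _PygletApp    # an explicit import still reaches it
print(_PygletApp.__name__)             # _PygletApp
```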
|
e5fe61936977bbad43dfeabb56df6da768ac3580
|
app/views/welcome/index.html.erb
|
app/views/welcome/index.html.erb
|
<br />
<p>
<%=t('hyrax.product_name') %> is a secure application that enables
<%=t('hyrax.institution.name') %> to efficiently carry out varied
and complex workflows related to digitization while adhereing to standards
and best practices which ensure that our content will be preserved and
accessible to our community and the world for generations.
</p>
<%= render partial: 'welcome/work_types', locals: { label: 'Books and Manuscripts',
types: [ScannedResource, MultiVolumeWork] } %>
<%= render partial: 'welcome/work_types', locals: { label: 'Maps and Geospatial Data',
types: [ImageWork, RasterWork, VectorWork] } %>
<%= render partial: 'welcome/work_types', locals: { label: 'Collections',
types: [Collection] } %>
|
<br />
<p>
<%=t('hyrax.product_name') %> is a secure application that enables
<%=t('hyrax.institution.name') %> to efficiently carry out varied
and complex workflows related to digitization while adhereing to standards
and best practices which ensure that our content will be preserved and
accessible to our community and the world for generations.
</p>
<%= render partial: 'welcome/work_types', locals: { label: 'Books, Manuscripts, and Scanned Documents',
types: [ScannedResource, MultiVolumeWork] } %>
<%= render partial: 'welcome/work_types', locals: { label: 'Maps and Geospatial Data',
types: [ImageWork, RasterWork, VectorWork] } %>
<%= render partial: 'welcome/work_types', locals: { label: 'Collections',
types: [Collection] } %>
|
Add Scanned Documents to scanned resource and multi-volume work heading on welcome page
|
Add Scanned Documents to scanned resource and multi-volume work heading on welcome page
|
HTML+ERB
|
apache-2.0
|
pulibrary/plum,pulibrary/plum,pulibrary/plum,pulibrary/plum
|
html+erb
|
## Code Before:
<br />
<p>
<%=t('hyrax.product_name') %> is a secure application that enables
<%=t('hyrax.institution.name') %> to efficiently carry out varied
and complex workflows related to digitization while adhereing to standards
and best practices which ensure that our content will be preserved and
accessible to our community and the world for generations.
</p>
<%= render partial: 'welcome/work_types', locals: { label: 'Books and Manuscripts',
types: [ScannedResource, MultiVolumeWork] } %>
<%= render partial: 'welcome/work_types', locals: { label: 'Maps and Geospatial Data',
types: [ImageWork, RasterWork, VectorWork] } %>
<%= render partial: 'welcome/work_types', locals: { label: 'Collections',
types: [Collection] } %>
## Instruction:
Add Scanned Documents to scanned resource and multi-volume work heading on welcome page
## Code After:
<br />
<p>
<%=t('hyrax.product_name') %> is a secure application that enables
<%=t('hyrax.institution.name') %> to efficiently carry out varied
and complex workflows related to digitization while adhereing to standards
and best practices which ensure that our content will be preserved and
accessible to our community and the world for generations.
</p>
<%= render partial: 'welcome/work_types', locals: { label: 'Books, Manuscripts, and Scanned Documents',
types: [ScannedResource, MultiVolumeWork] } %>
<%= render partial: 'welcome/work_types', locals: { label: 'Maps and Geospatial Data',
types: [ImageWork, RasterWork, VectorWork] } %>
<%= render partial: 'welcome/work_types', locals: { label: 'Collections',
types: [Collection] } %>
|
14778ffe45dfa5fe89914f1161ada0e7f20a8f43
|
Source/CoreData/Entities/DPerk.swift
|
Source/CoreData/Entities/DPerk.swift
|
import Foundation
extension DPerk
{
}
|
import Foundation
extension DPerk
{
var type:DPerkType
{
get
{
guard
let type:DPerkType = DPerkType(
rawValue:rawType)
else
{
return DPerkType.error
}
return type
}
set(newValue)
{
rawType = newValue.rawValue
}
}
}
|
Add computed property for perk type
|
Add computed property for perk type
|
Swift
|
mit
|
velvetroom/columbus
|
swift
|
## Code Before:
import Foundation
extension DPerk
{
}
## Instruction:
Add computed property for perk type
## Code After:
import Foundation
extension DPerk
{
var type:DPerkType
{
get
{
guard
let type:DPerkType = DPerkType(
rawValue:rawType)
else
{
return DPerkType.error
}
return type
}
set(newValue)
{
rawType = newValue.rawValue
}
}
}
|
644660b6c41f029f271a0b8866387f358f8fdf54
|
frappe/patches/v4_0/enable_scheduler_in_system_settings.py
|
frappe/patches/v4_0/enable_scheduler_in_system_settings.py
|
from __future__ import unicode_literals
import frappe
from frappe.utils.scheduler import disable_scheduler, enable_scheduler
def execute():
frappe.reload_doc("core", "doctype", "system_settings")
if frappe.db.get_global("disable_scheduler"):
disable_scheduler()
else:
enable_scheduler()
|
from __future__ import unicode_literals
import frappe
from frappe.utils.scheduler import disable_scheduler, enable_scheduler
from frappe.utils import cint
def execute():
frappe.reload_doc("core", "doctype", "system_settings")
if cint(frappe.db.get_global("disable_scheduler")):
disable_scheduler()
else:
enable_scheduler()
|
Fix in enable scheduler patch
|
Fix in enable scheduler patch
|
Python
|
mit
|
BhupeshGupta/frappe,letzerp/framework,saurabh6790/frappe,rmehta/frappe,elba7r/builder,suyashphadtare/sajil-frappe,rohitw1991/frappe,saguas/frappe,indictranstech/tele-frappe,nerevu/frappe,indictranstech/omnitech-frappe,vCentre/vFRP-6233,gangadharkadam/saloon_frappe,aboganas/frappe,indictranstech/phr-frappe,gangadharkadam/letzfrappe,hatwar/buyback-frappe,mbauskar/tele-frappe,erpletzerp/letzerpcore,Amber-Creative/amber-frappe,indictranstech/ebuy-now-frappe,elba7r/frameworking,vjFaLk/frappe,shitolepriya/test-frappe,mbauskar/Das_frappe,frappe/frappe,gangadharkadam/v4_frappe,gangadhar-kadam/helpdesk-frappe,vCentre/vFRP-6233,RicardoJohann/frappe,rohitw1991/frappe,ESS-LLP/frappe,nerevu/frappe,gangadharkadam/stfrappe,sbktechnology/trufil-frappe,rohitwaghchaure/frappe_smart,aboganas/frappe,StrellaGroup/frappe,manassolanki/frappe,drukhil/frappe,sbktechnology/sap_frappe,rmehta/frappe,indictranstech/Das_frappe,letzerp/framework,erpletzerp/letzerpcore,MaxMorais/frappe,drukhil/frappe,rohitwaghchaure/frappe-alec,ashokrajbathu/secondrep,hernad/frappe,jevonearth/frappe,suyashphadtare/propshikhari-frappe,BhupeshGupta/frappe,gangadharkadam/v4_frappe,indictranstech/fbd_frappe,gangadharkadam/tailorfrappe,hernad/frappe,shitolepriya/test-frappe,bcornwellmott/frappe,gangadhar-kadam/lgnlvefrape,sbkolate/sap_frappe_v6,indautgrp/frappe,indictranstech/frappe,mbauskar/omnitech-frappe,indictranstech/frappe,praba230890/frappe,gangadharkadam/v6_frappe,indictranstech/osmosis-frappe,pawaranand/phr-frappe,saurabh6790/frappe,rohitwaghchaure/frappe_smart,gangadharkadam/v4_frappe,indictranstech/osmosis-frappe,gangadharkadam/v6_frappe,gangadhar-kadam/verve_live_frappe,adityahase/frappe,bcornwellmott/frappe,almeidapaulopt/frappe,indictranstech/frappe-digitales,indictranstech/reciphergroup-frappe,mbauskar/omnitech-frappe,gangadharkadam/saloon_frappe,gangadhar-kadam/laganfrappe,gangadharkadam/vervefrappe,neilLasrado/frappe,mbauskar/frappe,gangadhar-kadam/verve_live_frappe,MaxMorais/frappe,saurabh6790/test-frappe,rohitw1991/smarttailorfrappe,StrellaGroup/frappe,gangadharkadam/vervefrappe,rohitwaghchaure/frappe-digitales,elba7r/builder,mbauskar/helpdesk-frappe,deveninfotech/deven-frappe,paurosello/frappe,mbauskar/frappe,adityahase/frappe,gangadhar-kadam/lgnlvefrape,indictranstech/trufil-frappe,indictranstech/omnitech-frappe,saurabh6790/test-frappe,ESS-LLP/frappe,RicardoJohann/frappe,pombredanne/frappe,mbauskar/phr-frappe,tundebabzy/frappe,tmimori/frappe,paurosello/frappe,anandpdoshi/frappe,neilLasrado/frappe,maxtorete/frappe,gangadharkadam/frappecontribution,gangadharkadam/frappecontribution,nerevu/frappe,maxtorete/frappe,Tejal011089/digitales_frappe,pawaranand/phr-frappe,saguas/frappe,mbauskar/omnitech-demo-frappe,sbkolate/sap_frappe_v6,sbktechnology/sap_frappe,indictranstech/trufil-frappe,gangadhar-kadam/smrterpfrappe,suyashphadtare/sajil-final-frappe,indictranstech/internal-frappe,pawaranand/phr-frappe,saguas/frappe,erpletzerp/letzerpcore,praba230890/frappe,Amber-Creative/amber-frappe,gangadhar-kadam/lgnlvefrape,indictranstech/osmosis-frappe,ShashaQin/frappe,indautgrp/frappe,gangadharkadam/saloon_frappe,indictranstech/phr-frappe,indictranstech/omnitech-frappe,suyashphadtare/propshikhari-frappe,frappe/frappe,nerevu/frappe,hatwar/buyback-frappe,sbktechnology/trufil-frappe,mbauskar/tele-frappe,aboganas/frappe,indictranstech/frappe-digitales,gangadhar-kadam/helpdesk-frappe,vjFaLk/frappe,almeidapaulopt/frappe,StrellaGroup/frappe,mbauskar/phr-frappe,mbauskar/helpdesk-frappe,indictranstech/frappe-digitales,tundebabzy/frappe,rohitwaghchaur
e/New_Theme_frappe,indictranstech/trufil-frappe,sbkolate/sap_frappe_v6,hernad/frappe,suyashphadtare/propshikhari-frappe,gangadharkadam/vlinkfrappe,geo-poland/frappe,indictranstech/omnitech-frappe,gangadhar-kadam/smrterpfrappe,gangadhar-kadam/verve_test_frappe,gangadhar-kadam/verve_test_frappe,rohitwaghchaure/frappe,gangadharkadam/office_frappe,gangadharkadam/letzfrappe,chdecultot/frappe,gangadharkadam/letzfrappe,gangadharkadam/saloon_frappe_install,indautgrp/frappe,MaxMorais/frappe,maxtorete/frappe,mhbu50/frappe,vCentre/vFRP-6233,chdecultot/frappe,almeidapaulopt/frappe,indictranstech/tele-frappe,jevonearth/frappe,hatwar/buyback-frappe,gangadharkadam/letzfrappe,mbauskar/Das_frappe,gangadharkadam/v6_frappe,saurabh6790/phr-frappe,gangadhar-kadam/verve_live_frappe,gangadharkadam/stfrappe,indictranstech/Das_frappe,gangadhar-kadam/helpdesk-frappe,mbauskar/phr-frappe,indictranstech/phr-frappe,aboganas/frappe,gangadharkadam/saloon_frappe_install,indictranstech/frappe,rohitwaghchaure/frappe-alec,paurosello/frappe,saurabh6790/phr-frappe,mbauskar/tele-frappe,mbauskar/omnitech-demo-frappe,bohlian/frappe,gangadhar-kadam/verve_frappe,paurosello/frappe,BhupeshGupta/frappe,ESS-LLP/frappe,mhbu50/frappe,chdecultot/frappe,rohitwaghchaure/frappe-alec,gangadhar-kadam/helpdesk-frappe,indictranstech/fbd_frappe,pranalik/frappe-bb,geo-poland/frappe,Tejal011089/digitales_frappe,rohitwaghchaure/vestasi-frappe,mhbu50/frappe,gangadharkadam/shfr,pombredanne/frappe,bohlian/frappe,pawaranand/phr_frappe,gangadhar-kadam/laganfrappe,bohlian/frappe,manassolanki/frappe,indictranstech/trufil-frappe,maxtorete/frappe,jevonearth/frappe,mbauskar/Das_frappe,rohitwaghchaure/frappe-digitales,tmimori/frappe,vqw/frappe,vjFaLk/frappe,saurabh6790/test-frappe,indictranstech/osmosis-frappe,mbauskar/helpdesk-frappe,pombredanne/frappe,vqw/frappe,bohlian/frappe,tmimori/frappe,letzerp/framework,ashokrajbathu/secondrep,geo-poland/frappe,RicardoJohann/frappe,manassolanki/frappe,bcornwellmott/frappe,gangadhar-kadam/verve_frappe,hatwar/buyback-frappe,pranalik/frappe-bb,gangadhar-kadam/verve_test_frappe,yashodhank/frappe,neilLasrado/frappe,gangadharkadam/smrtfrappe,rohitwaghchaure/vestasi-frappe,shitolepriya/test-frappe,indictranstech/ebuy-now-frappe,elba7r/builder,gangadharkadam/office_frappe,indictranstech/tele-frappe,gangadharkadam/johnfrappe,ShashaQin/frappe,indictranstech/reciphergroup-frappe,gangadharkadam/smrtfrappe,adityahase/frappe,vqw/frappe,reachalpineswift/frappe-bench,gangadharkadam/tailorfrappe,rohitwaghchaure/New_Theme_frappe,mbauskar/omnitech-demo-frappe,BhupeshGupta/frappe,gangadharkadam/v5_frappe,gangadharkadam/saloon_frappe_install,hernad/frappe,pawaranand/phr_frappe,indictranstech/tele-frappe,indictranstech/frappe-digitales,gangadharkadam/johnfrappe,gangadharkadam/frappecontribution,anandpdoshi/frappe,rohitwaghchaure/frappe-digitales,gangadhar-kadam/laganfrappe,shitolepriya/test-frappe,deveninfotech/deven-frappe,erpletzerp/letzerpcore,rmehta/frappe,pranalik/frappe-bb,chdecultot/frappe,indictranstech/reciphergroup-frappe,saurabh6790/phr-frappe,mbauskar/helpdesk-frappe,mbauskar/omnitech-frappe,indictranstech/ebuy-now-frappe,gangadharkadam/vervefrappe,indictranstech/Das_frappe,gangadhar-kadam/verve_frappe,vjFaLk/frappe,RicardoJohann/frappe,anandpdoshi/frappe,rohitwaghchaure/frappe,gangadharkadam/shfr,indictranstech/internal-frappe,indictranstech/internal-frappe,praba230890/frappe,gangadharkadam/v5_frappe,rohitwaghchaure/vestasi-frappe,mbauskar/frappe,suyashphadtare/propshikhari-frappe,reachalpineswift/frappe-bench,yashodhank/frappe,
indictranstech/reciphergroup-frappe,rohitwaghchaure/vestasi-frappe,indautgrp/frappe,drukhil/frappe,gangadhar-kadam/verve_test_frappe,adityahase/frappe,saurabh6790/phr-frappe,praba230890/frappe,sbktechnology/sap_frappe,elba7r/builder,rmehta/frappe,manassolanki/frappe,gangadharkadam/vlinkfrappe,ShashaQin/frappe,mbauskar/omnitech-demo-frappe,pombredanne/frappe,pawaranand/phr_frappe,tmimori/frappe,deveninfotech/deven-frappe,rohitw1991/smartfrappe,gangadharkadam/v4_frappe,gangadharkadam/vervefrappe,rohitwaghchaure/New_Theme_frappe,MaxMorais/frappe,gangadharkadam/vlinkfrappe,almeidapaulopt/frappe,mhbu50/frappe,saurabh6790/frappe,indictranstech/frappe,neilLasrado/frappe,rohitwaghchaure/frappe,pranalik/frappe-bb,gangadharkadam/v6_frappe,drukhil/frappe,suyashphadtare/sajil-frappe,yashodhank/frappe,vCentre/vFRP-6233,saurabh6790/frappe,elba7r/frameworking,indictranstech/ebuy-now-frappe,indictranstech/fbd_frappe,ShashaQin/frappe,sbkolate/sap_frappe_v6,rohitwaghchaure/frappe,reachalpineswift/frappe-bench,suyashphadtare/sajil-final-frappe,mbauskar/omnitech-frappe,ashokrajbathu/secondrep,saguas/frappe,bcornwellmott/frappe,Tejal011089/digitales_frappe,letzerp/framework,gangadharkadam/v5_frappe,ESS-LLP/frappe,deveninfotech/deven-frappe,suyashphadtare/sajil-final-frappe,saurabh6790/test-frappe,anandpdoshi/frappe,frappe/frappe,mbauskar/Das_frappe,gangadhar-kadam/verve_frappe,jevonearth/frappe,gangadhar-kadam/verve_live_frappe,reachalpineswift/frappe-bench,mbauskar/tele-frappe,sbktechnology/sap_frappe,rohitw1991/smartfrappe,elba7r/frameworking,mbauskar/phr-frappe,Tejal011089/digitales_frappe,rohitwaghchaure/frappe-digitales,gangadharkadam/v5_frappe,elba7r/frameworking,gangadharkadam/frappecontribution,indictranstech/fbd_frappe,indictranstech/phr-frappe,tundebabzy/frappe,suyashphadtare/sajil-frappe,gangadharkadam/vlinkfrappe,Amber-Creative/amber-frappe,tundebabzy/frappe,indictranstech/internal-frappe,gangadharkadam/office_frappe,mbauskar/frappe,Amber-Creative/amber-frappe,indictranstech/Das_frappe,rohitw1991/smarttailorfrappe,vqw/frappe,sbktechnology/trufil-frappe,sbktechnology/trufil-frappe,yashodhank/frappe,ashokrajbathu/secondrep,pawaranand/phr_frappe,gangadharkadam/saloon_frappe_install,gangadharkadam/saloon_frappe
|
python
|
## Code Before:
from __future__ import unicode_literals
import frappe
from frappe.utils.scheduler import disable_scheduler, enable_scheduler
def execute():
frappe.reload_doc("core", "doctype", "system_settings")
if frappe.db.get_global("disable_scheduler"):
disable_scheduler()
else:
enable_scheduler()
## Instruction:
Fix in enable scheduler patch
## Code After:
from __future__ import unicode_literals
import frappe
from frappe.utils.scheduler import disable_scheduler, enable_scheduler
from frappe.utils import cint
def execute():
frappe.reload_doc("core", "doctype", "system_settings")
if cint(frappe.db.get_global("disable_scheduler")):
disable_scheduler()
else:
enable_scheduler()
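The fix hinges on plain truthiness: a global fetched from the database usually comes back as a string, and the non-empty string "0" is truthy, so the unwrapped check would disable the scheduler even when it should stay enabled; coercing to an integer first gives the intended branch. A small sketch using a simplified stand-in for `cint` (the real Frappe helper likewise falls back to 0 on values it cannot parse):

```python
# Simplified stand-in for frappe.utils.cint: coerce to int, defaulting to 0.
def cint(value, default=0):
    try:
        return int(float(value))
    except (TypeError, ValueError):
        return default

stored = "0"                # a DB-backed global typically arrives as a string
print(bool(stored))         # True  -> `if stored:` would wrongly disable the scheduler
print(bool(cint(stored)))   # False -> `if cint(stored):` keeps it enabled
print(cint(None), cint("1"), cint("oops"))   # 0 1 0
```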
|
2d8a3b8c9ca6196317758e58cefc76163b88607f
|
falcom/table.py
|
falcom/table.py
|
class Table:
class InputStrContainsCarriageReturn (RuntimeError):
pass
def __init__ (self, tab_separated_text = None):
if tab_separated_text and "\r" in tab_separated_text:
raise self.InputStrContainsCarriageReturn
self.text = tab_separated_text
if self.text:
self.text = self.text.rstrip("\n")
@property
def rows (self):
return len(self)
@property
def cols (self):
return len(self.text.split("\n")[0].split("\t")) if self.text else 0
def __len__ (self):
return len(self.text.split("\n")) if self.text else 0
def __iter__ (self):
if self.text:
for row in self.text.split("\n"):
yield(tuple(row.split("\t")))
else:
return iter(())
def __getitem__ (self, key):
if self.text:
return tuple(self.text.split("\n")[key].split("\t"))
else:
raise IndexError
def __repr__ (self):
return "<{} {}>".format(self.__class__.__name__,
repr(self.text))
|
class Table:
class InputStrContainsCarriageReturn (RuntimeError):
pass
def __init__ (self, tab_separated_text = None):
if tab_separated_text:
if "\r" in tab_separated_text:
raise self.InputStrContainsCarriageReturn
self.text = tab_separated_text.rstrip("\n")
else:
self.text = tab_separated_text
@property
def rows (self):
return len(self)
@property
def cols (self):
return len(self.text.split("\n")[0].split("\t")) if self.text else 0
def __len__ (self):
return len(self.text.split("\n")) if self.text else 0
def __iter__ (self):
if self.text:
for row in self.text.split("\n"):
yield(tuple(row.split("\t")))
else:
return iter(())
def __getitem__ (self, key):
if self.text:
return tuple(self.text.split("\n")[key].split("\t"))
else:
raise IndexError
def __repr__ (self):
return "<{} {}>".format(self.__class__.__name__,
repr(self.text))
|
Move assignments in Table.__init__ around
|
Move assignments in Table.__init__ around
|
Python
|
bsd-3-clause
|
mlibrary/image-conversion-and-validation,mlibrary/image-conversion-and-validation
|
python
|
## Code Before:
class Table:
class InputStrContainsCarriageReturn (RuntimeError):
pass
def __init__ (self, tab_separated_text = None):
if tab_separated_text and "\r" in tab_separated_text:
raise self.InputStrContainsCarriageReturn
self.text = tab_separated_text
if self.text:
self.text = self.text.rstrip("\n")
@property
def rows (self):
return len(self)
@property
def cols (self):
return len(self.text.split("\n")[0].split("\t")) if self.text else 0
def __len__ (self):
return len(self.text.split("\n")) if self.text else 0
def __iter__ (self):
if self.text:
for row in self.text.split("\n"):
yield(tuple(row.split("\t")))
else:
return iter(())
def __getitem__ (self, key):
if self.text:
return tuple(self.text.split("\n")[key].split("\t"))
else:
raise IndexError
def __repr__ (self):
return "<{} {}>".format(self.__class__.__name__,
repr(self.text))
## Instruction:
Move assignments in Table.__init__ around
## Code After:
class Table:
class InputStrContainsCarriageReturn (RuntimeError):
pass
def __init__ (self, tab_separated_text = None):
if tab_separated_text:
if "\r" in tab_separated_text:
raise self.InputStrContainsCarriageReturn
self.text = tab_separated_text.rstrip("\n")
else:
self.text = tab_separated_text
@property
def rows (self):
return len(self)
@property
def cols (self):
return len(self.text.split("\n")[0].split("\t")) if self.text else 0
def __len__ (self):
return len(self.text.split("\n")) if self.text else 0
def __iter__ (self):
if self.text:
for row in self.text.split("\n"):
yield(tuple(row.split("\t")))
else:
return iter(())
def __getitem__ (self, key):
if self.text:
return tuple(self.text.split("\n")[key].split("\t"))
else:
raise IndexError
def __repr__ (self):
return "<{} {}>".format(self.__class__.__name__,
repr(self.text))
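A minimal sketch of the reordered constructor guard shown in this record, using a hypothetical `TabText` class rather than the repository's `Table`: validation and normalisation both sit inside a single truthiness check, so `None` and the empty string skip straight through.

```python
class TabText:
    """Illustrative only; mirrors the guard shape of the Code After above."""

    class InputStrContainsCarriageReturn(RuntimeError):
        pass

    def __init__(self, tab_separated_text=None):
        if tab_separated_text:
            if "\r" in tab_separated_text:
                raise self.InputStrContainsCarriageReturn
            # strip trailing newlines only when there is text to strip
            self.text = tab_separated_text.rstrip("\n")
        else:
            self.text = tab_separated_text  # None or "" stored untouched


assert TabText("a\tb\n").text == "a\tb"
assert TabText(None).text is None
```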
|
972a4760f10299cb2a5e2e7969665e4665ff3bcc
|
deploy/GetFiles.ps1
|
deploy/GetFiles.ps1
|
$binPath = "..\src\RoslynPad\bin\Release"
$exclude =
@(
"Xceed.Wpf.AvalonDock.Themes.Aero.dll",
"Xceed.Wpf.AvalonDock.Themes.Metro.dll",
"Xceed.Wpf.AvalonDock.Themes.VS2010.dll",
"Xceed.Wpf.DataGrid.dll"
);
$files = get-childitem "$location\$binPath\*.dll" | select -ExpandProperty Name | where { $exclude -notcontains $_ }
$files +=
@(
"RoslynPad.exe",
"RoslynPad.Host32.exe",
"RoslynPad.Host64.exe",
"RoslynPad.exe.config"
)
|
$binPath = "..\src\RoslynPad\bin\Release"
$exclude =
@(
"Xceed.Wpf.AvalonDock.Themes.Aero.dll",
"Xceed.Wpf.AvalonDock.Themes.Metro.dll",
"Xceed.Wpf.AvalonDock.Themes.VS2010.dll",
"Xceed.Wpf.DataGrid.dll"
);
$files = get-childitem "$location\$binPath\*.dll" | select -ExpandProperty Name | where { $exclude -notcontains $_ }
$files +=
@(
"RoslynPad.exe",
"RoslynPad.Host32.exe",
"RoslynPad.Host64.exe",
"RoslynPad.exe.config",
"RoslynPad.Host32.exe.config",
"RoslynPad.Host64.exe.config"
)
|
Add config files to deployment
|
Add config files to deployment
|
PowerShell
|
apache-2.0
|
aelij/roslynpad
|
powershell
|
## Code Before:
$binPath = "..\src\RoslynPad\bin\Release"
$exclude =
@(
"Xceed.Wpf.AvalonDock.Themes.Aero.dll",
"Xceed.Wpf.AvalonDock.Themes.Metro.dll",
"Xceed.Wpf.AvalonDock.Themes.VS2010.dll",
"Xceed.Wpf.DataGrid.dll"
);
$files = get-childitem "$location\$binPath\*.dll" | select -ExpandProperty Name | where { $exclude -notcontains $_ }
$files +=
@(
"RoslynPad.exe",
"RoslynPad.Host32.exe",
"RoslynPad.Host64.exe",
"RoslynPad.exe.config"
)
## Instruction:
Add config files to deployment
## Code After:
$binPath = "..\src\RoslynPad\bin\Release"
$exclude =
@(
"Xceed.Wpf.AvalonDock.Themes.Aero.dll",
"Xceed.Wpf.AvalonDock.Themes.Metro.dll",
"Xceed.Wpf.AvalonDock.Themes.VS2010.dll",
"Xceed.Wpf.DataGrid.dll"
);
$files = get-childitem "$location\$binPath\*.dll" | select -ExpandProperty Name | where { $exclude -notcontains $_ }
$files +=
@(
"RoslynPad.exe",
"RoslynPad.Host32.exe",
"RoslynPad.Host64.exe",
"RoslynPad.exe.config",
"RoslynPad.Host32.exe.config",
"RoslynPad.Host64.exe.config"
)
|
0ce14be170e09530b225f2f7526ad68ee1758095
|
peering/migrations/0027_auto_20190105_1600.py
|
peering/migrations/0027_auto_20190105_1600.py
|
import django.contrib.postgres.fields
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
(
"peering",
"0026_autonomoussystem_potential_internet_exchange_peering_sessions",
)
]
operations = [
migrations.AlterField(
model_name="autonomoussystem",
name="potential_internet_exchange_peering_sessions",
field=django.contrib.postgres.fields.ArrayField(
base_field=models.GenericIPAddressField(),
blank=True,
default=list,
size=None,
),
)
]
|
import django.contrib.postgres.fields
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
(
"peering",
"0026_autonomoussystem_potential_internet_exchange_peering_sessions",
)
]
def forwards_func(apps, schema_editor):
AutonomousSystem = apps.get_model("peering", "AutonomousSystem")
db_alias = schema_editor.connection.alias
AutonomousSystem.objects.using(db_alias).filter(
potential_internet_exchange_peering_sessions=None
).update(potential_internet_exchange_peering_sessions=[])
def reverse_func(apps, schema_editor):
AutonomousSystem = apps.get_model("peering", "AutonomousSystem")
db_alias = schema_editor.connection.alias
AutonomousSystem.objects.using(db_alias).filter(
potential_internet_exchange_peering_sessions=[]
).update(potential_internet_exchange_peering_sessions=None)
operations = [
migrations.AlterField(
model_name="autonomoussystem",
name="potential_internet_exchange_peering_sessions",
field=django.contrib.postgres.fields.ArrayField(
base_field=models.GenericIPAddressField(),
blank=True,
default=list,
size=None,
),
),
migrations.RunPython(forwards_func, reverse_func),
]
|
Fix issue with migrations introduced lately.
|
Fix issue with migrations introduced lately.
|
Python
|
apache-2.0
|
respawner/peering-manager,respawner/peering-manager,respawner/peering-manager,respawner/peering-manager
|
python
|
## Code Before:
import django.contrib.postgres.fields
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
(
"peering",
"0026_autonomoussystem_potential_internet_exchange_peering_sessions",
)
]
operations = [
migrations.AlterField(
model_name="autonomoussystem",
name="potential_internet_exchange_peering_sessions",
field=django.contrib.postgres.fields.ArrayField(
base_field=models.GenericIPAddressField(),
blank=True,
default=list,
size=None,
),
)
]
## Instruction:
Fix issue with migrations introduced lately.
## Code After:
import django.contrib.postgres.fields
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
(
"peering",
"0026_autonomoussystem_potential_internet_exchange_peering_sessions",
)
]
def forwards_func(apps, schema_editor):
AutonomousSystem = apps.get_model("peering", "AutonomousSystem")
db_alias = schema_editor.connection.alias
AutonomousSystem.objects.using(db_alias).filter(
potential_internet_exchange_peering_sessions=None
).update(potential_internet_exchange_peering_sessions=[])
def reverse_func(apps, schema_editor):
AutonomousSystem = apps.get_model("peering", "AutonomousSystem")
db_alias = schema_editor.connection.alias
AutonomousSystem.objects.using(db_alias).filter(
potential_internet_exchange_peering_sessions=[]
).update(potential_internet_exchange_peering_sessions=None)
operations = [
migrations.AlterField(
model_name="autonomoussystem",
name="potential_internet_exchange_peering_sessions",
field=django.contrib.postgres.fields.ArrayField(
base_field=models.GenericIPAddressField(),
blank=True,
default=list,
size=None,
),
),
migrations.RunPython(forwards_func, reverse_func),
]
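The RunPython step added here is the standard reversible data-migration shape: backfill existing NULL rows so they satisfy the stricter ArrayField definition, with a reverse function that undoes the backfill. A generic sketch of that shape, with entirely hypothetical app, model, field, and migration names:

```python
from django.db import migrations


def forwards(apps, schema_editor):
    # use the historical model so the migration stays valid as the code evolves
    Item = apps.get_model("myapp", "Item")
    Item.objects.using(schema_editor.connection.alias).filter(tags=None).update(tags=[])


def backwards(apps, schema_editor):
    Item = apps.get_model("myapp", "Item")
    Item.objects.using(schema_editor.connection.alias).filter(tags=[]).update(tags=None)


class Migration(migrations.Migration):
    dependencies = [("myapp", "0001_initial")]
    operations = [migrations.RunPython(forwards, backwards)]
```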
|
b6402d6116fbea0d9368931db3e3415028ac9035
|
presets/node.js
|
presets/node.js
|
const baseRules = [
'eslint:recommended',
].concat([
'../src/rules/ava',
'../src/rules/best-practices',
'../src/rules/errors',
'../src/rules/es6',
'../src/rules/imports',
'../src/rules/jsdoc',
'../src/rules/node',
'../src/rules/react',
'../src/rules/strict',
'../src/rules/style',
'../src/rules/variables',
].map(require.resolve))
module.exports = {
parser: 'babel-eslint',
env: {
node: true
},
extends: baseRules,
parserOptions: {
ecmaVersion: 7,
sourceType: 'module',
}
}
|
const baseRules = [
'eslint:recommended',
].concat([
'../src/rules/ava',
'../src/rules/best-practices',
'../src/rules/errors',
'../src/rules/es6',
'../src/rules/imports',
'../src/rules/jsdoc',
'../src/rules/node',
'../src/rules/react',
'../src/rules/strict',
'../src/rules/style',
'../src/rules/variables',
].map(require.resolve))
module.exports = {
parser: 'babel-eslint',
env: {
node: true,
browser: true,
},
extends: baseRules,
parserOptions: {
ecmaVersion: 7,
sourceType: 'module',
},
}
|
Add env browser for browser globals support
|
Add env browser for browser globals support
|
JavaScript
|
mit
|
labs42/eslint-config-labs42
|
javascript
|
## Code Before:
const baseRules = [
'eslint:recommended',
].concat([
'../src/rules/ava',
'../src/rules/best-practices',
'../src/rules/errors',
'../src/rules/es6',
'../src/rules/imports',
'../src/rules/jsdoc',
'../src/rules/node',
'../src/rules/react',
'../src/rules/strict',
'../src/rules/style',
'../src/rules/variables',
].map(require.resolve))
module.exports = {
parser: 'babel-eslint',
env: {
node: true
},
extends: baseRules,
parserOptions: {
ecmaVersion: 7,
sourceType: 'module',
}
}
## Instruction:
Add env browser for browser globals support
## Code After:
const baseRules = [
'eslint:recommended',
].concat([
'../src/rules/ava',
'../src/rules/best-practices',
'../src/rules/errors',
'../src/rules/es6',
'../src/rules/imports',
'../src/rules/jsdoc',
'../src/rules/node',
'../src/rules/react',
'../src/rules/strict',
'../src/rules/style',
'../src/rules/variables',
].map(require.resolve))
module.exports = {
parser: 'babel-eslint',
env: {
node: true,
browser: true,
},
extends: baseRules,
parserOptions: {
ecmaVersion: 7,
sourceType: 'module',
},
}
|
7b9f5a209e5d51378143a8bf1aa0c22b1790bcaa
|
data/fedora-update.yaml
|
data/fedora-update.yaml
|
python:
status: dropped
note: |
This is the Python 2 package.
Please port to Python 3.
python-kerberos:
status: dropped
note: |
Suggested replacement: `python-gssapi`
python-krbV:
status: dropped
note: |
Suggested replacement: `python-gssapi`
m2crypto:
status: dropped
note: |
Suggested replacement: `python-cryptography`
cmake:
status: released
note: |
Does not depend on Python
|
python:
status: dropped
note: |
This is the Python 2 package.
Please port to Python 3.
python-kerberos:
status: dropped
note: |
Suggested replacement: `python-gssapi`
python-krbV:
status: dropped
note: |
Suggested replacement: `python-gssapi`
m2crypto:
status: dropped
note: |
Suggested replacement: `python-cryptography`
pygtk2:
status: dropped
note: |
Suggested replacement: `python-gobject`
pygobject2:
status: dropped
note: |
Suggested replacement: `python-gobject`
cmake:
status: released
note: |
Does not depend on Python
|
Add several more packages as dropped
|
Add several more packages as dropped
|
YAML
|
mit
|
ari3s/portingdb,irushchyshyn/portingdb,sYnfo/portingdb,sYnfo/portingdb,fedora-python/portingdb,irushchyshyn/portingdb,ari3s/portingdb,fedora-python/portingdb,sYnfo/portingdb,irushchyshyn/portingdb,irushchyshyn/portingdb,fedora-python/portingdb,ari3s/portingdb,ari3s/portingdb
|
yaml
|
## Code Before:
python:
status: dropped
note: |
This is the Python 2 package.
Please port to Python 3.
python-kerberos:
status: dropped
note: |
Suggested replacement: `python-gssapi`
python-krbV:
status: dropped
note: |
Suggested replacement: `python-gssapi`
m2crypto:
status: dropped
note: |
Suggested replacement: `python-cryptography`
cmake:
status: released
note: |
Does not depend on Python
## Instruction:
Add several more packages as dropped
## Code After:
python:
status: dropped
note: |
This is the Python 2 package.
Please port to Python 3.
python-kerberos:
status: dropped
note: |
Suggested replacement: `python-gssapi`
python-krbV:
status: dropped
note: |
Suggested replacement: `python-gssapi`
m2crypto:
status: dropped
note: |
Suggested replacement: `python-cryptography`
pygtk2:
status: dropped
note: |
Suggested replacement: `python-gobject`
pygobject2:
status: dropped
note: |
Suggested replacement: `python-gobject`
cmake:
status: released
note: |
Does not depend on Python
|
d54150b9a86c3cc0a08376e3b1cf9c223a7c0096
|
index.js
|
index.js
|
;(_ => {
'use strict';
var tagContent = 'router2-content';
function matchHash() {
var containers = document.querySelectorAll(`${tagContent}:not([hidden])`);
var container;
for (var i = 0; i < containers.length; i++) {
containers[i].hidden = true;
}
var hash = window.location.hash.slice(1);
// nothing to unhide...
if (!hash) {
return;
}
// this selector selects the children items too... that's incorrect
var containers = document.querySelectorAll(`${tagContent}`);
for (var i = 0; i < containers.length; i++) {
container = containers[i];
var matcher = new RegExp(`^${container.getAttribute('hash')}`);
var match = matcher.test(hash);
if (match) {
container.hidden = false;
return;
}
}
throw new Error(`hash "${hash}" does not match any content`);
}
window.addEventListener('hashchange', (e) => {
matchHash();
});
window.addEventListener('load', (e) => {
matchHash();
});
})();
|
;(_ => {
'use strict';
var tagContent = 'router2-content';
function matchHash(parent, hash) {
var containers;
var container;
var _hash = hash || window.location.hash;
if (!parent) {
containers = document.querySelectorAll(`${tagContent}:not([hidden])`);
for (var i = 0; i < containers.length; i++) {
containers[i].hidden = true;
}
_hash = _hash.slice(1);
// nothing to unhide...
if (!_hash) {
return;
}
containers = document.querySelectorAll(`${tagContent}`);
} else {
containers = parent.querySelectorAll(`${tagContent}`);
if (_hash[0] === '/') {
_hash = _hash.slice(1);
}
if (containers.length === 0) {
return;
}
}
// this selector selects the children items too... that's incorrect
for (var i = 0; i < containers.length; i++) {
container = containers[i];
var matcher = new RegExp(`^${container.getAttribute('hash')}`);
var match = matcher.test(_hash);
if (match) {
container.hidden = false;
matchHash(container, _hash.split(matcher)[1]);
return;
}
}
throw new Error(`hash "${_hash}" does not match any content`);
}
window.addEventListener('hashchange', (e) => {
matchHash();
});
window.addEventListener('load', (e) => {
matchHash();
});
})();
|
Complete the test for case 3
|
Complete the test for case 3
|
JavaScript
|
isc
|
m3co/router3,m3co/router3
|
javascript
|
## Code Before:
;(_ => {
'use strict';
var tagContent = 'router2-content';
function matchHash() {
var containers = document.querySelectorAll(`${tagContent}:not([hidden])`);
var container;
for (var i = 0; i < containers.length; i++) {
containers[i].hidden = true;
}
var hash = window.location.hash.slice(1);
// nothing to unhide...
if (!hash) {
return;
}
// this selector selects the children items too... that's incorrect
var containers = document.querySelectorAll(`${tagContent}`);
for (var i = 0; i < containers.length; i++) {
container = containers[i];
var matcher = new RegExp(`^${container.getAttribute('hash')}`);
var match = matcher.test(hash);
if (match) {
container.hidden = false;
return;
}
}
throw new Error(`hash "${hash}" does not match any content`);
}
window.addEventListener('hashchange', (e) => {
matchHash();
});
window.addEventListener('load', (e) => {
matchHash();
});
})();
## Instruction:
Complete the test for case 3
## Code After:
;(_ => {
'use strict';
var tagContent = 'router2-content';
function matchHash(parent, hash) {
var containers;
var container;
var _hash = hash || window.location.hash;
if (!parent) {
containers = document.querySelectorAll(`${tagContent}:not([hidden])`);
for (var i = 0; i < containers.length; i++) {
containers[i].hidden = true;
}
_hash = _hash.slice(1);
// nothing to unhide...
if (!_hash) {
return;
}
containers = document.querySelectorAll(`${tagContent}`);
} else {
containers = parent.querySelectorAll(`${tagContent}`);
if (_hash[0] === '/') {
_hash = _hash.slice(1);
}
if (containers.length === 0) {
return;
}
}
// this selector selects the children items too... that's incorrect
for (var i = 0; i < containers.length; i++) {
container = containers[i];
var matcher = new RegExp(`^${container.getAttribute('hash')}`);
var match = matcher.test(_hash);
if (match) {
container.hidden = false;
matchHash(container, _hash.split(matcher)[1]);
return;
}
}
throw new Error(`hash "${_hash}" does not match any content`);
}
window.addEventListener('hashchange', (e) => {
matchHash();
});
window.addEventListener('load', (e) => {
matchHash();
});
})();
|
ba6d4125830adb7308588947a5e91cfbdb49c00a
|
DataCollector/MemcachedDataCollector.php
|
DataCollector/MemcachedDataCollector.php
|
<?php
namespace Blablacar\MemcachedBundle\DataCollector;
use Symfony\Component\HttpKernel\DataCollector\DataCollector;
use Symfony\Component\HttpFoundation\Request;
use Symfony\Component\HttpFoundation\Response;
use Blablacar\MemcachedBundle\Memcached\ClientLogger;
class MemcachedDataCollector extends DataCollector
{
protected $clients = array();
public function addClient($name, ClientLogger $client)
{
$this->clients[$name] = $client;
}
/**
* {@inheritDoc}
*/
public function collect(Request $request, Response $response, \Exception $exception = null)
{
foreach ($this->clients as $name => $client) {
foreach ($client->getCommands() as $command) {
$this->data[] = array(
'command' => $command['name'],
'arguments' => implode(', ', $command['arguments']),
'duration' => $command['duration'],
'connection' => $name,
'return' => implode(', ', $command['return'])
);
}
$client->reset();
}
}
/**
* {@inheritDoc}
*/
public function getName()
{
return 'memcached';
}
/**
* getCommands
*
* @return array
*/
public function getCommands()
{
return $this->data;
}
/**
* getDuration
*
* @return int
*/
public function getDuration()
{
$time = 0;
foreach ($this->data as $data) {
$time += $data['duration'];
}
return $time;
}
}
|
<?php
namespace Blablacar\MemcachedBundle\DataCollector;
use Symfony\Component\HttpKernel\DataCollector\DataCollector;
use Symfony\Component\HttpFoundation\Request;
use Symfony\Component\HttpFoundation\Response;
use Blablacar\MemcachedBundle\Memcached\ClientLogger;
class MemcachedDataCollector extends DataCollector
{
protected $clients = array();
public function addClient($name, ClientLogger $client)
{
$this->clients[$name] = $client;
}
/**
* {@inheritDoc}
*/
public function collect(Request $request, Response $response, \Exception $exception = null)
{
foreach ($this->clients as $name => $client) {
foreach ($client->getCommands() as $command) {
$this->data[] = array(
'command' => $command['name'],
'arguments' => implode(', ', $command['arguments']),
'duration' => $command['duration'],
'connection' => $name,
'return' => implode(', ', $command['return'])
);
}
$client->reset();
}
}
/**
* {@inheritDoc}
*/
public function getName()
{
return 'memcached';
}
/**
* getCommands
*
* @return array
*/
public function getCommands()
{
return $this->data;
}
/**
* getDuration
*
* @return int
*/
public function getDuration()
{
if (null === $this->data) {
return 0;
}
$time = 0;
foreach ($this->data as $data) {
$time += $data['duration'];
}
return $time;
}
}
|
Correct duration when no commands is logged
|
Correct duration when no commands is logged
|
PHP
|
mit
|
odolbeau/BlablacarMemcachedBundle,odolbeau/BlablacarMemcachedBundle,blablacar/BlablacarMemcachedBundle,blablacar/BlablacarMemcachedBundle
|
php
|
## Code Before:
<?php
namespace Blablacar\MemcachedBundle\DataCollector;
use Symfony\Component\HttpKernel\DataCollector\DataCollector;
use Symfony\Component\HttpFoundation\Request;
use Symfony\Component\HttpFoundation\Response;
use Blablacar\MemcachedBundle\Memcached\ClientLogger;
class MemcachedDataCollector extends DataCollector
{
protected $clients = array();
public function addClient($name, ClientLogger $client)
{
$this->clients[$name] = $client;
}
/**
* {@inheritDoc}
*/
public function collect(Request $request, Response $response, \Exception $exception = null)
{
foreach ($this->clients as $name => $client) {
foreach ($client->getCommands() as $command) {
$this->data[] = array(
'command' => $command['name'],
'arguments' => implode(', ', $command['arguments']),
'duration' => $command['duration'],
'connection' => $name,
'return' => implode(', ', $command['return'])
);
}
$client->reset();
}
}
/**
* {@inheritDoc}
*/
public function getName()
{
return 'memcached';
}
/**
* getCommands
*
* @return array
*/
public function getCommands()
{
return $this->data;
}
/**
* getDuration
*
* @return int
*/
public function getDuration()
{
$time = 0;
foreach ($this->data as $data) {
$time += $data['duration'];
}
return $time;
}
}
## Instruction:
Correct duration when no commands is logged
## Code After:
<?php
namespace Blablacar\MemcachedBundle\DataCollector;
use Symfony\Component\HttpKernel\DataCollector\DataCollector;
use Symfony\Component\HttpFoundation\Request;
use Symfony\Component\HttpFoundation\Response;
use Blablacar\MemcachedBundle\Memcached\ClientLogger;
class MemcachedDataCollector extends DataCollector
{
protected $clients = array();
public function addClient($name, ClientLogger $client)
{
$this->clients[$name] = $client;
}
/**
* {@inheritDoc}
*/
public function collect(Request $request, Response $response, \Exception $exception = null)
{
foreach ($this->clients as $name => $client) {
foreach ($client->getCommands() as $command) {
$this->data[] = array(
'command' => $command['name'],
'arguments' => implode(', ', $command['arguments']),
'duration' => $command['duration'],
'connection' => $name,
'return' => implode(', ', $command['return'])
);
}
$client->reset();
}
}
/**
* {@inheritDoc}
*/
public function getName()
{
return 'memcached';
}
/**
* getCommands
*
* @return array
*/
public function getCommands()
{
return $this->data;
}
/**
* getDuration
*
* @return int
*/
public function getDuration()
{
if (null === $this->data) {
return 0;
}
$time = 0;
foreach ($this->data as $data) {
$time += $data['duration'];
}
return $time;
}
}
|
a760b00c88476d2d0f1822baff7ada18d59edb0f
|
test/project/html/partials/erb.html.erb
|
test/project/html/partials/erb.html.erb
|
<h1>Partials in document</h1>
<h2>ERB</h2>
<%= partial "test/erb", :locals => {:value => "value"} %>
<h2>MD</h2>
<%= partial "test/markdown" %>
<h2>Mockup</h2>
<%= partial "test/mockup" %>
|
<h1>Partials in document</h1>
<h2>ERB</h2>
<%= partial "test/erb", :locals => {:value => "value"} %>
<h2>MD</h2>
<%= partial "test/markdown" %>
<h2>Mockup</h2>
<%= partial "test/mockup", :locals => {:value => "value"} %>
|
Test rendering of old style partials with locals
|
Test rendering of old style partials with locals
|
HTML+ERB
|
mit
|
DigitPaint/html_mockup,DigitPaint/roger,DigitPaint/roger
|
html+erb
|
## Code Before:
<h1>Partials in document</h1>
<h2>ERB</h2>
<%= partial "test/erb", :locals => {:value => "value"} %>
<h2>MD</h2>
<%= partial "test/markdown" %>
<h2>Mockup</h2>
<%= partial "test/mockup" %>
## Instruction:
Test rendering of old style partials with locals
## Code After:
<h1>Partials in document</h1>
<h2>ERB</h2>
<%= partial "test/erb", :locals => {:value => "value"} %>
<h2>MD</h2>
<%= partial "test/markdown" %>
<h2>Mockup</h2>
<%= partial "test/mockup", :locals => {:value => "value"} %>
|
67220449f36b92de9b8d86524fa4dc4a24c3311b
|
piface/test_piface.sh
|
piface/test_piface.sh
|
python /usr/share/doc/python-pifacedigitalio/examples/blink.py
|
piface_examples_dir=/usr/share/doc/python-pifacedigitalio/examples
echo "available examples - run with python:"
find $piface_examples_dir -type f
echo "\nAn LED should now blink - hit CTRL+C to stop it"
python $piface_examples_dir/blink.py
|
Add more info to the output when testing piface
|
Add more info to the output when testing piface
|
Shell
|
mit
|
claremacrae/raspi_code,claremacrae/raspi_code,claremacrae/raspi_code
|
shell
|
## Code Before:
python /usr/share/doc/python-pifacedigitalio/examples/blink.py
## Instruction:
Add more info to the output when testing piface
## Code After:
piface_examples_dir=/usr/share/doc/python-pifacedigitalio/examples
echo "available examples - run with python:"
find $piface_examples_dir -type f
echo "\nAn LED should now blink - hit CTRL+C to stop it"
python $piface_examples_dir/blink.py
|
e93d77ad5fa75c04ddd38172d051d62111e0f88f
|
challenge_1/solution.rb
|
challenge_1/solution.rb
|
require 'test/unit'
extend Test::Unit::Assertions
def getInfo(test=false)
puts "What's your name?"
name = test ? "max" : gets.chomp()
puts "What's your age?"
age = test ? 26 : gets.chomp()
puts "What's your reddit username?"
redditUsername = test ? "iammax" : gets.chomp()
result = "Your name is #{name}, your age is #{age}, and your reddit username is #{redditUsername}."
puts result if !test
return result
end
def testGetInfo
shouldBeOutput = "Your name is max, your age is 26, and your reddit username is iammax."
assert_equal shouldBeOutput, getInfo(true)
end
testGetInfo()
getInfo()
|
require 'test/unit'
extend Test::Unit::Assertions
def get_info(test = false)
puts "What's your name?"
name = test ? 'max' : gets.chomp
puts "What's your age?"
age = test ? 26 : gets.chomp
puts "What's your reddit username?"
reddit_username = test ? 'iammax' : gets.chomp
result = 'Your name is #{name}, your age is #{age}, '\
'and your reddit username is #{reddit_username}.'
puts result unless test
end
def test_get_info
should_be_output = 'Your name is max, your age is 26, '\
'and your reddit username is iammax.'
assert_equal should_be_output, get_info(true)
end
test_get_info
get_info
|
Save files before refreshing line endings
|
Save files before refreshing line endings
|
Ruby
|
apache-2.0
|
maxArturo/programmerDaily
|
ruby
|
## Code Before:
require 'test/unit'
extend Test::Unit::Assertions
def getInfo(test=false)
puts "What's your name?"
name = test ? "max" : gets.chomp()
puts "What's your age?"
age = test ? 26 : gets.chomp()
puts "What's your reddit username?"
redditUsername = test ? "iammax" : gets.chomp()
result = "Your name is #{name}, your age is #{age}, and your reddit username is #{redditUsername}."
puts result if !test
return result
end
def testGetInfo
shouldBeOutput = "Your name is max, your age is 26, and your reddit username is iammax."
assert_equal shouldBeOutput, getInfo(true)
end
testGetInfo()
getInfo()
## Instruction:
Save files before refreshing line endings
## Code After:
require 'test/unit'
extend Test::Unit::Assertions
def get_info(test = false)
puts "What's your name?"
name = test ? 'max' : gets.chomp
puts "What's your age?"
age = test ? 26 : gets.chomp
puts "What's your reddit username?"
reddit_username = test ? 'iammax' : gets.chomp
result = 'Your name is #{name}, your age is #{age}, '\
'and your reddit username is #{reddit_username}.'
puts result unless test
end
def test_get_info
should_be_output = 'Your name is max, your age is 26, '\
'and your reddit username is iammax.'
assert_equal should_be_output, get_info(true)
end
test_get_info
get_info
|
18dc92ad8283b22d803c5a4ae717db80492e5c17
|
sauce-connect.rb
|
sauce-connect.rb
|
require "formula"
class SauceConnect < Formula
homepage "https://docs.saucelabs.com/reference/sauce-connect/"
url "https://saucelabs.com/downloads/sc-4.3.11-osx.zip"
sha1 "5d0aa851d21f3d4a21f298b6a921761c6aa15217"
def install
bin.install 'bin/sc'
end
test do
system "#{bin}/sc", "--version"
end
end
|
require "formula"
class SauceConnect < Formula
homepage "https://docs.saucelabs.com/reference/sauce-connect/"
url "https://saucelabs.com/downloads/sc-4.3.13-osx.zip"
sha1 "d29ce847880ece5ea8c7cfa94b0c89de5a4f328c"
def install
bin.install 'bin/sc'
end
test do
system "#{bin}/sc", "--version"
end
end
|
Upgrade to Sauce Connect 4.3.13
|
Upgrade to Sauce Connect 4.3.13
|
Ruby
|
unlicense
|
retornam/homebrew-saucelabs
|
ruby
|
## Code Before:
require "formula"
class SauceConnect < Formula
homepage "https://docs.saucelabs.com/reference/sauce-connect/"
url "https://saucelabs.com/downloads/sc-4.3.11-osx.zip"
sha1 "5d0aa851d21f3d4a21f298b6a921761c6aa15217"
def install
bin.install 'bin/sc'
end
test do
system "#{bin}/sc", "--version"
end
end
## Instruction:
Upgrade to Sauce Connect 4.3.13
## Code After:
require "formula"
class SauceConnect < Formula
homepage "https://docs.saucelabs.com/reference/sauce-connect/"
url "https://saucelabs.com/downloads/sc-4.3.13-osx.zip"
sha1 "d29ce847880ece5ea8c7cfa94b0c89de5a4f328c"
def install
bin.install 'bin/sc'
end
test do
system "#{bin}/sc", "--version"
end
end
|
9e3219996121608647195df5386a19a94ff430f2
|
.github/workflows/push.yml
|
.github/workflows/push.yml
|
on: push
name: Test
jobs:
buildImage:
name: Build Image
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@master
- name: Build Image
uses: actions/docker/cli@c08a5fc9e0286844156fefff2c141072048141f6
with:
args: build -t veryhappythings/discord-gather:${{ github.sha }} .
- name: Run tests
uses: actions/docker/cli@c08a5fc9e0286844156fefff2c141072048141f6
env:
COVERALLS_REPO_TOKEN: ${{ secrets.COVERALLS_REPO_TOKEN }}
with:
args: run --env COVERALLS_REPO_TOKEN=$COVERALLS_REPO_TOKEN veryhappythings/discord-gather:${{
github.sha }} make test-with-coverage
|
on: push
name: Test
jobs:
buildImage:
name: Build Image
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@master
- name: Build Image
run: docker build -t veryhappythings/discord-gather:${{ github.sha }} .
- name: Run tests
env:
COVERALLS_REPO_TOKEN: ${{ secrets.COVERALLS_REPO_TOKEN }}
run: docker run --env COVERALLS_REPO_TOKEN=$COVERALLS_REPO_TOKEN veryhappythings/discord-gather:${{ github.sha }} make test-with-coverage
|
Switch over to using the run command
|
Switch over to using the run command
|
YAML
|
mit
|
veryhappythings/discord-gather
|
yaml
|
## Code Before:
on: push
name: Test
jobs:
buildImage:
name: Build Image
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@master
- name: Build Image
uses: actions/docker/cli@c08a5fc9e0286844156fefff2c141072048141f6
with:
args: build -t veryhappythings/discord-gather:${{ github.sha }} .
- name: Run tests
uses: actions/docker/cli@c08a5fc9e0286844156fefff2c141072048141f6
env:
COVERALLS_REPO_TOKEN: ${{ secrets.COVERALLS_REPO_TOKEN }}
with:
args: run --env COVERALLS_REPO_TOKEN=$COVERALLS_REPO_TOKEN veryhappythings/discord-gather:${{
github.sha }} make test-with-coverage
## Instruction:
Switch over to using the run command
## Code After:
on: push
name: Test
jobs:
buildImage:
name: Build Image
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@master
- name: Build Image
run: docker build -t veryhappythings/discord-gather:${{ github.sha }} .
- name: Run tests
env:
COVERALLS_REPO_TOKEN: ${{ secrets.COVERALLS_REPO_TOKEN }}
run: docker run --env COVERALLS_REPO_TOKEN=$COVERALLS_REPO_TOKEN veryhappythings/discord-gather:${{ github.sha }} make test-with-coverage
|
10d170fa4b12345c2450bb3d83f92c2abc0eab3c
|
app/src/main/java/org/zeropage/apps/zeropage/database/notification/NotificationHistory.java
|
app/src/main/java/org/zeropage/apps/zeropage/database/notification/NotificationHistory.java
|
package org.zeropage.apps.zeropage.database.notification;
import android.content.ContentValues;
import android.content.Context;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import org.zeropage.apps.zeropage.notification.Notification;
import java.util.ArrayList;
import java.util.List;
public class NotificationHistory {
private static NotificationHistory sInstance;
private SQLiteDatabase mDatabase;
public NotificationHistory getInstance(Context context) {
sInstance = new Notifi
}
private NotificationHistory(Context context) {
mDatabase = new NotificationOpenHelper(context).getWritableDatabase();
}
public void addToHistory(Notification newNotification) {
ContentValues values = NotificationTable.getContentValues(newNotification);
mDatabase.insert(NotificationTable.NAME, null, values);
}
public List<Notification> getAllHistory() {
List<Notification> notifications = new ArrayList<>();
try (NotificationCursorWrapper wrapper = queryHistory(null, null)) {
wrapper.moveToFirst();
while (!wrapper.isAfterLast()) {
notifications.add(wrapper.getNotification());
wrapper.moveToNext();
}
}
return notifications;
}
private NotificationCursorWrapper queryHistory(String whereClause, String[] whereArgs) {
Cursor cursor = mDatabase.query(NotificationTable.NAME,
null,
whereClause,
whereArgs,
null,
null,
null,
null);
return new NotificationCursorWrapper(cursor);
}
}
|
package org.zeropage.apps.zeropage.database.notification;
import android.content.ContentValues;
import android.content.Context;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import org.zeropage.apps.zeropage.notification.Notification;
import java.util.ArrayList;
import java.util.List;
public class NotificationHistory {
private static NotificationHistory sInstance;
private SQLiteDatabase mDatabase;
public synchronized static NotificationHistory getInstance(Context context) {
if (sInstance == null) {
sInstance = new NotificationHistory(context.getApplicationContext());
}
return sInstance;
}
private NotificationHistory(Context context) {
mDatabase = new NotificationOpenHelper(context).getWritableDatabase();
}
public void addToHistory(Notification newNotification) {
ContentValues values = NotificationTable.getContentValues(newNotification);
mDatabase.insert(NotificationTable.NAME, null, values);
}
public List<Notification> getAllHistory() {
List<Notification> notifications = new ArrayList<>();
try (NotificationCursorWrapper wrapper = queryHistory(null, null)) {
wrapper.moveToFirst();
while (!wrapper.isAfterLast()) {
notifications.add(wrapper.getNotification());
wrapper.moveToNext();
}
}
return notifications;
}
private NotificationCursorWrapper queryHistory(String whereClause, String[] whereArgs) {
Cursor cursor = mDatabase.query(NotificationTable.NAME,
null,
whereClause,
whereArgs,
null,
null,
null,
null);
return new NotificationCursorWrapper(cursor);
}
}
|
Add Helper singleton class for querying/adding notification to db.
|
Add Helper singleton class for querying/adding notification to db.
|
Java
|
mit
|
ZeroPage/i_have_no_apps
|
java
|
## Code Before:
package org.zeropage.apps.zeropage.database.notification;
import android.content.ContentValues;
import android.content.Context;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import org.zeropage.apps.zeropage.notification.Notification;
import java.util.ArrayList;
import java.util.List;
public class NotificationHistory {
private static NotificationHistory sInstance;
private SQLiteDatabase mDatabase;
public NotificationHistory getInstance(Context context) {
sInstance = new Notifi
}
private NotificationHistory(Context context) {
mDatabase = new NotificationOpenHelper(context).getWritableDatabase();
}
public void addToHistory(Notification newNotification) {
ContentValues values = NotificationTable.getContentValues(newNotification);
mDatabase.insert(NotificationTable.NAME, null, values);
}
public List<Notification> getAllHistory() {
List<Notification> notifications = new ArrayList<>();
try (NotificationCursorWrapper wrapper = queryHistory(null, null)) {
wrapper.moveToFirst();
while (!wrapper.isAfterLast()) {
notifications.add(wrapper.getNotification());
wrapper.moveToNext();
}
}
return notifications;
}
private NotificationCursorWrapper queryHistory(String whereClause, String[] whereArgs) {
Cursor cursor = mDatabase.query(NotificationTable.NAME,
null,
whereClause,
whereArgs,
null,
null,
null,
null);
return new NotificationCursorWrapper(cursor);
}
}
## Instruction:
Add Helper singleton class for querying/adding notification to db.
## Code After:
package org.zeropage.apps.zeropage.database.notification;
import android.content.ContentValues;
import android.content.Context;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import org.zeropage.apps.zeropage.notification.Notification;
import java.util.ArrayList;
import java.util.List;
public class NotificationHistory {
private static NotificationHistory sInstance;
private SQLiteDatabase mDatabase;
public synchronized static NotificationHistory getInstance(Context context) {
if (sInstance == null) {
sInstance = new NotificationHistory(context.getApplicationContext());
}
return sInstance;
}
private NotificationHistory(Context context) {
mDatabase = new NotificationOpenHelper(context).getWritableDatabase();
}
public void addToHistory(Notification newNotification) {
ContentValues values = NotificationTable.getContentValues(newNotification);
mDatabase.insert(NotificationTable.NAME, null, values);
}
public List<Notification> getAllHistory() {
List<Notification> notifications = new ArrayList<>();
try (NotificationCursorWrapper wrapper = queryHistory(null, null)) {
wrapper.moveToFirst();
while (!wrapper.isAfterLast()) {
notifications.add(wrapper.getNotification());
wrapper.moveToNext();
}
}
return notifications;
}
private NotificationCursorWrapper queryHistory(String whereClause, String[] whereArgs) {
Cursor cursor = mDatabase.query(NotificationTable.NAME,
null,
whereClause,
whereArgs,
null,
null,
null,
null);
return new NotificationCursorWrapper(cursor);
}
}
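The completed getInstance is the classic synchronized lazy singleton over the application context. A rough Python analogue of that pattern (class name and lock are illustrative, not part of the repository):

```python
import threading


class HistoryStore:
    _instance = None
    _lock = threading.Lock()

    @classmethod
    def get_instance(cls):
        with cls._lock:             # serialise first-time construction
            if cls._instance is None:
                cls._instance = cls()
            return cls._instance


assert HistoryStore.get_instance() is HistoryStore.get_instance()
```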
|
e69520b11402b32d1cdfc0f58eb6a9ed2b659707
|
tenacity-testing/src/main/java/com/yammer/tenacity/testing/TenacityTest.java
|
tenacity-testing/src/main/java/com/yammer/tenacity/testing/TenacityTest.java
|
package com.yammer.tenacity.testing;
import com.netflix.config.ConfigurationManager;
import com.netflix.hystrix.Hystrix;
import com.netflix.hystrix.contrib.yammermetricspublisher.HystrixYammerMetricsPublisher;
import com.netflix.hystrix.strategy.HystrixPlugins;
import org.junit.After;
import java.util.concurrent.TimeUnit;
public abstract class TenacityTest {
static {
initialization();
}
private static void initialization() {
HystrixPlugins.getInstance().registerMetricsPublisher(new HystrixYammerMetricsPublisher());
ConfigurationManager
.getConfigInstance()
.setProperty("hystrix.command.default.metrics.healthSnapshot.intervalInMilliseconds", "1");
}
@After
public void testTeardown() {
Hystrix.reset(1, TimeUnit.SECONDS);
}
}
|
package com.yammer.tenacity.testing;
import com.netflix.config.ConfigurationManager;
import com.netflix.hystrix.Hystrix;
import com.netflix.hystrix.contrib.yammermetricspublisher.HystrixYammerMetricsPublisher;
import com.netflix.hystrix.strategy.HystrixPlugins;
import org.junit.After;
import org.junit.Before;
import java.util.concurrent.TimeUnit;
public abstract class TenacityTest {
static {
HystrixPlugins.getInstance().registerMetricsPublisher(new HystrixYammerMetricsPublisher());
}
@Before
public void testInitialization() {
ConfigurationManager
.getConfigInstance()
.setProperty("hystrix.command.default.metrics.healthSnapshot.intervalInMilliseconds", "1");
}
@After
public void testTeardown() {
Hystrix.reset(1, TimeUnit.SECONDS);
ConfigurationManager.getConfigInstance().clear();
}
}
|
Reset the ConfigurationManager when testing Tenacity
|
Reset the ConfigurationManager when testing Tenacity
|
Java
|
apache-2.0
|
skinzer/tenacity,yonglehou/tenacity,yammer/tenacity,jplock/tenacity,mauricionr/tenacity,samaitra/tenacity,Trundle/tenacity
|
java
|
## Code Before:
package com.yammer.tenacity.testing;
import com.netflix.config.ConfigurationManager;
import com.netflix.hystrix.Hystrix;
import com.netflix.hystrix.contrib.yammermetricspublisher.HystrixYammerMetricsPublisher;
import com.netflix.hystrix.strategy.HystrixPlugins;
import org.junit.After;
import java.util.concurrent.TimeUnit;
public abstract class TenacityTest {
static {
initialization();
}
private static void initialization() {
HystrixPlugins.getInstance().registerMetricsPublisher(new HystrixYammerMetricsPublisher());
ConfigurationManager
.getConfigInstance()
.setProperty("hystrix.command.default.metrics.healthSnapshot.intervalInMilliseconds", "1");
}
@After
public void testTeardown() {
Hystrix.reset(1, TimeUnit.SECONDS);
}
}
## Instruction:
Reset the ConfigurationManager when testing Tenacity
## Code After:
package com.yammer.tenacity.testing;
import com.netflix.config.ConfigurationManager;
import com.netflix.hystrix.Hystrix;
import com.netflix.hystrix.contrib.yammermetricspublisher.HystrixYammerMetricsPublisher;
import com.netflix.hystrix.strategy.HystrixPlugins;
import org.junit.After;
import org.junit.Before;
import java.util.concurrent.TimeUnit;
public abstract class TenacityTest {
static {
HystrixPlugins.getInstance().registerMetricsPublisher(new HystrixYammerMetricsPublisher());
}
@Before
public void testInitialization() {
ConfigurationManager
.getConfigInstance()
.setProperty("hystrix.command.default.metrics.healthSnapshot.intervalInMilliseconds", "1");
}
@After
public void testTeardown() {
Hystrix.reset(1, TimeUnit.SECONDS);
ConfigurationManager.getConfigInstance().clear();
}
}
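The move from a static initialiser to @Before/@After is about per-test isolation: the property is set freshly for each test and the whole configuration is cleared afterwards. A bare-bones unittest sketch of the same idea (the config store and key are made up):

```python
import unittest

CONFIG = {}  # stand-in for a process-wide configuration store


class IsolatedTest(unittest.TestCase):
    def setUp(self):
        CONFIG["metrics.healthSnapshot.intervalInMilliseconds"] = "1"

    def tearDown(self):
        CONFIG.clear()              # nothing leaks into the next test

    def test_property_is_present(self):
        self.assertEqual(CONFIG["metrics.healthSnapshot.intervalInMilliseconds"], "1")
```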
|
f00a3026f43a9dee33bbb6de3080cc36e5c4a759
|
scripts/cleanup.sh
|
scripts/cleanup.sh
|
/usr/bin/yes | /usr/bin/pacman -Scc
# Write zeros to improve virtual disk compaction.
zerofile=$(/usr/bin/mktemp /zerofile.XXXXX)
/usr/bin/dd if=/dev/zero of="$zerofile" bs=1M
/usr/bin/rm -f "$zerofile"
/usr/bin/sync
|
/usr/bin/yes | /usr/bin/pacman -Scc
/usr/bin/pacman-optimize
# Write zeros to improve virtual disk compaction.
zerofile=$(/usr/bin/mktemp /zerofile.XXXXX)
/usr/bin/dd if=/dev/zero of="$zerofile" bs=1M
/usr/bin/rm -f "$zerofile"
/usr/bin/sync
|
Optimize the pacman database before zeroing the drive
|
Optimize the pacman database before zeroing the drive
|
Shell
|
isc
|
agt-the-walker/packer-arch,aidanharris/packer-arch,slash170/packer-arch,elasticdog/packer-arch,tomaspapan/packer-arch,cosmo0920/packer-arch,tomswartz07/packer-arch,appleby/Lisp-In-Small-Pieces-VM
|
shell
|
## Code Before:
/usr/bin/yes | /usr/bin/pacman -Scc
# Write zeros to improve virtual disk compaction.
zerofile=$(/usr/bin/mktemp /zerofile.XXXXX)
/usr/bin/dd if=/dev/zero of="$zerofile" bs=1M
/usr/bin/rm -f "$zerofile"
/usr/bin/sync
## Instruction:
Optimize the pacman database before zeroing the drive
## Code After:
/usr/bin/yes | /usr/bin/pacman -Scc
/usr/bin/pacman-optimize
# Write zeros to improve virtual disk compaction.
zerofile=$(/usr/bin/mktemp /zerofile.XXXXX)
/usr/bin/dd if=/dev/zero of="$zerofile" bs=1M
/usr/bin/rm -f "$zerofile"
/usr/bin/sync
|
9c531f2324773e4922d37761059edccb8d7b74e4
|
lib/organic-sitemap/redis_manager.rb
|
lib/organic-sitemap/redis_manager.rb
|
module OrganicSitemap
class RedisManager
def self.add(key)
return unless key
OrganicSitemap.configuration.
redis_connection.
zadd(OrganicSitemap.configuration.storage_key,
(DateTime.now + OrganicSitemap.configuration.expiry_time.to_i).to_time.to_i,
key)
end
def self.clean_set(time = Time.now)
OrganicSitemap.configuration.
redis_connection.
zremrangebyscore(OrganicSitemap.configuration.storage_key,
"-inf",
time.to_i)
end
def self.sitemap_urls(from: nil, to: nil)
from = from ? from.to_time.to_i : '-inf'
to = to ? to.to_time.to_i : '+inf'
OrganicSitemap.configuration.
redis_connection.
zrangebyscore(OrganicSitemap.configuration.storage_key,
from,
to)
end
end
end
|
module OrganicSitemap
class RedisManager
def self.add(key)
return unless key
redis_connection.zadd(storage_key, (DateTime.now + expiry_time).to_time.to_i, key)
end
def self.clean_set(time = Time.now)
redis_connection.zremrangebyscore(storage_key, "-inf", time.to_i)
end
def self.sitemap_urls(from: nil, to: nil)
from = from ? from.to_time.to_i : '-inf'
to = to ? to.to_time.to_i : '+inf'
redis_connection.zrangebyscore(storage_key, from, to)
end
def self.remove_key(key: nil)
return if key.nil?
redis_connection.zrem(storage_key, key)
end
def self.remove_keys(keys: [])
return unless keys.any?
keys.each do |key|
remove_key key
end
end
private
def self.redis_connection
OrganicSitemap.configuration.redis_connection
end
def self.storage_key
OrganicSitemap.configuration.storage_key
end
def self.expiry_time
OrganicSitemap.configuration.expiry_time.to_i
end
end
end
|
Add remove_key and remove_keys functions on redis manager
|
Add remove_key and remove_keys functions on redis manager
|
Ruby
|
mit
|
abelardogilm/organic-sitemap,abelardogilm/organic-sitemap
|
ruby
|
## Code Before:
module OrganicSitemap
class RedisManager
def self.add(key)
return unless key
OrganicSitemap.configuration.
redis_connection.
zadd(OrganicSitemap.configuration.storage_key,
(DateTime.now + OrganicSitemap.configuration.expiry_time.to_i).to_time.to_i,
key)
end
def self.clean_set(time = Time.now)
OrganicSitemap.configuration.
redis_connection.
zremrangebyscore(OrganicSitemap.configuration.storage_key,
"-inf",
time.to_i)
end
def self.sitemap_urls(from: nil, to: nil)
from = from ? from.to_time.to_i : '-inf'
to = to ? to.to_time.to_i : '+inf'
OrganicSitemap.configuration.
redis_connection.
zrangebyscore(OrganicSitemap.configuration.storage_key,
from,
to)
end
end
end
## Instruction:
Add remove_key and remove_keys functions on redis manager
## Code After:
module OrganicSitemap
class RedisManager
def self.add(key)
return unless key
redis_connection.zadd(storage_key, (DateTime.now + expiry_time).to_time.to_i, key)
end
def self.clean_set(time = Time.now)
redis_connection.zremrangebyscore(storage_key, "-inf", time.to_i)
end
def self.sitemap_urls(from: nil, to: nil)
from = from ? from.to_time.to_i : '-inf'
to = to ? to.to_time.to_i : '+inf'
redis_connection.zrangebyscore(storage_key, from, to)
end
def self.remove_key(key: nil)
return if key.nil?
redis_connection.zrem(storage_key, key)
end
def self.remove_keys(keys: [])
return unless keys.any?
keys.each do |key|
remove_key key
end
end
private
def self.redis_connection
OrganicSitemap.configuration.redis_connection
end
def self.storage_key
OrganicSitemap.configuration.storage_key
end
def self.expiry_time
OrganicSitemap.configuration.expiry_time.to_i
end
end
end
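The Ruby manager above keeps URLs in a Redis sorted set scored by expiry time. A rough redis-py equivalent of the same bookkeeping, including the newly added member removal (key name and TTL are invented):

```python
import time

import redis

r = redis.Redis()
KEY = "sitemap:urls"


def add(url, ttl_seconds=7 * 86400):
    r.zadd(KEY, {url: time.time() + ttl_seconds})        # score = expiry timestamp


def clean_set(now=None):
    r.zremrangebyscore(KEY, "-inf", now or time.time())  # drop expired members


def sitemap_urls(start="-inf", stop="+inf"):
    return r.zrangebyscore(KEY, start, stop)


def remove_keys(*urls):
    if urls:
        r.zrem(KEY, *urls)                               # mirrors remove_key / remove_keys
```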
|
f3ab09ab817e7df0c1e1cdc9400d85de5be64919
|
css/brainwallet.css
|
css/brainwallet.css
|
body {
padding-top: 72px;
padding-bottom: 32px;
}
.form-control[disabled], .form-control[readonly] { cursor: auto; }
.dropdown-menu-two-column {
-moz-column-count: 2;
-webkit-column-count: 2;
column-count:2;
width: 360px;
padding: 7px 0px;
}
|
body {
padding-top: 72px;
padding-bottom: 32px;
}
.form-control { font-family: monospace; }
.form-control[disabled], .form-control[readonly] { cursor: auto; }
.dropdown-menu-two-column {
-moz-column-count: 2;
-webkit-column-count: 2;
column-count:2;
width: 360px;
padding: 7px 0px;
}
|
Use monospace font for form text boxes.
|
Use monospace font for form text boxes.
|
CSS
|
mit
|
jamescarter-le/brainwallet.github.io,Colored-Coins/brainwallet.github.io,sipak/brainwallet,2XL/brainwallet.github.io,brainwallet/brainwallet.github.io,sipak/brainwallet,Colored-Coins/brainwallet.github.io,brainwalletX/brainwalletX.github.io,Taiiwo/bitcoin-signature-tool,nvoron23/brainwallet.github.io,jhoenicke/brainwallet.github.io,btcspry/brainwallet.github.io,jamescarter-le/brainwallet.github.io,brainwalletX/brainwalletX.github.io,Taiiwo/bitcoin-signature-tool,btcspry/brainwallet.github.io,rexquigg/brainwallet,2XL/brainwallet.github.io,nvoron23/brainwallet.github.io,A-Zak/brainwallet.github.io,jhoenicke/brainwallet.github.io,A-Zak/brainwallet.github.io
|
css
|
## Code Before:
body {
padding-top: 72px;
padding-bottom: 32px;
}
.form-control[disabled], .form-control[readonly] { cursor: auto; }
.dropdown-menu-two-column {
-moz-column-count: 2;
-webkit-column-count: 2;
column-count:2;
width: 360px;
padding: 7px 0px;
}
## Instruction:
Use monospace font for form text boxes.
## Code After:
body {
padding-top: 72px;
padding-bottom: 32px;
}
.form-control { font-family: monospace; }
.form-control[disabled], .form-control[readonly] { cursor: auto; }
.dropdown-menu-two-column {
-moz-column-count: 2;
-webkit-column-count: 2;
column-count:2;
width: 360px;
padding: 7px 0px;
}
|
fe9779d294a1ed6de411e672ccb2f0c7e742d628
|
_data/categories.yml
|
_data/categories.yml
|
toplevel:
- url: government
name: "Government"
description: "Blah blah"
- url: jobs
name: "Jobs"
description: "Blah blah"
- url: public-safety
name: "Public Safety"
description: "Blah blah"
- url: licensing-permits
name: "Licensing & Permits"
description: "Blah blah"
- url: utilities-streets
name: "Utilities & Streets"
description: "Blah blah"
- url: transport
name: "Transport"
description: "Blah blah"
- url: parks-recreation
name: "Parks & Recreation"
description: "Blah blah"
# second level nav
government:
- "departments": "Departments"
transport:
- "parking": "Parking"
|
toplevel:
- url: government
name: "Government"
description: "City Departments and council members & meetings and history"
- url: jobs
name: "Jobs"
description: "Finding jobs with the City of Oakland and general employment services"
- url: public-safety
name: "Public Safety"
description: "Includes emergency prepardness and safety advice and crime"
- url: licensing-permits
name: "Licensing & Permits"
description: "Includes applications for building permits and zoning code"
- url: utilities-streets
name: "Utilities & Streets"
description: "Including bill payments, reporting issues and disputes"
- url: transport
name: "Transport"
description: "Including public transport, parking and citations"
- url: parks-recreation
name: "Parks & Recreation"
description: "Includes city provided outdoor activities, attractions and entertainment"
# second level nav
government:
- "departments": "Departments"
transport:
- "parking": "Parking"
|
Add some proper starter descriptions
|
Add some proper starter descriptions
|
YAML
|
isc
|
codeforamerica/ceviche-starter,codeforamerica/ceviche-starter,ceviche/shoop-a-doop,ceviche/shoop-a-doop
|
yaml
|
## Code Before:
toplevel:
- url: government
name: "Government"
description: "Blah blah"
- url: jobs
name: "Jobs"
description: "Blah blah"
- url: public-safety
name: "Public Safety"
description: "Blah blah"
- url: licensing-permits
name: "Licensing & Permits"
description: "Blah blah"
- url: utilities-streets
name: "Utilities & Streets"
description: "Blah blah"
- url: transport
name: "Transport"
description: "Blah blah"
- url: parks-recreation
name: "Parks & Recreation"
description: "Blah blah"
# second level nav
government:
- "departments": "Departments"
transport:
- "parking": "Parking"
## Instruction:
Add some proper starter descriptions
## Code After:
toplevel:
- url: government
name: "Government"
description: "City Departments and council members & meetings and history"
- url: jobs
name: "Jobs"
description: "Finding jobs with the City of Oakland and general employment services"
- url: public-safety
name: "Public Safety"
description: "Includes emergency prepardness and safety advice and crime"
- url: licensing-permits
name: "Licensing & Permits"
description: "Includes applications for building permits and zoning code"
- url: utilities-streets
name: "Utilities & Streets"
description: "Including bill payments, reporting issues and disputes"
- url: transport
name: "Transport"
description: "Including public transport, parking and citations"
- url: parks-recreation
name: "Parks & Recreation"
description: "Includes city provided outdoor activities, attractions and entertainment"
# second level nav
government:
- "departments": "Departments"
transport:
- "parking": "Parking"
|
95bed5b5f45e77d8de864a2c5e5aa156f2e5a50e
|
src/gui.js
|
src/gui.js
|
var app = require('app')
var dialog = require('dialog')
var menu = require('menu')
/*
Build the application menu for the current platform.
*/
exports.application_menu = function(window) {
var template = [
{
label: 'File',
submenu: [
{
label: 'Quit',
accelerator: 'CmdOrCtrl+Q',
click: function() {
app.quit()
}
}
]
},
{
label: 'Help',
submenu: [
{
label: 'About apollo',
click: function() {
dialog.showMessageBox(window, {
title: 'About apollo',
message: 'Apollo v' + app.getVersion(),
buttons: ['ok'],
})
}
}
]
}
]
return menu.buildFromTemplate(template)
}
|
var app = require('app')
var dialog = require('dialog')
var menu = require('menu')
/*
Build the application menu for the current platform.
*/
exports.application_menu = function(window) {
var file = {
label: 'File',
submenu: [
{
label: 'Quit',
accelerator: 'CmdOrCtrl+Q',
click: function() {
app.quit()
}
},
]
}
var help = {
label: 'Help',
submenu: []
}
var about = {
label: 'About apollo',
click: function() {
dialog.showMessageBox(window, {
title: 'About apollo',
message: 'Apollo v' + app.getVersion(),
buttons: ['ok'],
})
}
}
if (process.platform == 'darwin') {
file.submenu.push(about)
} else {
help.submenu.push(about)
}
return menu.buildFromTemplate([
file,
help,
])
}
|
Restructure menus to match mac patterns
|
Restructure menus to match mac patterns
|
JavaScript
|
mit
|
jreese/apollo,jreese/apollo
|
javascript
|
## Code Before:
var app = require('app')
var dialog = require('dialog')
var menu = require('menu')
/*
Build the application menu for the current platform.
*/
exports.application_menu = function(window) {
var template = [
{
label: 'File',
submenu: [
{
label: 'Quit',
accelerator: 'CmdOrCtrl+Q',
click: function() {
app.quit()
}
}
]
},
{
label: 'Help',
submenu: [
{
label: 'About apollo',
click: function() {
dialog.showMessageBox(window, {
title: 'About apollo',
message: 'Apollo v' + app.getVersion(),
buttons: ['ok'],
})
}
}
]
}
]
return menu.buildFromTemplate(template)
}
## Instruction:
Restructure menus to match mac patterns
## Code After:
var app = require('app')
var dialog = require('dialog')
var menu = require('menu')
/*
Build the application menu for the current platform.
*/
exports.application_menu = function(window) {
var file = {
label: 'File',
submenu: [
{
label: 'Quit',
accelerator: 'CmdOrCtrl+Q',
click: function() {
app.quit()
}
},
]
}
var help = {
label: 'Help',
submenu: []
}
var about = {
label: 'About apollo',
click: function() {
dialog.showMessageBox(window, {
title: 'About apollo',
message: 'Apollo v' + app.getVersion(),
buttons: ['ok'],
})
}
}
if (process.platform == 'darwin') {
file.submenu.push(about)
} else {
help.submenu.push(about)
}
return menu.buildFromTemplate([
file,
help,
])
}
|
ccf74e0e790bba5065e7f510324c599023a36f39
|
Argo/Operators/Argo.swift
|
Argo/Operators/Argo.swift
|
import Runes
precedencegroup DecodePrecedence {
associativity: left
higherThan: ApplicativeSequencePrecedence
lowerThan: NilCoalescingPrecedence
}
infix operator <| : DecodePrecedence
infix operator <|? : DecodePrecedence
infix operator <|| : DecodePrecedence
infix operator <||? : DecodePrecedence
|
import Runes
precedencegroup ArgoDecodePrecedence {
associativity: left
higherThan: RunesApplicativeSequencePrecedence
lowerThan: NilCoalescingPrecedence
}
infix operator <| : ArgoDecodePrecedence
infix operator <|? : ArgoDecodePrecedence
infix operator <|| : ArgoDecodePrecedence
infix operator <||? : ArgoDecodePrecedence
|
Reduce the risk of conflicts with precedence group
|
Reduce the risk of conflicts with precedence group
Turns out, these need to be globally unique! Fun stuff. To work around this we
can fall back to our good old friend "Explicit Prefix"! We'll just tack on a
quick package name to the front of this precedence group and voila! No
conflicts!

|
Swift
|
mit
|
thoughtbot/Argo,thoughtbot/Argo,thoughtbot/Argo
|
swift
|
## Code Before:
import Runes
precedencegroup DecodePrecedence {
associativity: left
higherThan: ApplicativeSequencePrecedence
lowerThan: NilCoalescingPrecedence
}
infix operator <| : DecodePrecedence
infix operator <|? : DecodePrecedence
infix operator <|| : DecodePrecedence
infix operator <||? : DecodePrecedence
## Instruction:
Reduce the risk of conflicts with precedence group
Turns out, these need to be globally unique! Fun stuff. To work around this we
can fall back to our good old friend "Explicit Prefix"! We'll just tack on a
quick package name to the front of this precedence group and voila! No
conflicts!

## Code After:
import Runes
precedencegroup ArgoDecodePrecedence {
associativity: left
higherThan: RunesApplicativeSequencePrecedence
lowerThan: NilCoalescingPrecedence
}
infix operator <| : ArgoDecodePrecedence
infix operator <|? : ArgoDecodePrecedence
infix operator <|| : ArgoDecodePrecedence
infix operator <||? : ArgoDecodePrecedence
|
2893be2bf242f8d40078bbc5146ac62a92b974a1
|
.travis.yml
|
.travis.yml
|
language: android
android:
components:
- android-22
- build-tools-22.0.1
licenses:
- 'android-sdk-license.*'
sudo: false
script: 'travis_retry ./gradlew clean build'
|
language: android
android:
components:
- android-22
- build-tools-22.0.1
- extra-android-m2repository
- extra-android-support
licenses:
- 'android-sdk-license.*'
sudo: false
script: 'travis_retry ./gradlew clean build'
|
Add Android repos to Travis-CI configuration
|
Add Android repos to Travis-CI configuration
|
YAML
|
apache-2.0
|
Mukul-Sharma/Spyglass,linkedin/Spyglass,bpappin/Spyglass,RyanTech/Spyglass-1,senorcris/Spyglass,RacZo/Spyglass,conx2share/Spyglass,xiangwei/Spyglass
|
yaml
|
## Code Before:
language: android
android:
components:
- android-22
- build-tools-22.0.1
licenses:
- 'android-sdk-license.*'
sudo: false
script: 'travis_retry ./gradlew clean build'
## Instruction:
Add Android repos to Travis-CI configuration
## Code After:
language: android
android:
components:
- android-22
- build-tools-22.0.1
- extra-android-m2repository
- extra-android-support
licenses:
- 'android-sdk-license.*'
sudo: false
script: 'travis_retry ./gradlew clean build'
|
44266f29f4f84fa239c6c79887b58a2d07c78afd
|
api/README.md
|
api/README.md
|
API contains the controllers and rabl views implementing the REST API of Solidus.
## Testing
Run the API tests:
```bash
bundle exec rspec
```
## Documentation
The API documentation is in the [openapi](https://github.com/solidusio/solidus/tree/master/api/openapi)
directory. It follows the OpenAPI specification and it is hosted on
[Stoplight](https://solidus.docs.stoplight.io/).
If you want to contribute, you can use [Stoplight Studio](https://stoplight.io/p/studio),
an OpenAPI editor, to edit the files visually, and copy-paste the
resulting code into the `openapi` directory.
CircleCI automatically syncs our Git repo with Stoplight when a PR is
merged, and automatically publishes a new version on Stoplight when
a new Solidus version is released.
|
API contains the controllers and rabl views implementing the REST API of Solidus.
## Testing
Run the API tests:
```bash
bundle exec rspec
```
## Documentation
The API documentation is in the [openapi][docs-dir] directory. It follows the
OpenAPI specification and it is hosted on [Stoplight Docs][live-docs].
If you want to contribute, you can use [Stoplight Studio][studio]. Simply
follow these steps:
1. Create a new Stoplight Studio project
2. Copy-paste the content of `openapi/api.oas2.yml` into your project
3. Edit the endpoints and models as needed
4. Copy-paste the result back into `openapi/api.oas2.yml`
5. Open a PR!
**Note: Only use embedded models in Stoplight Studio, as Stoplight Docs is
not compatible with externally-defined models!**
CircleCI automatically syncs our Git repo with Stoplight Docs when a PR is
merged, and automatically publishes a new version on Docs when a new Solidus
version is released.
[docs-dir]: https://github.com/solidusio/solidus/tree/master/api/openapi
[live-docs]: https://solidus.docs.stoplight.io
[studio]: https://stoplight.io/p/studio
|
Document contribution guidelines for API docs
|
Document contribution guidelines for API docs
Because Stoplight Docs is not directly compatible with the output
from Stoplight Studio, there are some intermediate steps and
gotchas users need to be aware of when contributing to our API.
|
Markdown
|
bsd-3-clause
|
pervino/solidus,pervino/solidus,pervino/solidus,pervino/solidus
|
markdown
|
## Code Before:
API contains the controllers and rabl views implementing the REST API of Solidus.
## Testing
Run the API tests:
```bash
bundle exec rspec
```
## Documentation
The API documentation is in the [openapi](https://github.com/solidusio/solidus/tree/master/api/openapi)
directory. It follows the OpenAPI specification and it is hosted on
[Stoplight](https://solidus.docs.stoplight.io/).
If you want to contribute, you can use [Stoplight Studio](https://stoplight.io/p/studio),
an OpenAPI editor, to edit the files visually, and copy-paste the
resulting code into the `openapi` directory.
CircleCI automatically syncs our Git repo with Stoplight when a PR is
merged, and automatically publishes a new version on Stoplight when
a new Solidus version is released.
## Instruction:
Document contribution guidelines for API docs
Because Stoplight Docs is not directly compatible with the output
from Stoplight Studio, there are some intermediate steps and
gotchas users need to be aware of when contributing to our API.
## Code After:
API contains the controllers and rabl views implementing the REST API of Solidus.
## Testing
Run the API tests:
```bash
bundle exec rspec
```
## Documentation
The API documentation is in the [openapi][docs-dir] directory. It follows the
OpenAPI specification and it is hosted on [Stoplight Docs][live-docs].
If you want to contribute, you can use [Stoplight Studio][studio]. Simply
follow these steps:
1. Create a new Stoplight Studio project
2. Copy-paste the content of `openapi/api.oas2.yml` into your project
3. Edit the endpoints and models as needed
4. Copy-paste the result back into `openapi/api.oas2.yml`
5. Open a PR!
**Note: Only use embedded models in Stoplight Studio, as Stoplight Docs is
not compatible with externally-defined models!**
CircleCI automatically syncs our Git repo with Stoplight Docs when a PR is
merged, and automatically publishes a new version on Docs when a new Solidus
version is released.
[docs-dir]: https://github.com/solidusio/solidus/tree/master/api/openapi
[live-docs]: https://solidus.docs.stoplight.io
[studio]: https://stoplight.io/p/studio
|
f0f5e8c8be1f0ccc538340886f765da266ec4590
|
RELEASE-NOTES.rst
|
RELEASE-NOTES.rst
|
=============
Release Notes
=============
Production
==========
The *production* configuration is contained in the *alaska-prod* branch. The
following tagged releases of this configuration have been made:
1.1.0
-----
Tag: ``alaska-prod-1.1.0``
Based on the kayobe 1.1.0 release.
* Adds configuration of fluentd monasca output plugin for forwarding logs to
monasca's log API.
* Adds a static route from the ilab network to the storage network for
accessing the Ceph cluster.
1.0.0
-----
Tag: ``alaska-prod-1.0.0``
Initial release. Based on the kayobe 1.0.0 release.
Alt-1 (Development)
===================
The *alt-1* configuration is contained in the *alaska-alt-1* branch. The
following tagged releases of this configuration have been made:
1.1.0
-----
Tag: ``alaska-alt-1-1.1.0``
Based on the kayobe 1.1.0 release.
* Adds a static route from the ilab network to the storage network for
accessing the Ceph cluster.
1.0.0
-----
Tag: ``alaska-alt-1-1.0.0``
Initial release. Based on the kayobe 1.0.0 release.
|
=============
Release Notes
=============
Production
==========
The *production* configuration is contained in the *alaska-prod* branch. The
following tagged releases of this configuration have been made:
2.0.0
-----
Tag: ``alaska-prod-2.0.0``
Based on the kayobe 2.0.0 release (Ocata).
* Enables the OpenStack murano application lifecycle management service.
* See kayobe 2.0.0 release notes for full feature list.
1.1.0
-----
Tag: ``alaska-prod-1.1.0``
Based on the kayobe 1.1.0 release (Ocata).
* Adds configuration of fluentd monasca output plugin for forwarding logs to
monasca's log API.
* Adds a static route from the ilab network to the storage network for
accessing the Ceph cluster.
1.0.0
-----
Tag: ``alaska-prod-1.0.0``
Initial release. Based on the kayobe 1.0.0 release (Ocata).
Alt-1 (Development)
===================
The *alt-1* configuration is contained in the *alaska-alt-1* branch. The
following tagged releases of this configuration have been made:
1.1.0
-----
Tag: ``alaska-alt-1-1.1.0``
Based on the kayobe 1.1.0 release (Ocata).
* Adds a static route from the ilab network to the storage network for
accessing the Ceph cluster.
1.0.0
-----
Tag: ``alaska-alt-1-1.0.0``
Initial release. Based on the kayobe 1.0.0 release (Ocata).
|
Update release notes for alaska-kayobe-config 2.0.0 releases
|
Update release notes for alaska-kayobe-config 2.0.0 releases
|
reStructuredText
|
apache-2.0
|
SKA-ScienceDataProcessor/alaska-kayobe-config
|
restructuredtext
|
## Code Before:
=============
Release Notes
=============
Production
==========
The *production* configuration is contained in the *alaska-prod* branch. The
following tagged releases of this configuration have been made:
1.1.0
-----
Tag: ``alaska-prod-1.1.0``
Based on the kayobe 1.1.0 release.
* Adds configuration of fluentd monasca output plugin for forwarding logs to
monasca's log API.
* Adds a static route from the ilab network to the storage network for
accessing the Ceph cluster.
1.0.0
-----
Tag: ``alaska-prod-1.0.0``
Initial release. Based on the kayobe 1.0.0 release.
Alt-1 (Development)
===================
The *alt-1* configuration is contained in the *alaska-alt-1* branch. The
following tagged releases of this configuration have been made:
1.1.0
-----
Tag: ``alaska-alt-1-1.1.0``
Based on the kayobe 1.1.0 release.
* Adds a static route from the ilab network to the storage network for
accessing the Ceph cluster.
1.0.0
-----
Tag: ``alaska-alt-1-1.0.0``
Initial release. Based on the kayobe 1.0.0 release.
## Instruction:
Update release notes for alaska-kayobe-config 2.0.0 releases
## Code After:
=============
Release Notes
=============
Production
==========
The *production* configuration is contained in the *alaska-prod* branch. The
following tagged releases of this configuration have been made:
2.0.0
-----
Tag: ``alaska-prod-2.0.0``
Based on the kayobe 2.0.0 release (Ocata).
* Enables the OpenStack murano application lifecycle management service.
* See kayobe 2.0.0 release notes for full feature list.
1.1.0
-----
Tag: ``alaska-prod-1.1.0``
Based on the kayobe 1.1.0 release (Ocata).
* Adds configuration of fluentd monasca output plugin for forwarding logs to
monasca's log API.
* Adds a static route from the ilab network to the storage network for
accessing the Ceph cluster.
1.0.0
-----
Tag: ``alaska-prod-1.0.0``
Initial release. Based on the kayobe 1.0.0 release (Ocata).
Alt-1 (Development)
===================
The *alt-1* configuration is contained in the *alaska-alt-1* branch. The
following tagged releases of this configuration have been made:
1.1.0
-----
Tag: ``alaska-alt-1-1.1.0``
Based on the kayobe 1.1.0 release (Ocata).
* Adds a static route from the ilab network to the storage network for
accessing the Ceph cluster.
1.0.0
-----
Tag: ``alaska-alt-1-1.0.0``
Initial release. Based on the kayobe 1.0.0 release (Ocata).
|
f8eb6683db888e1bf1f2f45d8d7bd10075720349
|
bin/lazy_quotes.pl
|
bin/lazy_quotes.pl
|
use utf8;
use open ':std', ':encoding(UTF-8)';
# Convert lazy double quote entry styles to something sane
while (<>) {
s#(?<!\s)['‘’`]{3}#'"#g;
s#['‘’`]{3}(?!\s)#"'#g;
s#['‘’`]{2}#"#g;
print;
}
|
use utf8;
use open ':std', ':encoding(UTF-8)';
# Convert lazy double quote entry styles to something sane
while (<>) {
s#(?<!\s)['‘’`]{3}#'"#g;
s#['‘’`]{3}(?!\s)#"'#g;
s#['‘’`]{2}#"#g;
s#\\`#'#g;
print;
}
|
Add fix for one more bogus quote type
|
Add fix for one more bogus quote type
|
Perl
|
agpl-3.0
|
alerque/casile,alerque/casile,alerque/casile,alerque/casile,alerque/casile
|
perl
|
## Code Before:
use utf8;
use open ':std', ':encoding(UTF-8)';
# Convert lazy double quote entry styles to something sane
while (<>) {
s#(?<!\s)['‘’`]{3}#'"#g;
s#['‘’`]{3}(?!\s)#"'#g;
s#['‘’`]{2}#"#g;
print;
}
## Instruction:
Add fix for one more bogus quote type
## Code After:
use utf8;
use open ':std', ':encoding(UTF-8)';
# Convert lazy double quote entry styles to something sane
while (<>) {
s#(?<!\s)['‘’`]{3}#'"#g;
s#['‘’`]{3}(?!\s)#"'#g;
s#['‘’`]{2}#"#g;
s#\\`#'#g;
print;
}
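A quick way to sanity-check the new rule, assuming the script is used as a stdin/stdout filter as above (the sample string is made up):
```bash
# The \` escape is now turned into a plain apostrophe:
printf 'a \\`lazy\\` entry\n' | perl bin/lazy_quotes.pl
# expected output: a 'lazy' entry
```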
|
303d256bd6615bfef7d26a1b5dadf474dbbb26af
|
cortex/main.py
|
cortex/main.py
|
'''Main file for running experiments.
'''
import logging
from cortex._lib import (config, data, exp, optimizer, setup_cortex,
setup_experiment, train)
from cortex._lib.utils import print_section
import torch
__author__ = 'R Devon Hjelm'
__author_email__ = '[email protected]'
logger = logging.getLogger('cortex')
viz_process = None
def run(model=None):
'''Main function.
'''
# Parse the command-line arguments
try:
args = setup_cortex(model=model)
if args.command == 'setup':
# Performs setup only.
config.setup()
exit(0)
else:
config.set_config()
print_section('EXPERIMENT')
model = setup_experiment(args, model=model)
print_section('DATA')
data.setup(**exp.ARGS['data'])
print_section('NETWORKS')
if args.reload and not args.load_models:
pass
else:
model.build()
if args.load_models:
d = torch.load(args.load_models, map_location='cpu')
for k in args.reloads:
model.nets[k].load_state_dict(d['nets'][k].state_dict())
print_section('OPTIMIZER')
optimizer.setup(model, **exp.ARGS['optimizer'])
except KeyboardInterrupt:
print('Cancelled')
exit(0)
train.main_loop(model, **exp.ARGS['train'])
|
'''Main file for running experiments.
'''
import logging
from cortex._lib import (config, data, exp, optimizer, setup_cortex,
setup_experiment, train)
from cortex._lib.utils import print_section
import torch
__author__ = 'R Devon Hjelm'
__author_email__ = '[email protected]'
logger = logging.getLogger('cortex')
viz_process = None
def run(model=None):
'''Main function.
'''
# Parse the command-line arguments
try:
args = setup_cortex(model=model)
if args.command == 'setup':
# Performs setup only.
config.setup()
exit(0)
else:
config.set_config()
print_section('EXPERIMENT')
model = setup_experiment(args, model=model)
print_section('DATA')
data.setup(**exp.ARGS['data'])
print_section('NETWORKS')
if args.reload and not args.load_models:
pass
else:
model.build()
if args.load_models:
d = torch.load(args.load_models, map_location='cpu')
for k in args.reloads:
model.nets[k].load_state_dict(d['nets'][k].state_dict())
print_section('OPTIMIZER')
optimizer.setup(model, **exp.ARGS['optimizer'])
except KeyboardInterrupt:
print('Cancelled')
exit(0)
train.main_loop(model, **exp.ARGS['train'])
viz_process.terminate()
|
Terminate viz process at end of experiment.
|
Terminate viz process at end of experiment.
|
Python
|
bsd-3-clause
|
rdevon/cortex,rdevon/cortex
|
python
|
## Code Before:
'''Main file for running experiments.
'''
import logging
from cortex._lib import (config, data, exp, optimizer, setup_cortex,
setup_experiment, train)
from cortex._lib.utils import print_section
import torch
__author__ = 'R Devon Hjelm'
__author_email__ = '[email protected]'
logger = logging.getLogger('cortex')
viz_process = None
def run(model=None):
'''Main function.
'''
# Parse the command-line arguments
try:
args = setup_cortex(model=model)
if args.command == 'setup':
# Performs setup only.
config.setup()
exit(0)
else:
config.set_config()
print_section('EXPERIMENT')
model = setup_experiment(args, model=model)
print_section('DATA')
data.setup(**exp.ARGS['data'])
print_section('NETWORKS')
if args.reload and not args.load_models:
pass
else:
model.build()
if args.load_models:
d = torch.load(args.load_models, map_location='cpu')
for k in args.reloads:
model.nets[k].load_state_dict(d['nets'][k].state_dict())
print_section('OPTIMIZER')
optimizer.setup(model, **exp.ARGS['optimizer'])
except KeyboardInterrupt:
print('Cancelled')
exit(0)
train.main_loop(model, **exp.ARGS['train'])
## Instruction:
Terminate viz process at end of experiment.
## Code After:
'''Main file for running experiments.
'''
import logging
from cortex._lib import (config, data, exp, optimizer, setup_cortex,
setup_experiment, train)
from cortex._lib.utils import print_section
import torch
__author__ = 'R Devon Hjelm'
__author_email__ = '[email protected]'
logger = logging.getLogger('cortex')
viz_process = None
def run(model=None):
'''Main function.
'''
# Parse the command-line arguments
try:
args = setup_cortex(model=model)
if args.command == 'setup':
# Performs setup only.
config.setup()
exit(0)
else:
config.set_config()
print_section('EXPERIMENT')
model = setup_experiment(args, model=model)
print_section('DATA')
data.setup(**exp.ARGS['data'])
print_section('NETWORKS')
if args.reload and not args.load_models:
pass
else:
model.build()
if args.load_models:
d = torch.load(args.load_models, map_location='cpu')
for k in args.reloads:
model.nets[k].load_state_dict(d['nets'][k].state_dict())
print_section('OPTIMIZER')
optimizer.setup(model, **exp.ARGS['optimizer'])
except KeyboardInterrupt:
print('Cancelled')
exit(0)
train.main_loop(model, **exp.ARGS['train'])
viz_process.terminate()
|
8cd11782d4b3558d204f438accdc15b3b702839b
|
unn/cli.py
|
unn/cli.py
|
import sys
commands = {}
args = []
kwargs = {}
def EXIT(msg, code=1):
print(msg)
sys.exit(code)
def command(fn):
commands[fn.__name__] = fn
return fn
def run():
if len(sys.argv) < 2:
EXIT('No command provided')
cmd = sys.argv[1]
if cmd not in commands:
EXIT('Unkown command')
args = [x for x in sys.argv[2:] if '=' not in x]
kwargs = dict([x.split('=') for x in sys.argv[2:] if '=' in x])
kwargs = dict([(k.replace('-', ''),v) for k,v in kwargs.items()])
commands[cmd](*args, **kwargs)
|
import sys
commands = {}
args = []
kwargs = {}
def EXIT(msg, code=1):
print(msg)
sys.exit(code)
def command(fn):
commands[fn.__name__] = fn
return fn
def run():
if len(sys.argv) < 2:
EXIT('Valid commands are:\n ' + '\n '.join(commands))
cmd = sys.argv[1]
if cmd not in commands:
EXIT('Unkown command')
args = [x for x in sys.argv[2:] if '=' not in x]
kwargs = dict([x.split('=') for x in sys.argv[2:] if '=' in x])
kwargs = dict([(k.replace('-', ''),v) for k,v in kwargs.items()])
commands[cmd](*args, **kwargs)
|
Add a helpful message if no command given
|
Add a helpful message if no command given
|
Python
|
mit
|
runningskull/unn
|
python
|
## Code Before:
import sys
commands = {}
args = []
kwargs = {}
def EXIT(msg, code=1):
print(msg)
sys.exit(code)
def command(fn):
commands[fn.__name__] = fn
return fn
def run():
if len(sys.argv) < 2:
EXIT('No command provided')
cmd = sys.argv[1]
if cmd not in commands:
EXIT('Unkown command')
args = [x for x in sys.argv[2:] if '=' not in x]
kwargs = dict([x.split('=') for x in sys.argv[2:] if '=' in x])
kwargs = dict([(k.replace('-', ''),v) for k,v in kwargs.items()])
commands[cmd](*args, **kwargs)
## Instruction:
Add a helpful message if no command given
## Code After:
import sys
commands = {}
args = []
kwargs = {}
def EXIT(msg, code=1):
print(msg)
sys.exit(code)
def command(fn):
commands[fn.__name__] = fn
return fn
def run():
if len(sys.argv) < 2:
EXIT('Valid commands are:\n ' + '\n '.join(commands))
cmd = sys.argv[1]
if cmd not in commands:
EXIT('Unkown command')
args = [x for x in sys.argv[2:] if '=' not in x]
kwargs = dict([x.split('=') for x in sys.argv[2:] if '=' in x])
kwargs = dict([(k.replace('-', ''),v) for k,v in kwargs.items()])
commands[cmd](*args, **kwargs)
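For illustration, a minimal script wired up to this dispatcher; the import path and the `greet` command are assumptions for the sketch, not part of the commit:
```python
from unn import cli  # assumes the package exposes unn/cli.py as unn.cli

@cli.command
def greet(name, loud="no"):
    # bare words become positional args, key=value words become kwargs
    greeting = "HELLO" if loud == "yes" else "hello"
    print("%s, %s" % (greeting, name))

if __name__ == "__main__":
    cli.run()
    # e.g. `python tool.py greet Ada loud=yes` prints "HELLO, Ada";
    # running with no command now lists the registered command names.
```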
|
397ffa609c9d9da521fc2532ee3886cab6ba876c
|
app/templates/files/component.mustache.html
|
app/templates/files/component.mustache.html
|
<!--
Since AngularJS use the same bracket notation as mustache we ask mustache
nicely to use square brackets instead.
-->
{{=[[ ]]=}}
<!--
Try to have one enclosing tag around your component and add the components
name as a class to it.
-->
<div <%= componentName %>></div>
|
<!--
Since AngularJS use the same bracket notation as mustache we ask mustache
nicely to use square brackets instead.
-->
{{=[[ ]]=}}
<!--
Try to have one enclosing tag around your component and add the components
name as a class to it.
-->
<div <%= componentName %> class="<% componentName %>"></div>
|
Add the component's "wrapper" class automatically
|
Add the component's "wrapper" class automatically
|
HTML
|
mit
|
kornosaurus/generator-diversity-component,kornosaurus/generator-diversity-component
|
html
|
## Code Before:
<!--
Since AngularJS use the same bracket notation as mustache we ask mustache
nicely to use square brackets instead.
-->
{{=[[ ]]=}}
<!--
Try to have one enclosing tag around your component and add the components
name as a class to it.
-->
<div <%= componentName %>></div>
## Instruction:
Add the component's "wrapper" class automatically
## Code After:
<!--
Since AngularJS use the same bracket notation as mustache we ask mustache
nicely to use square brackets instead.
-->
{{=[[ ]]=}}
<!--
Try to have one enclosing tag around your component and add the components
name as a class to it.
-->
<div <%= componentName %> class="<% componentName %>"></div>
|
48c3aff478b6277d58305f92b5d3825fcf25ada1
|
src/console/twentyfortyeight.cpp
|
src/console/twentyfortyeight.cpp
|
int main() {
std::cout << "hello world" << std::endl;
return 0;
}
|
//#include <stdio.h>
enum MoveDirection { LEFT, UP, RIGHT, DOWN };
class ConsoleMoveDirectionReader {
public:
MoveDirection next() {
while (true) {
int keyCode = std::cin.get();
if (keyCode == 97) {
return MoveDirection::LEFT;
break;
}
else if (keyCode == 115) {
return MoveDirection::DOWN;
break;
}
else if (keyCode == 100) {
return MoveDirection::RIGHT;
break;
}
else if (keyCode == 119) {
return MoveDirection::UP;
break;
}
}
}
};
int main() {
std::cout << "hello world" << std::endl;
ConsoleMoveDirectionReader directionReader;
MoveDirection direction = directionReader.next();
std::cout << (int)direction << std::endl;
return 0;
}
|
Move direction reader from console.
|
Move direction reader from console.
|
C++
|
mit
|
joshjcarrier/2048-cpp
|
c++
|
## Code Before:
int main() {
std::cout << "hello world" << std::endl;
return 0;
}
## Instruction:
Move direction reader from console.
## Code After:
//#include <stdio.h>
enum MoveDirection { LEFT, UP, RIGHT, DOWN };
class ConsoleMoveDirectionReader {
public:
MoveDirection next() {
while (true) {
int keyCode = std::cin.get();
if (keyCode == 97) {
return MoveDirection::LEFT;
break;
}
else if (keyCode == 115) {
return MoveDirection::DOWN;
break;
}
else if (keyCode == 100) {
return MoveDirection::RIGHT;
break;
}
else if (keyCode == 119) {
return MoveDirection::UP;
break;
}
}
}
};
int main() {
std::cout << "hello world" << std::endl;
ConsoleMoveDirectionReader directionReader;
MoveDirection direction = directionReader.next();
std::cout << (int)direction << std::endl;
return 0;
}
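Side note on the magic numbers: 97, 115, 100 and 119 are simply the ASCII codes for `a`, `s`, `d` and `w`, so the comparisons could equally use character literals. A tiny standalone sketch, not part of the commit:
```cpp
#include <iostream>

int main() {
    int keyCode = std::cin.get();
    // 'a' == 97, 's' == 115, 'd' == 100, 'w' == 119
    if (keyCode == 'a')      std::cout << "LEFT" << std::endl;
    else if (keyCode == 's') std::cout << "DOWN" << std::endl;
    else if (keyCode == 'd') std::cout << "RIGHT" << std::endl;
    else if (keyCode == 'w') std::cout << "UP" << std::endl;
    return 0;
}
```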
|
05e68b98a7d38efb95cca834e95ae35387bba730
|
static/js/entity.js
|
static/js/entity.js
|
(function () {
var $specContainer = $('#spec-container');
if (!isMobile) {
$('.section-tabs a').click(function () {
var href = $(this).attr('href');
gotoTab(href);
return false;
});
$specContainer.scrollspy({
'data-spy': 'scroll',
'data-target': '.section-tabs',
'offset': 100
})
.on('activate.changehash', function () {
var target = $('.section-tabs li.active a').attr('href');
selectTab(target);
});
function gotoTab(target) {
selectTab(target);
var $elem = $(target);
$specContainer.animate({
scrollTop: $specContainer.scrollTop() + $elem.position().top
}, 300);
}
function selectTab(target) {
var $list = $('.section-tabs li');
$list.removeClass('active');
$list.children('[href="'+target+'"]').parent('li').addClass('active');
if (location.hash != target) {
if (history.pushState) {
history.pushState({}, null, target);
} else {
location.hash = target;
}
}
}
}
}());
|
(function () {
var $body = $('body');
if (!isMobile) {
$('.section-tabs a').click(function () {
var href = $(this).attr('href');
gotoTab(href);
return false;
});
$body.scrollspy({
'data-spy': 'scroll',
'data-target': '.section-tabs',
'offset': 100
})
.on('activate.changehash', function () {
var target = $('.section-tabs li.active a').attr('href');
selectTab(target);
});
function gotoTab(target) {
selectTab(target);
var $elem = $(target);
$body.animate({
scrollTop: $elem.offset().top
}, 300);
}
function selectTab(target) {
var $list = $('.section-tabs li');
$list.removeClass('active');
$list.children('[href="'+target+'"]').parent('li').addClass('active');
if (location.hash != target) {
if (history.pushState) {
history.pushState({}, null, target);
} else {
location.hash = target;
}
}
}
}
}());
|
Fix scrollspy bug caused by layout changes
|
Fix scrollspy bug caused by layout changes
|
JavaScript
|
apache-2.0
|
teampopong/pokr.kr,teampopong/pokr.kr,teampopong/pokr.kr,teampopong/pokr.kr
|
javascript
|
## Code Before:
(function () {
var $specContainer = $('#spec-container');
if (!isMobile) {
$('.section-tabs a').click(function () {
var href = $(this).attr('href');
gotoTab(href);
return false;
});
$specContainer.scrollspy({
'data-spy': 'scroll',
'data-target': '.section-tabs',
'offset': 100
})
.on('activate.changehash', function () {
var target = $('.section-tabs li.active a').attr('href');
selectTab(target);
});
function gotoTab(target) {
selectTab(target);
var $elem = $(target);
$specContainer.animate({
scrollTop: $specContainer.scrollTop() + $elem.position().top
}, 300);
}
function selectTab(target) {
var $list = $('.section-tabs li');
$list.removeClass('active');
$list.children('[href="'+target+'"]').parent('li').addClass('active');
if (location.hash != target) {
if (history.pushState) {
history.pushState({}, null, target);
} else {
location.hash = target;
}
}
}
}
}());
## Instruction:
Fix scrollspy bug caused by layout changes
## Code After:
(function () {
var $body = $('body');
if (!isMobile) {
$('.section-tabs a').click(function () {
var href = $(this).attr('href');
gotoTab(href);
return false;
});
$body.scrollspy({
'data-spy': 'scroll',
'data-target': '.section-tabs',
'offset': 100
})
.on('activate.changehash', function () {
var target = $('.section-tabs li.active a').attr('href');
selectTab(target);
});
function gotoTab(target) {
selectTab(target);
var $elem = $(target);
$body.animate({
scrollTop: $elem.offset().top
}, 300);
}
function selectTab(target) {
var $list = $('.section-tabs li');
$list.removeClass('active');
$list.children('[href="'+target+'"]').parent('li').addClass('active');
if (location.hash != target) {
if (history.pushState) {
history.pushState({}, null, target);
} else {
location.hash = target;
}
}
}
}
}());
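The core of the fix is the switch from container-relative to document-relative coordinates: jQuery's `position()` measures against the offset parent, which stops matching the scroll position once the page body itself scrolls, while `offset()` is measured from the document. A rough standalone sketch of that pattern (the element id is illustrative):
```javascript
// Scroll the page so that #some-section sits at the top of the viewport.
var docTop = $('#some-section').offset().top; // distance from document top
$('body').animate({ scrollTop: docTop }, 300);
```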
|
b1d73df29598cb3df43425377a395c1e4cafc1db
|
src/canard.c
|
src/canard.c
|
void canardInitPoolAllocator(CanardPoolAllocator* allocator, CanardPoolAllocatorBlock* buf, unsigned int buf_len)
{
unsigned int current_index = 0;
CanardPoolAllocatorBlock** current_block = &(allocator->free_list);
while (current_index < buf_len)
{
*current_block = &buf[current_index];
current_block = &((*current_block)->next);
current_index++;
}
*current_block = NULL;
}
void* canardAllocateBlock(CanardPoolAllocator* allocator)
{
/* Check if there are any blocks available in the free list. */
if (allocator->free_list == NULL)
{
return NULL;
}
/* Take first available block and prepares next block for use. */
void* result = allocator->free_list;
allocator->free_list = allocator->free_list->next;
return result;
}
void canardFreeBlock(CanardPoolAllocator* allocator, void* p)
{
CanardPoolAllocatorBlock* block = (CanardPoolAllocatorBlock*)p;
block->next = allocator->free_list;
allocator->free_list = block;
}
|
CANARD_INTERNAL void canardInitPoolAllocator(CanardPoolAllocator* allocator, CanardPoolAllocatorBlock* buf, unsigned int buf_len)
{
unsigned int current_index = 0;
CanardPoolAllocatorBlock** current_block = &(allocator->free_list);
while (current_index < buf_len)
{
*current_block = &buf[current_index];
current_block = &((*current_block)->next);
current_index++;
}
*current_block = NULL;
}
CANARD_INTERNAL void* canardAllocateBlock(CanardPoolAllocator* allocator)
{
/* Check if there are any blocks available in the free list. */
if (allocator->free_list == NULL)
{
return NULL;
}
/* Take first available block and prepares next block for use. */
void* result = allocator->free_list;
allocator->free_list = allocator->free_list->next;
return result;
}
CANARD_INTERNAL void canardFreeBlock(CanardPoolAllocator* allocator, void* p)
{
CanardPoolAllocatorBlock* block = (CanardPoolAllocatorBlock*)p;
block->next = allocator->free_list;
allocator->free_list = block;
}
|
Apply internal API marker to declarations too
|
Apply internal API marker to declarations too
|
C
|
mit
|
UAVCAN/libcanard,UAVCAN/libcanard,antoinealb/libcanard,UAVCAN/libcanard,antoinealb/libcanard
|
c
|
## Code Before:
void canardInitPoolAllocator(CanardPoolAllocator* allocator, CanardPoolAllocatorBlock* buf, unsigned int buf_len)
{
unsigned int current_index = 0;
CanardPoolAllocatorBlock** current_block = &(allocator->free_list);
while (current_index < buf_len)
{
*current_block = &buf[current_index];
current_block = &((*current_block)->next);
current_index++;
}
*current_block = NULL;
}
void* canardAllocateBlock(CanardPoolAllocator* allocator)
{
/* Check if there are any blocks available in the free list. */
if (allocator->free_list == NULL)
{
return NULL;
}
/* Take first available block and prepares next block for use. */
void* result = allocator->free_list;
allocator->free_list = allocator->free_list->next;
return result;
}
void canardFreeBlock(CanardPoolAllocator* allocator, void* p)
{
CanardPoolAllocatorBlock* block = (CanardPoolAllocatorBlock*)p;
block->next = allocator->free_list;
allocator->free_list = block;
}
## Instruction:
Apply internal API marker to declarations too
## Code After:
CANARD_INTERNAL void canardInitPoolAllocator(CanardPoolAllocator* allocator, CanardPoolAllocatorBlock* buf, unsigned int buf_len)
{
unsigned int current_index = 0;
CanardPoolAllocatorBlock** current_block = &(allocator->free_list);
while (current_index < buf_len)
{
*current_block = &buf[current_index];
current_block = &((*current_block)->next);
current_index++;
}
*current_block = NULL;
}
CANARD_INTERNAL void* canardAllocateBlock(CanardPoolAllocator* allocator)
{
/* Check if there are any blocks available in the free list. */
if (allocator->free_list == NULL)
{
return NULL;
}
/* Take first available block and prepares next block for use. */
void* result = allocator->free_list;
allocator->free_list = allocator->free_list->next;
return result;
}
CANARD_INTERNAL void canardFreeBlock(CanardPoolAllocator* allocator, void* p)
{
CanardPoolAllocatorBlock* block = (CanardPoolAllocatorBlock*)p;
block->next = allocator->free_list;
allocator->free_list = block;
}
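For readers unfamiliar with the pattern: `CANARD_INTERNAL` is presumably a macro defined elsewhere in the library; the usual idiom (an assumption here, not the library's verbatim definition) is to let it expand to `static` for normal builds and to nothing when unit tests need to reach the internal symbols:
```c
/* Typical shape of such a marker -- illustrative, not libcanard's exact code. */
#ifndef CANARD_INTERNAL
# define CANARD_INTERNAL static
#endif

CANARD_INTERNAL int add_one(int x)   /* internal helper, hidden by default */
{
    return x + 1;
}

int main(void) { return add_one(41) == 42 ? 0 : 1; }
```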
|
c8e6e3a0b871bbfcd7e6db1b18ceac56a540d7b8
|
index.js
|
index.js
|
var WindowUtils = require("sdk/window/utils");
var windows = require("sdk/windows").browserWindows;
windows.on("open", function() {
var window = WindowUtils.getMostRecentBrowserWindow()
if (WindowUtils.isBrowser(window) && !window.fullScreen) {
window.BrowserFullScreen();
}
});
|
var WindowUtils = require("sdk/window/utils");
var windows = require("sdk/windows").browserWindows;
var { viewFor } = require("sdk/view/core");
windows.on("open", function(browserWindow) {
var chromeWindow = viewFor(browserWindow);
if (WindowUtils.isBrowser(chromeWindow) && !chromeWindow.fullScreen) {
chromeWindow.BrowserFullScreen();
}
});
|
Convert the BrowserWindow object to a chrome window object instead of using getMostRecentBrowserWindow
|
Convert the BrowserWindow object to a chrome window object instead of using getMostRecentBrowserWindow
|
JavaScript
|
mpl-2.0
|
Phoenix09/firefox-startfullscreen
|
javascript
|
## Code Before:
var WindowUtils = require("sdk/window/utils");
var windows = require("sdk/windows").browserWindows;
windows.on("open", function() {
var window = WindowUtils.getMostRecentBrowserWindow()
if (WindowUtils.isBrowser(window) && !window.fullScreen) {
window.BrowserFullScreen();
}
});
## Instruction:
Convert the BrowserWindow object to a chrome window object instead of using getMostRecentBrowserWindow
## Code After:
var WindowUtils = require("sdk/window/utils");
var windows = require("sdk/windows").browserWindows;
var { viewFor } = require("sdk/view/core");
windows.on("open", function(browserWindow) {
var chromeWindow = viewFor(browserWindow);
if (WindowUtils.isBrowser(chromeWindow) && !chromeWindow.fullScreen) {
chromeWindow.BrowserFullScreen();
}
});
|
626a301449dc771d4445a673b9f4dfe93228e9a8
|
app/assets/stylesheets/reports_kit/reports.css.sass
|
app/assets/stylesheets/reports_kit/reports.css.sass
|
.reports_kit_report
width: 100%
padding: 1px
canvas
width: 100%
.date_range_picker
width: 180px
|
.reports_kit_report
width: 100%
padding: 1px
canvas
width: 100%
max-height: 500px
.date_range_picker
width: 180px
|
Fix issue with canvas height with `maintainAspectRatio: false`
|
Fix issue with canvas height with `maintainAspectRatio: false`
|
Sass
|
mit
|
tombenner/reports_kit,tombenner/reports_kit
|
sass
|
## Code Before:
.reports_kit_report
width: 100%
padding: 1px
canvas
width: 100%
.date_range_picker
width: 180px
## Instruction:
Fix issue with canvas height with `maintainAspectRatio: false`
## Code After:
.reports_kit_report
width: 100%
padding: 1px
canvas
width: 100%
max-height: 500px
.date_range_picker
width: 180px
|
e5d037c01ac0b9be2bec4b8921f93ea273b49327
|
.github/PULL_REQUEST_TEMPLATE.md
|
.github/PULL_REQUEST_TEMPLATE.md
|
<!--- See what makes a good Pull Request at : https://github.com/terraform-providers/terraform-provider-aws/blob/master/.github/CONTRIBUTING.md#pull-requests --->
<!--- Please keep this note for the community --->
### Community Note
* Please vote on this pull request by adding a 👍 [reaction](https://blog.github.com/2016-03-10-add-reactions-to-pull-requests-issues-and-comments/) to the original pull request comment to help the community and maintainers prioritize this request
* Please do not leave "+1" comments, they generate extra noise for pull request followers and do not help prioritize the request
<!--- Thank you for keeping this note for the community --->
Fixes #0000
Release note for [CHANGELOG](https://github.com/terraform-providers/terraform-provider-aws/blob/master/CHANGELOG.md):
<!--
If change is not user facing, just write "NONE" in the release-note block below.
-->
```release-note
```
Output from acceptance testing:
```
$ make testacc TESTARGS='-run=TestAccXXX'
...
```
|
<!--- See what makes a good Pull Request at : https://github.com/terraform-providers/terraform-provider-aws/blob/master/.github/CONTRIBUTING.md#pull-requests --->
<!--- Please keep this note for the community --->
### Community Note
* Please vote on this pull request by adding a 👍 [reaction](https://blog.github.com/2016-03-10-add-reactions-to-pull-requests-issues-and-comments/) to the original pull request comment to help the community and maintainers prioritize this request
* Please do not leave "+1" comments, they generate extra noise for pull request followers and do not help prioritize the request
<!--- Thank you for keeping this note for the community --->
<!--- If your PR fully resolves and should automatically close the linked issue, use Closes. Otherwise, use Relates --->
Closes|Relates<!-- Pick only one --> #0000
Release note for [CHANGELOG](https://github.com/terraform-providers/terraform-provider-aws/blob/master/CHANGELOG.md):
<!--
If change is not user facing, just write "NONE" in the release-note block below.
-->
```release-note
```
Output from acceptance testing:
```
$ make testacc TESTARGS='-run=TestAccXXX'
...
```
|
Change the wording for related issues
|
Change the wording for related issues
To hopefully make it more clear when a related issue will be automatically closed versus left open.
|
Markdown
|
mpl-2.0
|
terraform-providers/terraform-provider-aws,kjmkznr/terraform-provider-aws,terraform-providers/terraform-provider-aws,kjmkznr/terraform-provider-aws,terraform-providers/terraform-provider-aws,kjmkznr/terraform-provider-aws,terraform-providers/terraform-provider-aws,kjmkznr/terraform-provider-aws
|
markdown
|
## Code Before:
<!--- See what makes a good Pull Request at : https://github.com/terraform-providers/terraform-provider-aws/blob/master/.github/CONTRIBUTING.md#pull-requests --->
<!--- Please keep this note for the community --->
### Community Note
* Please vote on this pull request by adding a 👍 [reaction](https://blog.github.com/2016-03-10-add-reactions-to-pull-requests-issues-and-comments/) to the original pull request comment to help the community and maintainers prioritize this request
* Please do not leave "+1" comments, they generate extra noise for pull request followers and do not help prioritize the request
<!--- Thank you for keeping this note for the community --->
Fixes #0000
Release note for [CHANGELOG](https://github.com/terraform-providers/terraform-provider-aws/blob/master/CHANGELOG.md):
<!--
If change is not user facing, just write "NONE" in the release-note block below.
-->
```release-note
```
Output from acceptance testing:
```
$ make testacc TESTARGS='-run=TestAccXXX'
...
```
## Instruction:
Change the wording for related issues
To hopefully make it more clear when a related issue will be automatically closed versus left open.
## Code After:
<!--- See what makes a good Pull Request at : https://github.com/terraform-providers/terraform-provider-aws/blob/master/.github/CONTRIBUTING.md#pull-requests --->
<!--- Please keep this note for the community --->
### Community Note
* Please vote on this pull request by adding a 👍 [reaction](https://blog.github.com/2016-03-10-add-reactions-to-pull-requests-issues-and-comments/) to the original pull request comment to help the community and maintainers prioritize this request
* Please do not leave "+1" comments, they generate extra noise for pull request followers and do not help prioritize the request
<!--- Thank you for keeping this note for the community --->
<!--- If your PR fully resolves and should automatically close the linked issue, use Closes. Otherwise, use Relates --->
Closes|Relates<!-- Pick only one --> #0000
Release note for [CHANGELOG](https://github.com/terraform-providers/terraform-provider-aws/blob/master/CHANGELOG.md):
<!--
If change is not user facing, just write "NONE" in the release-note block below.
-->
```release-note
```
Output from acceptance testing:
```
$ make testacc TESTARGS='-run=TestAccXXX'
...
```
|
6b61b01367ea660f46e54945074a587f820cd1f9
|
index.android.js
|
index.android.js
|
/**
* Sample React Native App
* https://github.com/facebook/react-native
*/
'use strict';
import React, {
AppRegistry,
Component,
StyleSheet
} from 'react-native';
import Router from 'react-native-simple-router';
import ContestListScreen from './ContestListScreen';
class JumuNordost extends Component {
render() {
return (
<Router
firstRoute={firstRoute}
headerStyle={styles.header}
/>
)
}
}
const firstRoute = {
name: 'Contests',
component: ContestListScreen
};
const styles = StyleSheet.create({
container: {
flex: 1
},
header: {
backgroundColor: '#a9a9a9',
},
});
AppRegistry.registerComponent('JumuNordost', () => JumuNordost);
|
/**
* Sample React Native App
* https://github.com/facebook/react-native
*/
'use strict';
import React, {
AppRegistry,
Component,
StyleSheet
} from 'react-native';
import Router from 'react-native-simple-router';
import ContestListScreen from './ContestListScreen';
class JumuNordost extends Component {
render() {
return (
<Router
firstRoute={firstRoute}
headerStyle={styles.header}
handleBackAndroid={true}
/>
)
}
}
const firstRoute = {
name: 'Contests',
component: ContestListScreen
};
const styles = StyleSheet.create({
container: {
flex: 1
},
header: {
backgroundColor: '#a9a9a9',
},
});
AppRegistry.registerComponent('JumuNordost', () => JumuNordost);
|
Add listener for hardware back button
|
Add listener for hardware back button
|
JavaScript
|
mit
|
richeterre/jumu-nordost-react-native,richeterre/jumu-nordost-react-native,richeterre/jumu-nordost-react-native,richeterre/jumu-nordost-react-native
|
javascript
|
## Code Before:
/**
* Sample React Native App
* https://github.com/facebook/react-native
*/
'use strict';
import React, {
AppRegistry,
Component,
StyleSheet
} from 'react-native';
import Router from 'react-native-simple-router';
import ContestListScreen from './ContestListScreen';
class JumuNordost extends Component {
render() {
return (
<Router
firstRoute={firstRoute}
headerStyle={styles.header}
/>
)
}
}
const firstRoute = {
name: 'Contests',
component: ContestListScreen
};
const styles = StyleSheet.create({
container: {
flex: 1
},
header: {
backgroundColor: '#a9a9a9',
},
});
AppRegistry.registerComponent('JumuNordost', () => JumuNordost);
## Instruction:
Add listener for hardware back button
## Code After:
/**
* Sample React Native App
* https://github.com/facebook/react-native
*/
'use strict';
import React, {
AppRegistry,
Component,
StyleSheet
} from 'react-native';
import Router from 'react-native-simple-router';
import ContestListScreen from './ContestListScreen';
class JumuNordost extends Component {
render() {
return (
<Router
firstRoute={firstRoute}
headerStyle={styles.header}
handleBackAndroid={true}
/>
)
}
}
const firstRoute = {
name: 'Contests',
component: ContestListScreen
};
const styles = StyleSheet.create({
container: {
flex: 1
},
header: {
backgroundColor: '#a9a9a9',
},
});
AppRegistry.registerComponent('JumuNordost', () => JumuNordost);
|
48c49a3b8076e6f3543aa1596f0f771bef8921e3
|
resources/port_options.rb
|
resources/port_options.rb
|
actions :create
default_action :create
attribute :name, kind_of: String, name_attribute: true
attribute :source, kind_of: String
attribute :options, kind_of: Hash
attribute :dir_path, kind_of: String
attribute :full_path, kind_of: String
attribute :default_options, kind_of: Hash, default: {}
attribute :current_options, kind_of: Hash, default: {}
attribute :file_writer, kind_of: String
def initialize(*args)
super
@dir_path = '/var/db/ports/' + name
@full_path = @dir_path + '/options'
end
|
actions :create
default_action :create
attribute :name, kind_of: String, name_attribute: true
attribute :source, kind_of: String
attribute :options, kind_of: Hash
attribute :dir_path, kind_of: String, default: lazy { |r| '/var/db/ports/' + r.name }
attribute :full_path, kind_of: String, default: lazy { |r| r.dir_path + '/options' }
attribute :default_options, kind_of: Hash, default: {}
attribute :current_options, kind_of: Hash, default: {}
attribute :file_writer, kind_of: String
|
Use lazy for the default values instead of the initializer
|
Use lazy for the default values instead of the initializer
Signed-off-by: Tim Smith <[email protected]>
|
Ruby
|
apache-2.0
|
opscode-cookbooks/freebsd,chef-cookbooks/freebsd
|
ruby
|
## Code Before:
actions :create
default_action :create
attribute :name, kind_of: String, name_attribute: true
attribute :source, kind_of: String
attribute :options, kind_of: Hash
attribute :dir_path, kind_of: String
attribute :full_path, kind_of: String
attribute :default_options, kind_of: Hash, default: {}
attribute :current_options, kind_of: Hash, default: {}
attribute :file_writer, kind_of: String
def initialize(*args)
super
@dir_path = '/var/db/ports/' + name
@full_path = @dir_path + '/options'
end
## Instruction:
Use lazy for the default values instead of the initializer
Signed-off-by: Tim Smith <[email protected]>
## Code After:
actions :create
default_action :create
attribute :name, kind_of: String, name_attribute: true
attribute :source, kind_of: String
attribute :options, kind_of: Hash
attribute :dir_path, kind_of: String, default: lazy { |r| '/var/db/ports/' + r.name }
attribute :full_path, kind_of: String, default: lazy { |r| r.dir_path + '/options' }
attribute :default_options, kind_of: Hash, default: {}
attribute :current_options, kind_of: Hash, default: {}
attribute :file_writer, kind_of: String
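The practical effect is that `dir_path` and `full_path` are now computed per resource instance from its name at converge time. A sketch of recipe usage (the resource name and options are illustrative, inferred from the cookbook layout rather than taken from the commit):
```ruby
# Hypothetical recipe usage:
freebsd_port_options 'mysql57-server' do
  options 'SSL' => true
end
# dir_path defaults to "/var/db/ports/mysql57-server"
# full_path defaults to "/var/db/ports/mysql57-server/options"
```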
|
d3db522ed34d877353ef72a31021b1401e0a117d
|
cea/interfaces/dashboard/plots/static/css/map_layout.css
|
cea/interfaces/dashboard/plots/static/css/map_layout.css
|
.row.display-flex {
display: flex;
flex-wrap: wrap;
}
.row.display-flex > [class*='col-'] {
display: flex;
flex-direction: column;
padding: 0px 5px;
}
@media (min-width: 1200px) {
#map-container {
min-width: 1000px;
}
}
.plot-widget {
min-width: 500px;
}
.plot-widget .x_panel {
height: 100%;
min-height: 500px;
}
#map-container .x_panel{
padding: 15px;
}
#map-container .x_panel .x_content{
height: 100%;
}
#map-div {
height: 100%;
}
|
.row.display-flex {
display: flex;
flex-wrap: wrap;
}
.row.display-flex > [class*='col-'] {
display: flex;
flex-direction: column;
padding: 0px 5px;
flex-grow: 1;
}
@media (min-width: 1200px) {
#map-container {
min-width: 1000px;
}
}
.plot-widget {
min-width: 500px;
}
.plot-widget .x_panel {
height: 100%;
min-height: 500px;
}
#map-container .x_panel{
padding: 15px;
}
#map-container .x_panel .x_content{
height: 100%;
}
#map-div {
height: 100%;
}
|
Allow plot widget to grow
|
Allow plot widget to grow
|
CSS
|
mit
|
architecture-building-systems/CityEnergyAnalyst,architecture-building-systems/CityEnergyAnalyst,architecture-building-systems/CityEnergyAnalyst
|
css
|
## Code Before:
.row.display-flex {
display: flex;
flex-wrap: wrap;
}
.row.display-flex > [class*='col-'] {
display: flex;
flex-direction: column;
padding: 0px 5px;
}
@media (min-width: 1200px) {
#map-container {
min-width: 1000px;
}
}
.plot-widget {
min-width: 500px;
}
.plot-widget .x_panel {
height: 100%;
min-height: 500px;
}
#map-container .x_panel{
padding: 15px;
}
#map-container .x_panel .x_content{
height: 100%;
}
#map-div {
height: 100%;
}
## Instruction:
Allow plot widget to grow
## Code After:
.row.display-flex {
display: flex;
flex-wrap: wrap;
}
.row.display-flex > [class*='col-'] {
display: flex;
flex-direction: column;
padding: 0px 5px;
flex-grow: 1;
}
@media (min-width: 1200px) {
#map-container {
min-width: 1000px;
}
}
.plot-widget {
min-width: 500px;
}
.plot-widget .x_panel {
height: 100%;
min-height: 500px;
}
#map-container .x_panel{
padding: 15px;
}
#map-container .x_panel .x_content{
height: 100%;
}
#map-div {
height: 100%;
}
|
912eb7950cc93d984d2c29149c29775de3a0e234
|
Editor/CMakeLists.txt
|
Editor/CMakeLists.txt
|
cmake_minimum_required(VERSION 3.10)
set(CMAKE_INCLUDE_CURRENT_DIR ON)
set(CMAKE_AUTOMOC ON)
set(CMAKE_AUTOUIC ON)
find_package(Qt5Widgets)
find_package(Qt5Network)
find_package(Qt5Xml)
add_subdirectory(platforms/freedesktop)
declare_qticon_target()
list(APPEND PLAT_SRCS mainicon_qt.cpp)
add_executable(amuse-gui WIN32 MACOSX_BUNDLE
MainWindow.ui MainWindow.hpp MainWindow.cpp
${PLAT_SRCS}
main.cpp)
set_target_properties(amuse-gui PROPERTIES
MACOSX_BUNDLE_INFO_PLIST "${CMAKE_CURRENT_SOURCE_DIR}/platforms/mac/Info.plist")
target_link_libraries(amuse-gui ${PLAT_LIBS}
${Qt5Widgets_LIBRARIES}
${Qt5Network_LIBRARIES}
${Qt5Xml_LIBRARIES}
boo logvisor zeus athena-core athena-libyaml xxhash z)
|
cmake_minimum_required(VERSION 3.10)
set(CMAKE_INCLUDE_CURRENT_DIR ON)
set(CMAKE_AUTOMOC ON)
set(CMAKE_AUTOUIC ON)
find_package(Qt5Widgets)
find_package(Qt5Network)
find_package(Qt5Xml)
if(WIN32)
list(APPEND PLAT_SRCS platforms/win/amuse-gui.rc platforms/win/amuse-gui.manifest)
elseif(APPLE)
list(APPEND PLAT_SRCS platforms/mac/mainicon.icns)
set_source_files_properties(platforms/mac/mainicon.icns PROPERTIES
MACOSX_PACKAGE_LOCATION Resources)
endif()
add_subdirectory(platforms/freedesktop)
declare_qticon_target()
list(APPEND PLAT_SRCS mainicon_qt.cpp)
add_executable(amuse-gui WIN32 MACOSX_BUNDLE
MainWindow.ui MainWindow.hpp MainWindow.cpp
${PLAT_SRCS}
main.cpp)
set_target_properties(amuse-gui PROPERTIES
MACOSX_BUNDLE_INFO_PLIST "${CMAKE_CURRENT_SOURCE_DIR}/platforms/mac/Info.plist")
target_link_libraries(amuse-gui ${PLAT_LIBS}
${Qt5Widgets_LIBRARIES}
${Qt5Network_LIBRARIES}
${Qt5Xml_LIBRARIES}
boo logvisor zeus athena-core athena-libyaml xxhash z)
|
Add amuse-gui .rc in CMake
|
Add amuse-gui .rc in CMake
|
Text
|
mit
|
AxioDL/amuse,AxioDL/amuse,AxioDL/amuse
|
text
|
## Code Before:
cmake_minimum_required(VERSION 3.10)
set(CMAKE_INCLUDE_CURRENT_DIR ON)
set(CMAKE_AUTOMOC ON)
set(CMAKE_AUTOUIC ON)
find_package(Qt5Widgets)
find_package(Qt5Network)
find_package(Qt5Xml)
add_subdirectory(platforms/freedesktop)
declare_qticon_target()
list(APPEND PLAT_SRCS mainicon_qt.cpp)
add_executable(amuse-gui WIN32 MACOSX_BUNDLE
MainWindow.ui MainWindow.hpp MainWindow.cpp
${PLAT_SRCS}
main.cpp)
set_target_properties(amuse-gui PROPERTIES
MACOSX_BUNDLE_INFO_PLIST "${CMAKE_CURRENT_SOURCE_DIR}/platforms/mac/Info.plist")
target_link_libraries(amuse-gui ${PLAT_LIBS}
${Qt5Widgets_LIBRARIES}
${Qt5Network_LIBRARIES}
${Qt5Xml_LIBRARIES}
boo logvisor zeus athena-core athena-libyaml xxhash z)
## Instruction:
Add amuse-gui .rc in CMake
## Code After:
cmake_minimum_required(VERSION 3.10)
set(CMAKE_INCLUDE_CURRENT_DIR ON)
set(CMAKE_AUTOMOC ON)
set(CMAKE_AUTOUIC ON)
find_package(Qt5Widgets)
find_package(Qt5Network)
find_package(Qt5Xml)
if(WIN32)
list(APPEND PLAT_SRCS platforms/win/amuse-gui.rc platforms/win/amuse-gui.manifest)
elseif(APPLE)
list(APPEND PLAT_SRCS platforms/mac/mainicon.icns)
set_source_files_properties(platforms/mac/mainicon.icns PROPERTIES
MACOSX_PACKAGE_LOCATION Resources)
endif()
add_subdirectory(platforms/freedesktop)
declare_qticon_target()
list(APPEND PLAT_SRCS mainicon_qt.cpp)
add_executable(amuse-gui WIN32 MACOSX_BUNDLE
MainWindow.ui MainWindow.hpp MainWindow.cpp
${PLAT_SRCS}
main.cpp)
set_target_properties(amuse-gui PROPERTIES
MACOSX_BUNDLE_INFO_PLIST "${CMAKE_CURRENT_SOURCE_DIR}/platforms/mac/Info.plist")
target_link_libraries(amuse-gui ${PLAT_LIBS}
${Qt5Widgets_LIBRARIES}
${Qt5Network_LIBRARIES}
${Qt5Xml_LIBRARIES}
boo logvisor zeus athena-core athena-libyaml xxhash z)
|
8ba6415fe1cb55001ae589970f08612e4de7631c
|
kernel/common/codeloader19.rb
|
kernel/common/codeloader19.rb
|
module Rubinius
class CodeLoader
# Searches $LOAD_PATH for a file named +name+. Does not append any file
# extension to +name+ while searching. Used by #load to resolve the name
# to a full path to load. Also used by #require when the file extension is
# provided.
def search_load_path(name, loading)
$LOAD_PATH.each do |dir|
path = "#{dir}/#{name}"
return path if loadable? path
end
return nil
end
# requires files relative to the current directory. We do one interesting
# check to make sure it's not called inside of an eval.
def self.require_relative(name, scope)
script = scope.current_script
if script
require File.expand_path(name, File.dirname(script.data_path))
else
raise LoadError.new "Something is wrong in trying to get relative path"
end
end
end
end
|
module Rubinius
class CodeLoader
# Searches $LOAD_PATH for a file named +name+. Does not append any file
# extension to +name+ while searching. Used by #load to resolve the name
# to a full path to load. Also used by #require when the file extension is
# provided.
def search_load_path(name, loading)
$LOAD_PATH.each do |dir|
path = "#{dir}/#{name}"
return path if loadable? path
end
return name if loading and loadable? "./#{name}"
return nil
end
# requires files relative to the current directory. We do one interesting
# check to make sure it's not called inside of an eval.
def self.require_relative(name, scope)
script = scope.current_script
if script
require File.expand_path(name, File.dirname(script.data_path))
else
raise LoadError.new "Something is wrong in trying to get relative path"
end
end
end
end
|
Add back load code lost in require_relative commit.
|
Add back load code lost in require_relative commit.
|
Ruby
|
bsd-3-clause
|
dblock/rubinius,ngpestelos/rubinius,jemc/rubinius,heftig/rubinius,Azizou/rubinius,ngpestelos/rubinius,pH14/rubinius,Azizou/rubinius,travis-repos/rubinius,heftig/rubinius,dblock/rubinius,digitalextremist/rubinius,sferik/rubinius,ngpestelos/rubinius,travis-repos/rubinius,pH14/rubinius,heftig/rubinius,benlovell/rubinius,jemc/rubinius,mlarraz/rubinius,dblock/rubinius,jsyeo/rubinius,Wirachmat/rubinius,benlovell/rubinius,Azizou/rubinius,pH14/rubinius,travis-repos/rubinius,lgierth/rubinius,ruipserra/rubinius,jsyeo/rubinius,jemc/rubinius,dblock/rubinius,sferik/rubinius,benlovell/rubinius,ruipserra/rubinius,slawosz/rubinius,travis-repos/rubinius,heftig/rubinius,jsyeo/rubinius,heftig/rubinius,digitalextremist/rubinius,heftig/rubinius,pH14/rubinius,benlovell/rubinius,jemc/rubinius,kachick/rubinius,lgierth/rubinius,digitalextremist/rubinius,digitalextremist/rubinius,sferik/rubinius,ruipserra/rubinius,dblock/rubinius,slawosz/rubinius,slawosz/rubinius,pH14/rubinius,jsyeo/rubinius,kachick/rubinius,sferik/rubinius,pH14/rubinius,benlovell/rubinius,Wirachmat/rubinius,jemc/rubinius,Wirachmat/rubinius,ruipserra/rubinius,travis-repos/rubinius,Azizou/rubinius,Azizou/rubinius,travis-repos/rubinius,slawosz/rubinius,jemc/rubinius,ruipserra/rubinius,benlovell/rubinius,heftig/rubinius,ngpestelos/rubinius,dblock/rubinius,kachick/rubinius,ngpestelos/rubinius,mlarraz/rubinius,Azizou/rubinius,mlarraz/rubinius,pH14/rubinius,lgierth/rubinius,mlarraz/rubinius,lgierth/rubinius,lgierth/rubinius,jsyeo/rubinius,ruipserra/rubinius,mlarraz/rubinius,mlarraz/rubinius,slawosz/rubinius,Azizou/rubinius,kachick/rubinius,digitalextremist/rubinius,jsyeo/rubinius,Wirachmat/rubinius,digitalextremist/rubinius,sferik/rubinius,travis-repos/rubinius,jsyeo/rubinius,kachick/rubinius,ngpestelos/rubinius,Wirachmat/rubinius,benlovell/rubinius,digitalextremist/rubinius,lgierth/rubinius,sferik/rubinius,Wirachmat/rubinius,mlarraz/rubinius,kachick/rubinius,kachick/rubinius,Wirachmat/rubinius,slawosz/rubinius,ngpestelos/rubinius,dblock/rubinius,ruipserra/rubinius,sferik/rubinius,lgierth/rubinius,slawosz/rubinius,jemc/rubinius,kachick/rubinius
|
ruby
|
## Code Before:
module Rubinius
class CodeLoader
# Searches $LOAD_PATH for a file named +name+. Does not append any file
# extension to +name+ while searching. Used by #load to resolve the name
# to a full path to load. Also used by #require when the file extension is
# provided.
def search_load_path(name, loading)
$LOAD_PATH.each do |dir|
path = "#{dir}/#{name}"
return path if loadable? path
end
return nil
end
# requires files relative to the current directory. We do one interesting
# check to make sure it's not called inside of an eval.
def self.require_relative(name, scope)
script = scope.current_script
if script
require File.expand_path(name, File.dirname(script.data_path))
else
raise LoadError.new "Something is wrong in trying to get relative path"
end
end
end
end
## Instruction:
Add back load code lost in require_relative commit.
## Code After:
module Rubinius
class CodeLoader
# Searches $LOAD_PATH for a file named +name+. Does not append any file
# extension to +name+ while searching. Used by #load to resolve the name
# to a full path to load. Also used by #require when the file extension is
# provided.
def search_load_path(name, loading)
$LOAD_PATH.each do |dir|
path = "#{dir}/#{name}"
return path if loadable? path
end
return name if loading and loadable? "./#{name}"
return nil
end
# requires files relative to the current directory. We do one interesting
# check to make sure it's not called inside of an eval.
def self.require_relative(name, scope)
script = scope.current_script
if script
require File.expand_path(name, File.dirname(script.data_path))
else
raise LoadError.new "Something is wrong in trying to get relative path"
end
end
end
end
|
bf39ed4ed9d58d1734f715fd08bb62ba65127254
|
NaPTAN-Rail/04-Model-05-iNterpret/run.sh
|
NaPTAN-Rail/04-Model-05-iNterpret/run.sh
|
echo "Type Count" > StopTypes.tsv
< StopPoint.ndjson jq -c -r '.StopClassification.StopType' | ./suniq.sh >> StopTypes.tsv
< StopPoint.ndjson jq -c '{AtcoCode} + (.Descriptor | {CommonName, Street, Indicator} ) + (.Place.Location.Translation | {lat: .Latitude, lon: .Longitude}) + (.Place | {node: .NptgLocalityRef}) + (.StopClassification | {stoptype: .StopType}) | del(.[] | select(. == null))' > naptandata.ndjson
< naptandata.ndjson jq -c -s -r '.' > naptandata.json
|
echo "Type Count" > StopTypes.tsv
< StopPoint.ndjson jq -c -r '.StopClassification.StopType' | sort - | uniq -c | sort -rn | sed 's/^ *\([0-9][0-9]*\) \(.*\)$/\2\t\1/' >> StopTypes.tsv
< StopPoint.ndjson jq -c '{AtcoCode} + (.Descriptor | {CommonName, Street, Indicator} ) + (.Place.Location.Translation | {lat: .Latitude, lon: .Longitude}) + (.Place | {node: .NptgLocalityRef}) + (.StopClassification | {stoptype: .StopType}) | del(.[] | select(. == null))' > naptandata.ndjson
< naptandata.ndjson jq -c -s -r '.' > naptandata.json
|
Update to remove reference to suniq.sh script
|
Update to remove reference to suniq.sh script
|
Shell
|
mit
|
anisotropi4/wren,anisotropi4/wren,anisotropi4/wren,anisotropi4/wren
|
shell
|
## Code Before:
echo "Type Count" > StopTypes.tsv
< StopPoint.ndjson jq -c -r '.StopClassification.StopType' | ./suniq.sh >> StopTypes.tsv
< StopPoint.ndjson jq -c '{AtcoCode} + (.Descriptor | {CommonName, Street, Indicator} ) + (.Place.Location.Translation | {lat: .Latitude, lon: .Longitude}) + (.Place | {node: .NptgLocalityRef}) + (.StopClassification | {stoptype: .StopType}) | del(.[] | select(. == null))' > naptandata.ndjson
< naptandata.ndjson jq -c -s -r '.' > naptandata.json
## Instruction:
Update to remove reference to suniq.sh script
## Code After:
echo "Type Count" > StopTypes.tsv
< StopPoint.ndjson jq -c -r '.StopClassification.StopType' | sort - | uniq -c | sort -rn | sed 's/^ *\([0-9][0-9]*\) \(.*\)$/\2\t\1/' >> StopTypes.tsv
< StopPoint.ndjson jq -c '{AtcoCode} + (.Descriptor | {CommonName, Street, Indicator} ) + (.Place.Location.Translation | {lat: .Latitude, lon: .Longitude}) + (.Place | {node: .NptgLocalityRef}) + (.StopClassification | {stoptype: .StopType}) | del(.[] | select(. == null))' > naptandata.ndjson
< naptandata.ndjson jq -c -s -r '.' > naptandata.json
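To see what the inlined pipeline does, here is a toy run with made-up stop types; it emits one `Type<TAB>Count` line per value, most frequent first, matching the header written above (with GNU sed, `\t` in the replacement is a literal tab):
```bash
printf 'BCT\nRLY\nBCT\n' | sort - | uniq -c | sort -rn \
  | sed 's/^ *\([0-9][0-9]*\) \(.*\)$/\2\t\1/'
# BCT<TAB>2
# RLY<TAB>1
```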
|
83365dc3fd1d21c43dabb43ffb8c2101a8335ac9
|
docs/getting_started/ssh_users.rst
|
docs/getting_started/ssh_users.rst
|
Adding SSH Users
================
If you want to add more users to the servers, update the
``inventory/groups_vars/all/users.yml`` file. Below is an example. Each public
ssh key should be on a single line.
.. warning:: All users added in this file will have root access via ``sudo``.
.. code-block:: yaml
---
users:
- name: user1
enable: 1
pubkeys:
- ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABA.....
- name: user2
enable: 1
pubkeys:
- ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAABA......
|
Adding SSH Users
================
If you want to add more users to the servers, create a file (e.g. ``users.yml``).
Below is an example. Each public ssh key should be on a single line. The ``users.yml``
file will need to be passed to ``ansible-playbook`` with ``-e @users.yml``.
.. warning:: All users added in this file will have root access via ``sudo``.
.. code-block:: yaml
---
users:
- name: user1
enable: 1
pubkeys:
- ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABA.....
- name: user2
enable: 1
pubkeys:
- ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAABA......
|
Update docs for adding SSH users
|
Update docs for adding SSH users
|
reStructuredText
|
apache-2.0
|
kindlyops/microservices-infrastructure,revpoint/microservices-infrastructure,eirslett/microservices-infrastructure,ilboud/microservices-infrastructure,mantl/mantl,chrislovecnm/microservices-infrastructure,phnmnl/mantl,huodon/microservices-infrastructure,z00223295/microservices-infrastructure,liangyali/microservices-infrastructure,CiscoCloud/microservices-infrastructure,CiscoCloud/microservices-infrastructure,mehulsbhatt/microservices-infrastructure,sudhirpandey/microservices-infrastructure,gtcno/microservices-infrastructure,abn/microservices-infrastructure,sudhirpandey/microservices-infrastructure,linearregression/microservices-infrastructure,TeaBough/microservices-infrastructure,CiscoCloud/mantl,liangyali/microservices-infrastructure,ianscrivener/microservices-infrastructure,chrislovecnm/microservices-infrastructure,ianscrivener/microservices-infrastructure,z00223295/microservices-infrastructure,Parkayun/microservices-infrastructure,heww/microservices-infrastructure,SillyMoo/microservices-infrastructure,ianscrivener/microservices-infrastructure,mehulsbhatt/microservices-infrastructure,ddONGzaru/microservices-infrastructure,arminc/microservices-infrastructure,ContainerSolutions/microservices-infrastructure,kindlyops/microservices-infrastructure,ianscrivener/microservices-infrastructure,huodon/microservices-infrastructure,eirslett/microservices-infrastructure,ddONGzaru/microservices-infrastructure,ludovicc/microservices-infrastructure,bitium/mantl,mantl/mantl,remmelt/microservices-infrastructure,sehqlr/mantl,arminc/microservices-infrastructure,z00223295/microservices-infrastructure,chrislovecnm/microservices-infrastructure,abn/microservices-infrastructure,sehqlr/mantl,benschumacher/microservices-infrastructure,chrislovecnm/microservices-infrastructure,futuro/microservices-infrastructure,SillyMoo/microservices-infrastructure,noelbk/microservices-infrastructure,TeaBough/microservices-infrastructure,phnmnl/mantl,KaGeN101/mantl,benschumacher/microservices-infrastructure,remmelt/microservices-infrastructure,ContainerSolutions/microservices-infrastructure,ilboud/microservices-infrastructure,kenjones-cisco/microservices-infrastructure,Parkayun/microservices-infrastructure,eirslett/microservices-infrastructure,ianscrivener/microservices-infrastructure,kenjones-cisco/microservices-infrastructure,huodon/microservices-infrastructure,datascienceinc/mantl,huodon/microservices-infrastructure,linearregression/microservices-infrastructure,gtcno/microservices-infrastructure,ludovicc/microservices-infrastructure,linearregression/microservices-infrastructure,datascienceinc/mantl,arminc/microservices-infrastructure,benschumacher/microservices-infrastructure,CiscoCloud/mantl,revpoint/microservices-infrastructure,mehulsbhatt/microservices-infrastructure,revpoint/microservices-infrastructure,futuro/microservices-infrastructure,KaGeN101/mantl,abn/microservices-infrastructure,benschumacher/microservices-infrastructure,chrislovecnm/microservices-infrastructure,ilboud/microservices-infrastructure,benschumacher/microservices-infrastructure,noelbk/microservices-infrastructure,bitium/mantl,heww/microservices-infrastructure,huodon/microservices-infrastructure
|
restructuredtext
|
## Code Before:
Adding SSH Users
================
If you want to add more users to the servers, update the
``inventory/groups_vars/all/users.yml`` file. Below is an example. Each public
ssh key should be on a single line.
.. warning:: All users added in this file will have root access via ``sudo``.
.. code-block:: yaml
---
users:
- name: user1
enable: 1
pubkeys:
- ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABA.....
- name: user2
enable: 1
pubkeys:
- ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAABA......
## Instruction:
Update docs for adding SSH users
## Code After:
Adding SSH Users
================
If you want to add more users to the servers, create a file (e.g. ``users.yml``).
Below is an example. Each public ssh key should be on a single line. The ``users.yml``
file will need to be passed to ``ansible-playbook`` with ``-e @users.yml``.
.. warning:: All users added in this file will have root access via ``sudo``.
.. code-block:: yaml
---
users:
- name: user1
enable: 1
pubkeys:
- ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABA.....
- name: user2
enable: 1
pubkeys:
- ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAABA......
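A minimal sketch of that invocation follows; the inventory and playbook names are placeholders rather than values taken from this repository.
.. code-block:: bash
   ansible-playbook -i your_inventory -e @users.yml your_playbook.yml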
|
5a1ffe1724805dc5be52d283e335e08e5770ffaf
|
examples/consumer-group.rb
|
examples/consumer-group.rb
|
$LOAD_PATH.unshift(File.expand_path("../../lib", __FILE__))
require "kafka"
logger = Logger.new(STDOUT)
brokers = ENV.fetch("KAFKA_BROKERS", "localhost:9092").split(",")
# Make sure to create this topic in your Kafka cluster or configure the
# cluster to auto-create topics.
topic = "text"
kafka = Kafka.new(
seed_brokers: brokers,
client_id: "test",
socket_timeout: 20,
logger: logger,
)
consumer = kafka.consumer(group_id: "test")
consumer.subscribe(topic)
trap("TERM") { consumer.stop }
trap("INT") { consumer.stop }
consumer.each_message do |message|
puts message.value
end
|
$LOAD_PATH.unshift(File.expand_path("../../lib", __FILE__))
require "kafka"
logger = Logger.new(STDOUT)
brokers = ENV.fetch("KAFKA_BROKERS", "localhost:9092").split(",")
# Make sure to create this topic in your Kafka cluster or configure the
# cluster to auto-create topics.
topic = "text"
kafka = Kafka.new(
seed_brokers: brokers,
client_id: "test",
socket_timeout: 20,
logger: logger,
)
consumer = kafka.consumer(group_id: "test")
consumer.subscribe(topic)
trap("TERM") { consumer.stop }
trap("INT") { consumer.stop }
begin
consumer.each_message do |message|
raise "balls"
puts message.value
end
rescue Kafka::ProcessingError => e
warn "Got #{e.cause}"
consumer.pause(e.topic, e.partition, timeout: 20)
retry
end
|
Handle processing errors in the consumer example
|
Handle processing errors in the consumer example
|
Ruby
|
apache-2.0
|
zendesk/ruby-kafka,zendesk/ruby-kafka
|
ruby
|
## Code Before:
$LOAD_PATH.unshift(File.expand_path("../../lib", __FILE__))
require "kafka"
logger = Logger.new(STDOUT)
brokers = ENV.fetch("KAFKA_BROKERS", "localhost:9092").split(",")
# Make sure to create this topic in your Kafka cluster or configure the
# cluster to auto-create topics.
topic = "text"
kafka = Kafka.new(
seed_brokers: brokers,
client_id: "test",
socket_timeout: 20,
logger: logger,
)
consumer = kafka.consumer(group_id: "test")
consumer.subscribe(topic)
trap("TERM") { consumer.stop }
trap("INT") { consumer.stop }
consumer.each_message do |message|
puts message.value
end
## Instruction:
Handle processing errors in the consumer example
## Code After:
$LOAD_PATH.unshift(File.expand_path("../../lib", __FILE__))
require "kafka"
logger = Logger.new(STDOUT)
brokers = ENV.fetch("KAFKA_BROKERS", "localhost:9092").split(",")
# Make sure to create this topic in your Kafka cluster or configure the
# cluster to auto-create topics.
topic = "text"
kafka = Kafka.new(
seed_brokers: brokers,
client_id: "test",
socket_timeout: 20,
logger: logger,
)
consumer = kafka.consumer(group_id: "test")
consumer.subscribe(topic)
trap("TERM") { consumer.stop }
trap("INT") { consumer.stop }
begin
consumer.each_message do |message|
raise "balls"
puts message.value
end
rescue Kafka::ProcessingError => e
warn "Got #{e.cause}"
consumer.pause(e.topic, e.partition, timeout: 20)
retry
end
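The rescue block above pauses the failing partition for a fixed 20 seconds and retries indefinitely. A hedged variation backs off the pause window as failures repeat; `process` is a placeholder for real message handling, and the 300-second cap is an arbitrary choice, not part of the library.
attempts = Hash.new(0)
begin
  consumer.each_message do |message|
    process(message)  # placeholder for real work
  end
rescue Kafka::ProcessingError => e
  key = [e.topic, e.partition]
  attempts[key] += 1
  timeout = [20 * attempts[key], 300].min  # grow the pause, capped at 5 minutes
  warn "Got #{e.cause}; pausing #{e.topic}/#{e.partition} for #{timeout}s"
  consumer.pause(e.topic, e.partition, timeout: timeout)
  retry
end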
|
40d18da4f722a2b0749863be39ba92d4e439e4c4
|
.travis.yml
|
.travis.yml
|
dist: bionic
language:
- generic
before_install:
- docker-compose up -d
- docker ps
- docker images
script:
- CHTSH_URL=http://localhost:8002 bash tests/run-tests.sh
|
dist: bionic
language:
- generic
before_install:
- docker-compose up -d
- docker ps
- docker images
script:
- curl http://localhost:8002
- CHTSH_URL=http://localhost:8002 bash tests/run-tests.sh
|
Test server is up with `curl` before running tests
|
Test server is up with `curl` before running tests
|
YAML
|
mit
|
chubin/cheat.sh,chubin/cheat.sh,chubin/cheat.sh,chubin/cheat.sh
|
yaml
|
## Code Before:
dist: bionic
language:
- generic
before_install:
- docker-compose up -d
- docker ps
- docker images
script:
- CHTSH_URL=http://localhost:8002 bash tests/run-tests.sh
## Instruction:
Test server is up with `curl` before running tests
## Code After:
dist: bionic
language:
- generic
before_install:
- docker-compose up -d
- docker ps
- docker images
script:
- curl http://localhost:8002
- CHTSH_URL=http://localhost:8002 bash tests/run-tests.sh
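If the docker-compose services are still starting when the script phase begins, a bare curl can fail immediately. A hedged variant retries the health check; `--retry-connrefused` needs curl 7.52 or newer, which the bionic image should satisfy, though that is an assumption.
script:
  - curl --retry 10 --retry-delay 3 --retry-connrefused http://localhost:8002   # wait for the service to listen
  - CHTSH_URL=http://localhost:8002 bash tests/run-tests.sh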
|
9bb1ec0b3d519e44ee6d83130bdca7c6a7388d62
|
appveyor.yml
|
appveyor.yml
|
version: 2.5.1.0-extended.{build}
branches:
only:
- extended
assembly_info:
patch: true
file: AssemblyInfo.*
assembly_version: "2.5.1.{build}"
assembly_file_version: "{version}"
assembly_informational_version: "{version}"
build:
project: ElectronicObserver.sln
before_build:
- git.exe submodule update --init -- "DockPanelSuite"
- nuget restore
- ps: (Get-Content ElectronicObserver\Utility\SoftwareInformation.cs -Encoding UTF8).replace("<BUILD_VERSION>", "$env:APPVEYOR_BUILD_VERSION") | Set-Content ElectronicObserver\Utility\SoftwareInformation.cs -Encoding UTF8
artifacts:
- path: output
name: 74eoe-$(APPVEYOR_BUILD_VERSION)
|
version: 2.5.1.0-extended.{build}
branches:
only:
- extended
assembly_info:
patch: true
file: AssemblyInfo.*
assembly_version: "2.5.1.{build}"
assembly_file_version: "{version}"
assembly_informational_version: "{version}"
build:
project: ElectronicObserver.sln
before_build:
- git.exe submodule update --init -- "DockPanelSuite"
- nuget restore
- ps: (Get-Content ElectronicObserver\Utility\SoftwareInformation.cs -Encoding UTF8).replace("<BUILD_VERSION>", "$env:APPVEYOR_BUILD_VERSION") | Set-Content ElectronicObserver\Utility\SoftwareInformation.cs -Encoding UTF8
before_package:
- mkdir output\Licenses
- copy Licenses\*.* output\Licenses\
- copy LICENSE output\LICENSE
artifacts:
- path: output
name: 74eoe-$(APPVEYOR_BUILD_VERSION)
|
Package license files with CI output
|
Package license files with CI output
|
YAML
|
mit
|
CAWAS/ElectronicObserverExtended,CNA-Bld/EOE-Sandbox
|
yaml
|
## Code Before:
version: 2.5.1.0-extended.{build}
branches:
only:
- extended
assembly_info:
patch: true
file: AssemblyInfo.*
assembly_version: "2.5.1.{build}"
assembly_file_version: "{version}"
assembly_informational_version: "{version}"
build:
project: ElectronicObserver.sln
before_build:
- git.exe submodule update --init -- "DockPanelSuite"
- nuget restore
- ps: (Get-Content ElectronicObserver\Utility\SoftwareInformation.cs -Encoding UTF8).replace("<BUILD_VERSION>", "$env:APPVEYOR_BUILD_VERSION") | Set-Content ElectronicObserver\Utility\SoftwareInformation.cs -Encoding UTF8
artifacts:
- path: output
name: 74eoe-$(APPVEYOR_BUILD_VERSION)
## Instruction:
Package license files with CI output
## Code After:
version: 2.5.1.0-extended.{build}
branches:
only:
- extended
assembly_info:
patch: true
file: AssemblyInfo.*
assembly_version: "2.5.1.{build}"
assembly_file_version: "{version}"
assembly_informational_version: "{version}"
build:
project: ElectronicObserver.sln
before_build:
- git.exe submodule update --init -- "DockPanelSuite"
- nuget restore
- ps: (Get-Content ElectronicObserver\Utility\SoftwareInformation.cs -Encoding UTF8).replace("<BUILD_VERSION>", "$env:APPVEYOR_BUILD_VERSION") | Set-Content ElectronicObserver\Utility\SoftwareInformation.cs -Encoding UTF8
before_package:
- mkdir output\Licenses
- copy Licenses\*.* output\Licenses\
- copy LICENSE output\LICENSE
artifacts:
- path: output
name: 74eoe-$(APPVEYOR_BUILD_VERSION)
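The mkdir/copy lines run through AppVeyor's default cmd shell; the same step could be written with PowerShell, as the before_build section already does. A hedged equivalent sketch:
before_package:
  - ps: New-Item -ItemType Directory -Force output\Licenses | Out-Null   # create target dir, ignore if it exists
  - ps: Copy-Item Licenses\*.* output\Licenses\
  - ps: Copy-Item LICENSE output\LICENSE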
|
058809cb25c088ea22aad3b69156efc0b183fa13
|
app/assets/stylesheets/_ukhpi-header.scss
|
app/assets/stylesheets/_ukhpi-header.scss
|
/* Styles for UKHPI header component */
.c-hmlr-header-colours {
color: $black;
background-color: white;
}
#global-header {
&.c-hmlr-header {
@extend .c-hmlr-header-colours;
a, nav {
@extend .c-hmlr-header-colours;
}
}
.header-wrapper {
@extend .c-hmlr-header-colours;
padding-bottom: 0;
}
.header-proposition #proposition-name {
@extend .c-hmlr-header-colours;
}
.header-proposition #proposition-links a {
@extend .c-hmlr-header-colours;
}
.header-wrapper .header-global .header-logo .content img {
margin-top: 14px;
height: 40px;
}
}
.o-lr-top-bar {
height: 5px;
}
.u-active {
border-bottom: 2px solid $black;
}
.o-secondary-banner {
display: flex;
flex-direction: row;
justify-content: space-between;
}
|
/* Styles for UKHPI header component */
.c-hmlr-header-colours {
color: $black;
background-color: white;
}
#global-header {
&.c-hmlr-header {
@extend .c-hmlr-header-colours;
a, nav {
@extend .c-hmlr-header-colours;
}
}
.header-wrapper {
@extend .c-hmlr-header-colours;
padding-bottom: 0;
}
.header-proposition #proposition-name {
@extend .c-hmlr-header-colours;
}
.header-proposition #proposition-links a {
@extend .c-hmlr-header-colours;
}
.header-wrapper .header-global .header-logo .content img {
margin-top: 14px;
height: 40px;
}
}
.o-lr-top-bar {
height: 5px;
}
.u-active {
border-bottom: 2px solid $black;
}
.o-secondary-banner {
display: flex;
flex-direction: row;
justify-content: end;
}
|
Fix right-alignment of language selector
|
Fix right-alignment of language selector
|
SCSS
|
mit
|
epimorphics/ukhpi,epimorphics/ukhpi,epimorphics/ukhpi,epimorphics/ukhpi
|
scss
|
## Code Before:
/* Styles for UKHPI header component */
.c-hmlr-header-colours {
color: $black;
background-color: white;
}
#global-header {
&.c-hmlr-header {
@extend .c-hmlr-header-colours;
a, nav {
@extend .c-hmlr-header-colours;
}
}
.header-wrapper {
@extend .c-hmlr-header-colours;
padding-bottom: 0;
}
.header-proposition #proposition-name {
@extend .c-hmlr-header-colours;
}
.header-proposition #proposition-links a {
@extend .c-hmlr-header-colours;
}
.header-wrapper .header-global .header-logo .content img {
margin-top: 14px;
height: 40px;
}
}
.o-lr-top-bar {
height: 5px;
}
.u-active {
border-bottom: 2px solid $black;
}
.o-secondary-banner {
display: flex;
flex-direction: row;
justify-content: space-between;
}
## Instruction:
Fix right-alignment of language selector
## Code After:
/* Styles for UKHPI header component */
.c-hmlr-header-colours {
color: $black;
background-color: white;
}
#global-header {
&.c-hmlr-header {
@extend .c-hmlr-header-colours;
a, nav {
@extend .c-hmlr-header-colours;
}
}
.header-wrapper {
@extend .c-hmlr-header-colours;
padding-bottom: 0;
}
.header-proposition #proposition-name {
@extend .c-hmlr-header-colours;
}
.header-proposition #proposition-links a {
@extend .c-hmlr-header-colours;
}
.header-wrapper .header-global .header-logo .content img {
margin-top: 14px;
height: 40px;
}
}
.o-lr-top-bar {
height: 5px;
}
.u-active {
border-bottom: 2px solid $black;
}
.o-secondary-banner {
display: flex;
flex-direction: row;
justify-content: end;
}
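`justify-content: end` is the newer CSS Box Alignment keyword; some older browsers only recognise the flexbox-specific spelling inside a flex container. A hedged alternative if wider support matters:
.o-secondary-banner {
  display: flex;
  flex-direction: row;
  justify-content: flex-end; // flexbox keyword with broader support than `end`
}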
|
85d13bf21dbbf1f4e196477bdcda85c8d7e55da1
|
app/assets/stylesheets/components/_footer_site_links.scss
|
app/assets/stylesheets/components/_footer_site_links.scss
|
// Internal links section of the site-wide footer.
//
// Styleguide Footer Site Links
.footer-site-links {
// height: $bl*33;
padding: $bl*5 0 $bl*6 0;
background-color: $color-footer-bg;
li {
font-family: MuseoSlab;
font-weight: 700;
}
a {
@extend %link-nav;
}
}
|
// Internal links section of the site-wide footer.
//
// Styleguide Footer Site Links
.footer-site-links {
padding: $bl*5 0 $bl*6 0;
background-color: $color-footer-bg;
li {
font-family: MuseoSlab;
font-weight: 700;
}
a {
@extend %link-nav;
}
}
|
Clean up footer site links file.
|
Clean up footer site links file.
|
SCSS
|
mit
|
moneyadviceservice/frontend,moneyadviceservice/frontend,moneyadviceservice/frontend,moneyadviceservice/frontend
|
scss
|
## Code Before:
// Internal links section of the site-wide footer.
//
// Styleguide Footer Site Links
.footer-site-links {
// height: $bl*33;
padding: $bl*5 0 $bl*6 0;
background-color: $color-footer-bg;
li {
font-family: MuseoSlab;
font-weight: 700;
}
a {
@extend %link-nav;
}
}
## Instruction:
Clean up footer site links file.
## Code After:
// Internal links section of the site-wide footer.
//
// Styleguide Footer Site Links
.footer-site-links {
padding: $bl*5 0 $bl*6 0;
background-color: $color-footer-bg;
li {
font-family: MuseoSlab;
font-weight: 700;
}
a {
@extend %link-nav;
}
}
|
8fd27f72135c90b45fb1aca0e989cf04b17a8416
|
app/assets/sass/global/_color-schemes.scss
|
app/assets/sass/global/_color-schemes.scss
|
//----------------------------------*\
// COLOR SCHEMES
//----------------------------------*/
// Light Scheme
//----------------------------------*/
.color-scheme--light {
// Light scheme is not ready for now.
}
// Dark Scheme
//----------------------------------*/
.color-scheme--dark {
%colorTxtCurrent {
color:$_color_dark_delta;
}
%colorHightContrast {
color:$_color_omega;
}
// For panel and the sidebar
%colorBkgPanel {
background:$_color_dark_alpha;
}
%colorLink {
color:$_color_dark_delta;
&:hover {
color:white;
}
}
%colorBorderThin {
border-color:$_color_dark_alpha;
}
%colorBkgTopbar {
background:$_color_dark_beta;
}
%colorBkgTopbarPanel {
background:$_color_dark_alpha;
}
%colorBkgTopbarButton {
background:$_color_dark_beta;
}
}
|
//----------------------------------*\
// COLOR SCHEMES
//----------------------------------*/
// Light Scheme
//----------------------------------*/
.color-scheme--light {
// Light scheme is not ready for now.
}
// Dark Scheme
//----------------------------------*/
.color-scheme--dark {
%colorTxtCurrent {
color:$_color_dark_delta;
}
%colorHightContrast {
color:$_color_omega;
}
// For panel and the sidebar
%colorBkgPanel {
background:$_color_dark_alpha;
}
%colorLink {
color:$_color_dark_delta;
transition-duration: 0.5s;
transition-property: color;
&:hover {
color:$_color_omega;
}
}
%colorBorderThin {
border-color:$_color_dark_alpha;
}
%colorBkgTopbar {
background:$_color_dark_beta;
}
%colorBkgTopbarPanel {
background:$_color_dark_alpha;
}
%colorBkgTopbarButton {
background:$_color_dark_beta;
}
}
|
Add transition + color var name.
|
Add transition + color var name.
|
SCSS
|
agpl-3.0
|
openstack/bansho,stackforge/bansho,openstack/bansho,stackforge/bansho,Freddrickk/adagios-frontend,stackforge/bansho,openstack/bansho,Freddrickk/adagios-frontend
|
scss
|
## Code Before:
//----------------------------------*\
// COLOR SCHEMES
//----------------------------------*/
// Light Scheme
//----------------------------------*/
.color-scheme--light {
// Light scheme is not ready for now.
}
// Dark Scheme
//----------------------------------*/
.color-scheme--dark {
%colorTxtCurrent {
color:$_color_dark_delta;
}
%colorHightContrast {
color:$_color_omega;
}
// For panel and the sidebar
%colorBkgPanel {
background:$_color_dark_alpha;
}
%colorLink {
color:$_color_dark_delta;
&:hover {
color:white;
}
}
%colorBorderThin {
border-color:$_color_dark_alpha;
}
%colorBkgTopbar {
background:$_color_dark_beta;
}
%colorBkgTopbarPanel {
background:$_color_dark_alpha;
}
%colorBkgTopbarButton {
background:$_color_dark_beta;
}
}
## Instruction:
Add transition + color var name.
## Code After:
//----------------------------------*\
// COLOR SCHEMES
//----------------------------------*/
// Light Scheme
//----------------------------------*/
.color-scheme--light {
// Light scheme is not ready for now.
}
// Dark Scheme
//----------------------------------*/
.color-scheme--dark {
%colorTxtCurrent {
color:$_color_dark_delta;
}
%colorHightContrast {
color:$_color_omega;
}
// For panel and the sidebar
%colorBkgPanel {
background:$_color_dark_alpha;
}
%colorLink {
color:$_color_dark_delta;
transition-duration: 0.5s;
transition-property: color;
&:hover {
color:$_color_omega;
}
}
%colorBorderThin {
border-color:$_color_dark_alpha;
}
%colorBkgTopbar {
background:$_color_dark_beta;
}
%colorBkgTopbarPanel {
background:$_color_dark_alpha;
}
%colorBkgTopbarButton {
background:$_color_dark_beta;
}
}
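The two transition longhands can also be collapsed into the shorthand form, which keeps the property and duration on one line; a sketch of the equivalent rule:
%colorLink {
  color: $_color_dark_delta;
  transition: color 0.5s; // shorthand for transition-property + transition-duration
  &:hover {
    color: $_color_omega;
  }
}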
|
b57f78155cdc6ceaae9d82ceb1452636b41c0171
|
client/app/components/settings-screen.html
|
client/app/components/settings-screen.html
|
<page-header title="Settings">
</page-header>
<div class="container">
<div class="container bg-white p-5">
<ul class="tab-nav">
<li ng-class="{'active': dsPage }" ng-if="showDsLink"><a href="data_sources">Data Sources</a></li>
<li ng-class="{'active': usersPage }" ng-if="showUsersLink"><a href="users">Users</a></li>
<li ng-class="{'active': groupsPage }" ng-if="showGroupsLink"><a href="groups">Groups</a></li>
<li ng-class="{'active': destinationsPage }" ng-if="showDestinationsLink"><a href="destinations">Alert Destinations</a></li>
<li ng-class="{'active': snippetsPage }" ng-if="showQuerySnippetsLink"><a href="query_snippets">Query Snippets</a></li>
</ul>
<div ng-transclude>
</div>
</div>
</div>
|
<div class="container">
<page-header title="Settings">
</page-header>
<div class="container">
<div class="container bg-white p-5">
<ul class="tab-nav">
<li ng-class="{'active': dsPage }" ng-if="showDsLink"><a href="data_sources">Data Sources</a></li>
<li ng-class="{'active': usersPage }" ng-if="showUsersLink"><a href="users">Users</a></li>
<li ng-class="{'active': groupsPage }" ng-if="showGroupsLink"><a href="groups">Groups</a></li>
<li ng-class="{'active': destinationsPage }" ng-if="showDestinationsLink"><a href="destinations">Alert Destinations</a></li>
<li ng-class="{'active': snippetsPage }" ng-if="showQuerySnippetsLink"><a href="query_snippets">Query Snippets</a></li>
</ul>
<div ng-transclude>
</div>
</div>
</div>
</div>
|
Add container for Settings to resolve vertical scroll issue
|
Add container for Settings to resolve vertical scroll issue
|
HTML
|
bsd-2-clause
|
getredash/redash,hudl/redash,moritz9/redash,44px/redash,crowdworks/redash,getredash/redash,alexanderlz/redash,moritz9/redash,moritz9/redash,alexanderlz/redash,chriszs/redash,crowdworks/redash,44px/redash,chriszs/redash,crowdworks/redash,denisov-vlad/redash,moritz9/redash,crowdworks/redash,44px/redash,denisov-vlad/redash,alexanderlz/redash,chriszs/redash,denisov-vlad/redash,hudl/redash,alexanderlz/redash,44px/redash,denisov-vlad/redash,denisov-vlad/redash,getredash/redash,hudl/redash,getredash/redash,hudl/redash,chriszs/redash,getredash/redash
|
html
|
## Code Before:
<page-header title="Settings">
</page-header>
<div class="container">
<div class="container bg-white p-5">
<ul class="tab-nav">
<li ng-class="{'active': dsPage }" ng-if="showDsLink"><a href="data_sources">Data Sources</a></li>
<li ng-class="{'active': usersPage }" ng-if="showUsersLink"><a href="users">Users</a></li>
<li ng-class="{'active': groupsPage }" ng-if="showGroupsLink"><a href="groups">Groups</a></li>
<li ng-class="{'active': destinationsPage }" ng-if="showDestinationsLink"><a href="destinations">Alert Destinations</a></li>
<li ng-class="{'active': snippetsPage }" ng-if="showQuerySnippetsLink"><a href="query_snippets">Query Snippets</a></li>
</ul>
<div ng-transclude>
</div>
</div>
</div>
## Instruction:
Add container for Settings to resolve vertical scroll issue
## Code After:
<div class="container">
<page-header title="Settings">
</page-header>
<div class="container">
<div class="container bg-white p-5">
<ul class="tab-nav">
<li ng-class="{'active': dsPage }" ng-if="showDsLink"><a href="data_sources">Data Sources</a></li>
<li ng-class="{'active': usersPage }" ng-if="showUsersLink"><a href="users">Users</a></li>
<li ng-class="{'active': groupsPage }" ng-if="showGroupsLink"><a href="groups">Groups</a></li>
<li ng-class="{'active': destinationsPage }" ng-if="showDestinationsLink"><a href="destinations">Alert Destinations</a></li>
<li ng-class="{'active': snippetsPage }" ng-if="showQuerySnippetsLink"><a href="query_snippets">Query Snippets</a></li>
</ul>
<div ng-transclude>
</div>
</div>
</div>
</div>
|