commit (stringlengths 40–40) | old_file (stringlengths 4–237) | new_file (stringlengths 4–237) | old_contents (stringlengths 1–4.24k) | new_contents (stringlengths 5–4.84k) | subject (stringlengths 15–778) | message (stringlengths 16–6.86k) | lang (stringlengths 1–30) | license (stringclasses, 13 values) | repos (stringlengths 5–116k) | config (stringlengths 1–30) | content (stringlengths 105–8.72k) |
---|---|---|---|---|---|---|---|---|---|---|---|
064cff13fd7d02672695b4ac1722bbd6df35f1d4 | lib/banzai/filter/external_link_filter.rb | lib/banzai/filter/external_link_filter.rb | module Banzai
module Filter
# HTML Filter to modify the attributes of external links
class ExternalLinkFilter < HTML::Pipeline::Filter
def call
doc.search('a').each do |node|
link = node.attr('href')
next unless link
# Skip non-HTTP(S) links
next unless link.start_with?('http')
# Skip internal links
next if link.start_with?(internal_url)
node.set_attribute('rel', 'nofollow noreferrer')
# Open external links on a new tab
node.set_attribute('target', '_blank')
end
doc
end
private
def internal_url
@internal_url ||= Gitlab.config.gitlab.url
end
end
end
end
| module Banzai
module Filter
# HTML Filter to modify the attributes of external links
class ExternalLinkFilter < HTML::Pipeline::Filter
def call
doc.search('a').each do |node|
link = node.attr('href')
next unless link
# Skip non-HTTP(S) links
next unless link.start_with?('http')
# Skip internal links
next if link.start_with?(internal_url)
node.set_attribute('rel', 'nofollow noreferrer')
node.set_attribute('target', '_blank')
end
doc
end
private
def internal_url
@internal_url ||= Gitlab.config.gitlab.url
end
end
end
end
| Remove obvious comment and extra line | Remove obvious comment and extra line
| Ruby | mit | jirutka/gitlabhq,mmkassem/gitlabhq,shinexiao/gitlabhq,dplarson/gitlabhq,allysonbarros/gitlabhq,martijnvermaat/gitlabhq,LUMC/gitlabhq,openwide-java/gitlabhq,daiyu/gitlab-zh,iiet/iiet-git,openwide-java/gitlabhq,icedwater/gitlabhq,mmkassem/gitlabhq,icedwater/gitlabhq,martijnvermaat/gitlabhq,htve/GitlabForChinese,axilleas/gitlabhq,daiyu/gitlab-zh,screenpages/gitlabhq,stoplightio/gitlabhq,iiet/iiet-git,darkrasid/gitlabhq,dreampet/gitlab,Soullivaneuh/gitlabhq,t-zuehlsdorff/gitlabhq,dplarson/gitlabhq,htve/GitlabForChinese,LUMC/gitlabhq,LUMC/gitlabhq,openwide-java/gitlabhq,t-zuehlsdorff/gitlabhq,mmkassem/gitlabhq,mr-dxdy/gitlabhq,stoplightio/gitlabhq,shinexiao/gitlabhq,axilleas/gitlabhq,darkrasid/gitlabhq,darkrasid/gitlabhq,dplarson/gitlabhq,htve/GitlabForChinese,LUMC/gitlabhq,screenpages/gitlabhq,Soullivaneuh/gitlabhq,jirutka/gitlabhq,dreampet/gitlab,mr-dxdy/gitlabhq,Soullivaneuh/gitlabhq,allysonbarros/gitlabhq,SVArago/gitlabhq,allysonbarros/gitlabhq,mmkassem/gitlabhq,mr-dxdy/gitlabhq,dplarson/gitlabhq,axilleas/gitlabhq,axilleas/gitlabhq,openwide-java/gitlabhq,stoplightio/gitlabhq,stoplightio/gitlabhq,shinexiao/gitlabhq,htve/GitlabForChinese,iiet/iiet-git,icedwater/gitlabhq,t-zuehlsdorff/gitlabhq,screenpages/gitlabhq,martijnvermaat/gitlabhq,Soullivaneuh/gitlabhq,SVArago/gitlabhq,allysonbarros/gitlabhq,iiet/iiet-git,icedwater/gitlabhq,t-zuehlsdorff/gitlabhq,shinexiao/gitlabhq,SVArago/gitlabhq,screenpages/gitlabhq,daiyu/gitlab-zh,darkrasid/gitlabhq,jirutka/gitlabhq,dreampet/gitlab,daiyu/gitlab-zh,mr-dxdy/gitlabhq,martijnvermaat/gitlabhq,jirutka/gitlabhq,SVArago/gitlabhq,dreampet/gitlab | ruby | ## Code Before:
module Banzai
module Filter
# HTML Filter to modify the attributes of external links
class ExternalLinkFilter < HTML::Pipeline::Filter
def call
doc.search('a').each do |node|
link = node.attr('href')
next unless link
# Skip non-HTTP(S) links
next unless link.start_with?('http')
# Skip internal links
next if link.start_with?(internal_url)
node.set_attribute('rel', 'nofollow noreferrer')
# Open external links on a new tab
node.set_attribute('target', '_blank')
end
doc
end
private
def internal_url
@internal_url ||= Gitlab.config.gitlab.url
end
end
end
end
## Instruction:
Remove obvious comment and extra line
## Code After:
module Banzai
module Filter
# HTML Filter to modify the attributes of external links
class ExternalLinkFilter < HTML::Pipeline::Filter
def call
doc.search('a').each do |node|
link = node.attr('href')
next unless link
# Skip non-HTTP(S) links
next unless link.start_with?('http')
# Skip internal links
next if link.start_with?(internal_url)
node.set_attribute('rel', 'nofollow noreferrer')
node.set_attribute('target', '_blank')
end
doc
end
private
def internal_url
@internal_url ||= Gitlab.config.gitlab.url
end
end
end
end
|
6cfeabf8b0cce51c8f5a88a231f9f42cbce8e854 | .travis.yml | .travis.yml | language: python
python:
- "2.6"
- "2.7"
env:
- DJANGO=1.4.10
virtualenv:
system_site_packages: true
install:
- pip install -q coveralls Django==$DJANGO --use-mirrors
- pip install -r requirements.txt
- "if [[ $TRAVIS_PYTHON_VERSION == '2.6' ]]; then pip install --use-mirrors cython && pip install --use-mirrors pysam argparse counter ordereddict importlib; fi"
- pip install flake8
services:
- memcached
- redis-server
before_script:
- flake8
- psql -c 'create database varifydb;' -U postgres
script:
- coverage run test_suite.py
after_success:
- coveralls
| language: python
python:
- "2.6"
- "2.7"
env:
- DJANGO=1.4.10
- DJANGO=1.5.5
virtualenv:
system_site_packages: true
install:
- pip install -q coveralls Django==$DJANGO --use-mirrors
- pip install -r requirements.txt
- "if [[ $TRAVIS_PYTHON_VERSION == '2.6' ]]; then pip install --use-mirrors cython && pip install --use-mirrors pysam argparse counter ordereddict importlib; fi"
- pip install flake8
services:
- memcached
- redis-server
before_script:
- flake8
- psql -c 'create database varifydb;' -U postgres
script:
- coverage run test_suite.py
after_success:
- coveralls
| Add Django 1.5.5 to Travis config | Add Django 1.5.5 to Travis config
| YAML | bsd-2-clause | chop-dbhi/varify,chop-dbhi/varify,chop-dbhi/varify,chop-dbhi/varify | yaml | ## Code Before:
language: python
python:
- "2.6"
- "2.7"
env:
- DJANGO=1.4.10
virtualenv:
system_site_packages: true
install:
- pip install -q coveralls Django==$DJANGO --use-mirrors
- pip install -r requirements.txt
- "if [[ $TRAVIS_PYTHON_VERSION == '2.6' ]]; then pip install --use-mirrors cython && pip install --use-mirrors pysam argparse counter ordereddict importlib; fi"
- pip install flake8
services:
- memcached
- redis-server
before_script:
- flake8
- psql -c 'create database varifydb;' -U postgres
script:
- coverage run test_suite.py
after_success:
- coveralls
## Instruction:
Add Django 1.5.5 to Travis config
## Code After:
language: python
python:
- "2.6"
- "2.7"
env:
- DJANGO=1.4.10
- DJANGO=1.5.5
virtualenv:
system_site_packages: true
install:
- pip install -q coveralls Django==$DJANGO --use-mirrors
- pip install -r requirements.txt
- "if [[ $TRAVIS_PYTHON_VERSION == '2.6' ]]; then pip install --use-mirrors cython && pip install --use-mirrors pysam argparse counter ordereddict importlib; fi"
- pip install flake8
services:
- memcached
- redis-server
before_script:
- flake8
- psql -c 'create database varifydb;' -U postgres
script:
- coverage run test_suite.py
after_success:
- coveralls
|
466f0460774cad86e7e828ffb50c7d1332b64e7b | CONTRIBUTING.rst | CONTRIBUTING.rst | Contributing
------------
If you want to contribute that is awesome. Remember to be nice to others in issues and reviews.
**It is nice if you follow this list:**
* Follow pep8 (run flake8).
* Follow the import sorting guidelines (run isort).
* Write tests for the cool things you create or fix.
Unsure about something from that list or anything else? No worries, `open an issue`_ or ask a
question on `gitter`_ and someone will help you out.
.. _open an issue: https://github.com/relekang/python-semantic-release/issues/new
.. _gitter: https://gitter.im/relekang/python-semantic-release
Commit messages
~~~~~~~~~~~~~~~
Since python-semantic-release is released with python-semantic-release we need the commit messages
to adhere to the `angular commit guidelines`_. If you are unsure how to describe the change correctly
Just try and ask in your pr, or ask on gitter. If we think it should be something else or there is a
pull-request without tags we will help out in adding or changing them.
.. _angular commit guidelins: https://github.com/angular/angular.js/blob/master/CONTRIBUTING.md#commit
Release schedule
~~~~~~~~~~~~~~~~
This package should be released by itself every half hour. Thus, if there is changes tagged with
major, minor or patch a new release will be published xx:00 and xx:30.
| Contributing
------------
If you want to contribute that is awesome. Remember to be nice to others in issues and reviews.
**It is nice if you follow this list:**
* Follow pep8 (run flake8).
* Follow the import sorting guidelines (run isort).
* Write tests for the cool things you create or fix.
Unsure about something from that list or anything else? No worries, `open an issue`_ or ask a
question on `gitter`_ and someone will help you out.
.. _open an issue: https://github.com/relekang/python-semantic-release/issues/new
.. _gitter: https://gitter.im/relekang/python-semantic-release
Commit messages
~~~~~~~~~~~~~~~
Since python-semantic-release is released with python-semantic-release we need the commit messages
to adhere to the `angular commit guidelines`_. If you are unsure how to describe the change correctly
Just try and ask in your pr, or ask on gitter. If we think it should be something else or there is a
pull-request without tags we will help out in adding or changing them.
.. _angular commit guidelins: https://github.com/angular/angular.js/blob/master/CONTRIBUTING.md#commit
Releases
~~~~~~~~
This package is released by python-semantic-release on each master build, thus if there are changes
that should result in a new release it will happen if the build is green.
| Update info about releases in contributing.md | docs: Update info about releases in contributing.md
| reStructuredText | mit | relekang/python-semantic-release,relekang/python-semantic-release,riddlesio/python-semantic-release,wlonk/python-semantic-release | restructuredtext | ## Code Before:
Contributing
------------
If you want to contribute that is awesome. Remember to be nice to others in issues and reviews.
**It is nice if you follow this list:**
* Follow pep8 (run flake8).
* Follow the import sorting guidelines (run isort).
* Write tests for the cool things you create or fix.
Unsure about something from that list or anything else? No worries, `open an issue`_ or ask a
question on `gitter`_ and someone will help you out.
.. _open an issue: https://github.com/relekang/python-semantic-release/issues/new
.. _gitter: https://gitter.im/relekang/python-semantic-release
Commit messages
~~~~~~~~~~~~~~~
Since python-semantic-release is released with python-semantic-release we need the commit messages
to adhere to the `angular commit guidelines`_. If you are unsure how to describe the change correctly
Just try and ask in your pr, or ask on gitter. If we think it should be something else or there is a
pull-request without tags we will help out in adding or changing them.
.. _angular commit guidelins: https://github.com/angular/angular.js/blob/master/CONTRIBUTING.md#commit
Release schedule
~~~~~~~~~~~~~~~~
This package should be released by itself every half hour. Thus, if there is changes tagged with
major, minor or patch a new release will be published xx:00 and xx:30.
## Instruction:
docs: Update info about releases in contributing.md
## Code After:
Contributing
------------
If you want to contribute that is awesome. Remember to be nice to others in issues and reviews.
**It is nice if you follow this list:**
* Follow pep8 (run flake8).
* Follow the import sorting guidelines (run isort).
* Write tests for the cool things you create or fix.
Unsure about something from that list or anything else? No worries, `open an issue`_ or ask a
question on `gitter`_ and someone will help you out.
.. _open an issue: https://github.com/relekang/python-semantic-release/issues/new
.. _gitter: https://gitter.im/relekang/python-semantic-release
Commit messages
~~~~~~~~~~~~~~~
Since python-semantic-release is released with python-semantic-release we need the commit messages
to adhere to the `angular commit guidelines`_. If you are unsure how to describe the change correctly
Just try and ask in your pr, or ask on gitter. If we think it should be something else or there is a
pull-request without tags we will help out in adding or changing them.
.. _angular commit guidelins: https://github.com/angular/angular.js/blob/master/CONTRIBUTING.md#commit
Releases
~~~~~~~~
This package is released by python-semantic-release on each master build, thus if there are changes
that should result in a new release it will happen if the build is green.
|
1d9066bb7054cfe7deeac57e21489062230fcc58 | lib/selection-handle-component.coffee | lib/selection-handle-component.coffee | React = require 'react-atom-fork'
{div} = require 'reactionary-atom-fork'
module.exports = React.createClass
render: ->
{parentView} = @props
{top, left, width, height} = parentView.selectionScrollRect()
div className: 'selection-box-handle', style: {
top: (top + height) + 'px'
left: (left + width) + '%'
}
| React = require 'react-atom-fork'
{div} = require 'reactionary-atom-fork'
module.exports = React.createClass
render: ->
{parentView} = @props
{top, left, width, height} = parentView.selectionScrollRect()
div className: 'selection-box-handle', style: {
top: (top + height) + 'px'
left: width + '%'
transform: "translate(#{left + 'px'}, 0)"
}
| Fix selection handle position when start column is not 0 | :bug: Fix selection handle position when start column is not 0
| CoffeeScript | mit | abe33/atom-tablr | coffeescript | ## Code Before:
React = require 'react-atom-fork'
{div} = require 'reactionary-atom-fork'
module.exports = React.createClass
render: ->
{parentView} = @props
{top, left, width, height} = parentView.selectionScrollRect()
div className: 'selection-box-handle', style: {
top: (top + height) + 'px'
left: (left + width) + '%'
}
## Instruction:
:bug: Fix selection handle position when start column is not 0
## Code After:
React = require 'react-atom-fork'
{div} = require 'reactionary-atom-fork'
module.exports = React.createClass
render: ->
{parentView} = @props
{top, left, width, height} = parentView.selectionScrollRect()
div className: 'selection-box-handle', style: {
top: (top + height) + 'px'
left: width + '%'
transform: "translate(#{left + 'px'}, 0)"
}
|
f4c25e65193b7490ccfe3a9bd2499a366e6007fa | roles/haproxy/tasks/main.yml | roles/haproxy/tasks/main.yml | ---
- name: Install haproxy 1.8 yum repo
copy:
src: haproxy18.repo
dest: /etc/yum.repos.d/
owner: root
mode: 0644
when:
- haproxy_18
- name: Include multiple-instances.yml
include: multiple-instances.yml
when:
- haproxy_multi_instance is defined
- name: Include single-instance.yml
include: single-instance.yml
when:
- haproxy_multi_instance is not defined
- name: Include redirects-instance.yml
include: redirects-instance.yml
when:
- haproxy_redirects is defined
- name: disable health checks for development envs
command: 'echo "disable health {{item.name}}" | socat /var/lib/haproxy/stats stdio'
with_items:
- "{{ haproxy_applications }}"
when:
- develop
| ---
- name: Install haproxy 1.8 yum repo
copy:
src: haproxy18.repo
dest: /etc/yum.repos.d/
owner: root
mode: 0644
- name: Include multiple-instances.yml
include: multiple-instances.yml
when:
- haproxy_multi_instance is defined
- name: Include single-instance.yml
include: single-instance.yml
when:
- haproxy_multi_instance is not defined
- name: Include redirects-instance.yml
include: redirects-instance.yml
when:
- haproxy_redirects is defined
- name: disable health checks for development envs
command: 'echo "disable health {{item.name}}" | socat /var/lib/haproxy/stats stdio'
with_items:
- "{{ haproxy_applications }}"
when:
- develop
| Enable new haproxy > 1.7 | Haproxy: Enable new haproxy > 1.7
| YAML | apache-2.0 | OpenConext/OpenConext-deploy,OpenConext/OpenConext-deploy,OpenConext/OpenConext-deploy,OpenConext/OpenConext-deploy,OpenConext/OpenConext-deploy | yaml | ## Code Before:
---
- name: Install haproxy 1.8 yum repo
copy:
src: haproxy18.repo
dest: /etc/yum.repos.d/
owner: root
mode: 0644
when:
- haproxy_18
- name: Include multiple-instances.yml
include: multiple-instances.yml
when:
- haproxy_multi_instance is defined
- name: Include single-instance.yml
include: single-instance.yml
when:
- haproxy_multi_instance is not defined
- name: Include redirects-instance.yml
include: redirects-instance.yml
when:
- haproxy_redirects is defined
- name: disable health checks for development envs
command: 'echo "disable health {{item.name}}" | socat /var/lib/haproxy/stats stdio'
with_items:
- "{{ haproxy_applications }}"
when:
- develop
## Instruction:
Haproxy: Enable new haproxy > 1.7
## Code After:
---
- name: Install haproxy 1.8 yum repo
copy:
src: haproxy18.repo
dest: /etc/yum.repos.d/
owner: root
mode: 0644
- name: Include multiple-instances.yml
include: multiple-instances.yml
when:
- haproxy_multi_instance is defined
- name: Include single-instance.yml
include: single-instance.yml
when:
- haproxy_multi_instance is not defined
- name: Include redirects-instance.yml
include: redirects-instance.yml
when:
- haproxy_redirects is defined
- name: disable health checks for development envs
command: 'echo "disable health {{item.name}}" | socat /var/lib/haproxy/stats stdio'
with_items:
- "{{ haproxy_applications }}"
when:
- develop
|
1bae1bc94fea8fd9b8fe25d832ebfce07ad08a1c | utilities/build/update_xform_translate_path.py | utilities/build/update_xform_translate_path.py | jar_path_placeholder = 'DYNAMIC_PATH_TO_XFORM_TRANSLATE_JAR'
import sys, os
if 'RAPIDSMS_INI' not in os.environ:
print "RAPIDSMS_INI NOT FOUND"
sys.exit()
local_ini = os.environ['RAPIDSMS_INI']
fin = open(local_ini,"r")
ini = fin.read()
fin.close()
if jar_path_placeholder in ini:
filedir = os.path.dirname(__file__)
xform_jar_path = os.path.abspath(os.path.join(filedir,'..','..','lib'))
ini = ini.replace(jar_path_placeholder, xform_jar_path)
fin = open(local_ini,"w")
fin.write(ini)
fin.close()
print "Updated %s with %s" % (local_ini, xform_jar_path)
| JAR_PATH_SETTING = 'xform_translate_path'
import sys, os
if 'RAPIDSMS_INI' not in os.environ:
print "RAPIDSMS_INI NOT FOUND"
sys.exit()
local_ini = os.environ['RAPIDSMS_INI']
filedir = os.path.dirname(__file__)
xform_jar_path = os.path.abspath(os.path.join(filedir,'..','..','lib'))
ini = ""
should_update = False
fin = open(local_ini,"r")
for line in fin:
if JAR_PATH_SETTING in line:
line = 'xform_translate_path=%s\n' % xform_jar_path
should_update = True
ini = ini + line
fin.close()
if should_update:
fin = open(local_ini,"w")
fin.write(ini)
fin.close()
print "Updated %s with %s" % (local_ini, xform_jar_path)
else:
print "Nothing to update"
| Fix up local.ini updater code to look specifically for 'xform_translate_path' | Fix up local.ini updater code to look specifically for 'xform_translate_path'
| Python | bsd-3-clause | puttarajubr/commcare-hq,dimagi/commcare-hq,SEL-Columbia/commcare-hq,puttarajubr/commcare-hq,qedsoftware/commcare-hq,gmimano/commcaretest,dimagi/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,gmimano/commcaretest,dimagi/commcare-hq,puttarajubr/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,puttarajubr/commcare-hq,dimagi/commcare-hq,SEL-Columbia/commcare-hq,qedsoftware/commcare-hq,SEL-Columbia/commcare-hq,gmimano/commcaretest | python | ## Code Before:
jar_path_placeholder = 'DYNAMIC_PATH_TO_XFORM_TRANSLATE_JAR'
import sys, os
if 'RAPIDSMS_INI' not in os.environ:
print "RAPIDSMS_INI NOT FOUND"
sys.exit()
local_ini = os.environ['RAPIDSMS_INI']
fin = open(local_ini,"r")
ini = fin.read()
fin.close()
if jar_path_placeholder in ini:
filedir = os.path.dirname(__file__)
xform_jar_path = os.path.abspath(os.path.join(filedir,'..','..','lib'))
ini = ini.replace(jar_path_placeholder, xform_jar_path)
fin = open(local_ini,"w")
fin.write(ini)
fin.close()
print "Updated %s with %s" % (local_ini, xform_jar_path)
## Instruction:
Fix up local.ini updater code to look specifically for 'xform_translate_path'
## Code After:
JAR_PATH_SETTING = 'xform_translate_path'
import sys, os
if 'RAPIDSMS_INI' not in os.environ:
print "RAPIDSMS_INI NOT FOUND"
sys.exit()
local_ini = os.environ['RAPIDSMS_INI']
filedir = os.path.dirname(__file__)
xform_jar_path = os.path.abspath(os.path.join(filedir,'..','..','lib'))
ini = ""
should_update = False
fin = open(local_ini,"r")
for line in fin:
if JAR_PATH_SETTING in line:
line = 'xform_translate_path=%s\n' % xform_jar_path
should_update = True
ini = ini + line
fin.close()
if should_update:
fin = open(local_ini,"w")
fin.write(ini)
fin.close()
print "Updated %s with %s" % (local_ini, xform_jar_path)
else:
print "Nothing to update"
|
bd2669a1e50b0546874eceb07816d6a638dc8321 | src/PhpToZephir/CodeCollector/DirectoryCodeCollector.php | src/PhpToZephir/CodeCollector/DirectoryCodeCollector.php | <?php
namespace PhpToZephir\CodeCollector;
class DirectoryCodeCollector implements CodeCollectorInterface
{
/**
* @var array
*/
private $directories;
/**
* @param array $code
*/
public function __construct(array $directories)
{
$this->directories = $directories;
}
/**
* @param string $dir
* @return \RegexIterator
*/
private function findFiles($dir)
{
$directory = new \RecursiveDirectoryIterator($dir);
$iterator = new \RecursiveIteratorIterator($directory);
$regex = new \RegexIterator($iterator, '/^.+\.php$/i', \RecursiveRegexIterator::GET_MATCH);
return $regex;
}
/**
* @return array
*/
public function getCode()
{
$files = array();
foreach ($this->directories as $directory) {
foreach ($this->findFiles($directory) as $file) {
$files[$file] = file_get_contents($file);
}
}
return $files;
}
} | <?php
namespace PhpToZephir\CodeCollector;
class DirectoryCodeCollector implements CodeCollectorInterface
{
/**
* @var array
*/
private $directories;
/**
* @param array $code
*/
public function __construct(array $directories)
{
$this->directories = $directories;
}
/**
* @param string $dir
* @return \RegexIterator
*/
private function findFiles($dir)
{
$directory = new \RecursiveDirectoryIterator($dir);
$iterator = new \RecursiveIteratorIterator($directory);
$regex = new \RegexIterator($iterator, '/^.+\.php$/i', \RecursiveRegexIterator::GET_MATCH);
return $regex;
}
/**
* @return array
*/
public function getCode()
{
$files = array();
foreach ($this->directories as $directory) {
foreach ($this->findFiles($directory) as $file) {
if (is_array($file)) {
$file = reset($file);
}
$files[$file] = file_get_contents($file);
}
}
return $files;
}
}
| Fix error on PHP >= 5.5 | Fix error on PHP >= 5.5 | PHP | mit | fezfez/php-to-zephir | php | ## Code Before:
<?php
namespace PhpToZephir\CodeCollector;
class DirectoryCodeCollector implements CodeCollectorInterface
{
/**
* @var array
*/
private $directories;
/**
* @param array $code
*/
public function __construct(array $directories)
{
$this->directories = $directories;
}
/**
* @param string $dir
* @return \RegexIterator
*/
private function findFiles($dir)
{
$directory = new \RecursiveDirectoryIterator($dir);
$iterator = new \RecursiveIteratorIterator($directory);
$regex = new \RegexIterator($iterator, '/^.+\.php$/i', \RecursiveRegexIterator::GET_MATCH);
return $regex;
}
/**
* @return array
*/
public function getCode()
{
$files = array();
foreach ($this->directories as $directory) {
foreach ($this->findFiles($directory) as $file) {
$files[$file] = file_get_contents($file);
}
}
return $files;
}
}
## Instruction:
Fix error on PHP >= 5.5
## Code After:
<?php
namespace PhpToZephir\CodeCollector;
class DirectoryCodeCollector implements CodeCollectorInterface
{
/**
* @var array
*/
private $directories;
/**
* @param array $code
*/
public function __construct(array $directories)
{
$this->directories = $directories;
}
/**
* @param string $dir
* @return \RegexIterator
*/
private function findFiles($dir)
{
$directory = new \RecursiveDirectoryIterator($dir);
$iterator = new \RecursiveIteratorIterator($directory);
$regex = new \RegexIterator($iterator, '/^.+\.php$/i', \RecursiveRegexIterator::GET_MATCH);
return $regex;
}
/**
* @return array
*/
public function getCode()
{
$files = array();
foreach ($this->directories as $directory) {
foreach ($this->findFiles($directory) as $file) {
if (is_array($file)) {
$file = reset($file);
}
$files[$file] = file_get_contents($file);
}
}
return $files;
}
}
|
1ac4e00f3d06955da90bddf03a6e478ddeb4d220 | core/modules/html_has_same_domain.py | core/modules/html_has_same_domain.py | from bs4 import BeautifulSoup as bs
from get_root_domain import get_root_domain
def html_has_same_domain(url, resp):
mod = 'html_has_same_domain'
cnt = 0
root = get_root_domain(url)
current_page = bs(resp.text, 'lxml')
for tag in current_page.find_all('a'):
if tag.get('href'):
in_url = get_root_domain(tag.get('href'))
if in_url == root:
cnt += 1
if ("naver" in tag.text.lower()):
return "P", mod
if cnt >= 1:
return "S", mod
return "U", mod
| from bs4 import BeautifulSoup as bs
from get_root_domain import get_root_domain
def html_has_same_domain(url, resp):
mod = 'html_has_same_domain'
cnt = 0
root = get_root_domain(url)
current_page = bs(resp.text, 'lxml')
for tag in current_page.find_all('a'):
if tag.get('href'):
in_url = get_root_domain(tag.get('href'))
if in_url == root:
cnt += 1
if cnt >= 1:
return "S", mod
return "U", mod
| Undo underperformaing change to code | Undo underperformaing change to code
| Python | bsd-2-clause | mjkim610/phishing-detection,jaeyung1001/phishing_site_detection | python | ## Code Before:
from bs4 import BeautifulSoup as bs
from get_root_domain import get_root_domain
def html_has_same_domain(url, resp):
mod = 'html_has_same_domain'
cnt = 0
root = get_root_domain(url)
current_page = bs(resp.text, 'lxml')
for tag in current_page.find_all('a'):
if tag.get('href'):
in_url = get_root_domain(tag.get('href'))
if in_url == root:
cnt += 1
if ("naver" in tag.text.lower()):
return "P", mod
if cnt >= 1:
return "S", mod
return "U", mod
## Instruction:
Undo underperformaing change to code
## Code After:
from bs4 import BeautifulSoup as bs
from get_root_domain import get_root_domain
def html_has_same_domain(url, resp):
mod = 'html_has_same_domain'
cnt = 0
root = get_root_domain(url)
current_page = bs(resp.text, 'lxml')
for tag in current_page.find_all('a'):
if tag.get('href'):
in_url = get_root_domain(tag.get('href'))
if in_url == root:
cnt += 1
if cnt >= 1:
return "S", mod
return "U", mod
|
0d2b31b152034d9ebfaab6a7d3bfed5ced16a63b | README.md | README.md | [](https://travis-ci.org/b2b2dot0/fuey_client)
[](https://codeclimate.com/repos/5203a52189af7e65a002c4a5/feed)
# FueyClient
TODO: Write a gem description
## Installation
Add this line to your application's Gemfile:
gem 'fuey_client'
And then execute:
$ bundle
Or install it yourself as:
$ gem install fuey_client
## Usage
TODO: Write usage instructions here
## Contributing
1. Fork it
2. Create your feature branch (`git checkout -b my-new-feature`)
3. Commit your changes (`git commit -am 'Add some feature'`)
4. Push to the branch (`git push origin my-new-feature`)
5. Create new Pull Request
| [](https://travis-ci.org/b2b2dot0/fuey_client)
[](https://codeclimate.com/repos/5203a52189af7e65a002c4a5/feed)
# Fuey::Client
Fuey currently supports pinging hosts only. This is great for ensuring your servers are live. You can easily tie this to a cron job that
runs at an interval or write your own Ruby script to run it continually.
## Installation
Install the gem:
gem 'fuey_client'
Copy and modify the example [config file](https://github.com/b2b2dot0/fuey_client/blob/master/config_example/fuey/config.yml).
Place it where you would like to keep it and note the location. The file needs to be called `config.yml` and it needs to be in a directory called
`fuey`. So an acceptable location would be, `/etc/fuey/config.yml`.
## Usage
To run Fuey _(assuming your config file is located at /etc/fuey/config.yml)_:
$ fuey /etc
Fuey output is logged to the logfile you identified in your `config.yml`.
## Contributing
1. Fork it
2. Create your feature branch (`git checkout -b my-new-feature`)
3. Commit your changes (`git commit -am 'Add some feature'`)
4. Push to the branch (`git push origin my-new-feature`)
5. Create new Pull Request
| Add install and usage instructions | Add install and usage instructions | Markdown | mit | dliska/fuey_client,b2b2dot0/fuey_client | markdown | ## Code Before:
[](https://travis-ci.org/b2b2dot0/fuey_client)
[](https://codeclimate.com/repos/5203a52189af7e65a002c4a5/feed)
# FueyClient
TODO: Write a gem description
## Installation
Add this line to your application's Gemfile:
gem 'fuey_client'
And then execute:
$ bundle
Or install it yourself as:
$ gem install fuey_client
## Usage
TODO: Write usage instructions here
## Contributing
1. Fork it
2. Create your feature branch (`git checkout -b my-new-feature`)
3. Commit your changes (`git commit -am 'Add some feature'`)
4. Push to the branch (`git push origin my-new-feature`)
5. Create new Pull Request
## Instruction:
Add install and usage instructions
## Code After:
[](https://travis-ci.org/b2b2dot0/fuey_client)
[](https://codeclimate.com/repos/5203a52189af7e65a002c4a5/feed)
# Fuey::Client
Fuey currently supports pinging hosts only. This is great for ensuring your servers are live. You can easily tie this to a cron job that
runs at an interval or write your own Ruby script to run it continually.
## Installation
Install the gem:
gem 'fuey_client'
Copy and modify the example [config file](https://github.com/b2b2dot0/fuey_client/blob/master/config_example/fuey/config.yml).
Place it where you would like to keep it and note the location. The file needs to be called `config.yml` and it needs to be in a directory called
`fuey`. So an acceptable location would be, `/etc/fuey/config.yml`.
## Usage
To run Fuey _(assuming your config file is located at /etc/fuey/config.yml)_:
$ fuey /etc
Fuey output is logged to the logfile you identified in your `config.yml`.
## Contributing
1. Fork it
2. Create your feature branch (`git checkout -b my-new-feature`)
3. Commit your changes (`git commit -am 'Add some feature'`)
4. Push to the branch (`git push origin my-new-feature`)
5. Create new Pull Request
|
531ea8985abb8b6ce45dfefd54e4ac9652748b5a | layouts/partials/shows.html | layouts/partials/shows.html | <section class="shows">
<div class="columns columns--11 space-around">
<h2 class="huge">Shows</h2>
</div>
{{ range .Site.Params.shows }}
<div class="show" data-date="{{ .date }}">
<a href="{{ .url }}" target="_blank" aria-label="{{ .title }}">
<div class="show__head">
<h4 class="show__date">{{ .date }}</h4>
<h3 class="show__title">{{ .title }}</h3>
</div>
<picture>
<source srcset="{{ printf "shows/%s.webp" .img }}" type="image/webp">
<source srcset="{{ printf "shows/%s.jpg" .img }}" type="image/jpeg">
<img class="show__image" src="{{ printf "shows/%s.jpg" .img }}" alt="{{ .title }}" loading="lazy">
</picture>
</a>
</div>
{{ end }}
<script type="text/javascript">
console.log('NERDDISCO')
// var shows = document.querySelectorAll('.shows')
// shows.map(show => {
// console.log(show)
// })
</script>
</section> | <section class="shows">
<div class="columns columns--11 space-around">
<h2 class="huge">Shows</h2>
</div>
<div class="columns columns--fit space-around">
{{ range .Site.Params.shows }}
<div class="show" data-date="{{ .date }}">
<a href="{{ .url }}" target="_blank" aria-label="{{ .title }}">
<div class="show__head">
<h4 class="show__date">{{ .date }}</h4>
<h3 class="show__title">{{ .title }}</h3>
</div>
<picture>
<source srcset="{{ printf "shows/%s.webp" .img }}" type="image/webp">
<source srcset="{{ printf "shows/%s.jpg" .img }}" type="image/jpeg">
<img class="show__image" src="{{ printf "shows/%s.jpg" .img }}" alt="{{ .title }}" loading="lazy">
</picture>
</a>
</div>
{{ end }}
</div>
<script type="text/javascript">
console.log('NERDDISCO')
// var shows = document.querySelectorAll('.shows')
// shows.map(show => {
// console.log(show)
// })
</script>
</section> | Make sure to use a grid | feat(show): Make sure to use a grid
| HTML | mit | NERDDISCO/nerddis.co,NERDDISCO/nerddis.co,NERDDISCO/nerddis.co | html | ## Code Before:
<section class="shows">
<div class="columns columns--11 space-around">
<h2 class="huge">Shows</h2>
</div>
{{ range .Site.Params.shows }}
<div class="show" data-date="{{ .date }}">
<a href="{{ .url }}" target="_blank" aria-label="{{ .title }}">
<div class="show__head">
<h4 class="show__date">{{ .date }}</h4>
<h3 class="show__title">{{ .title }}</h3>
</div>
<picture>
<source srcset="{{ printf "shows/%s.webp" .img }}" type="image/webp">
<source srcset="{{ printf "shows/%s.jpg" .img }}" type="image/jpeg">
<img class="show__image" src="{{ printf "shows/%s.jpg" .img }}" alt="{{ .title }}" loading="lazy">
</picture>
</a>
</div>
{{ end }}
<script type="text/javascript">
console.log('NERDDISCO')
// var shows = document.querySelectorAll('.shows')
// shows.map(show => {
// console.log(show)
// })
</script>
</section>
## Instruction:
feat(show): Make sure to use a grid
## Code After:
<section class="shows">
<div class="columns columns--11 space-around">
<h2 class="huge">Shows</h2>
</div>
<div class="columns columns--fit space-around">
{{ range .Site.Params.shows }}
<div class="show" data-date="{{ .date }}">
<a href="{{ .url }}" target="_blank" aria-label="{{ .title }}">
<div class="show__head">
<h4 class="show__date">{{ .date }}</h4>
<h3 class="show__title">{{ .title }}</h3>
</div>
<picture>
<source srcset="{{ printf "shows/%s.webp" .img }}" type="image/webp">
<source srcset="{{ printf "shows/%s.jpg" .img }}" type="image/jpeg">
<img class="show__image" src="{{ printf "shows/%s.jpg" .img }}" alt="{{ .title }}" loading="lazy">
</picture>
</a>
</div>
{{ end }}
</div>
<script type="text/javascript">
console.log('NERDDISCO')
// var shows = document.querySelectorAll('.shows')
// shows.map(show => {
// console.log(show)
// })
</script>
</section> |
60baff72b801faeca744c287ca61a8e3608a3e62 | plugins/de.uni_jena.iaa.linktype.atomic.product/plugin.xml | plugins/de.uni_jena.iaa.linktype.atomic.product/plugin.xml | <?xml version="1.0" encoding="UTF-8"?>
<plugin>
<extension
id="atomic_product_id"
point="org.eclipse.core.runtime.products">
<product
name="de.uni_jena.iaa.linktype.atomic.product"
application="de.uni_jena.iaa.linktype.atomic.application">
<property
name="applicationCSS"
value="platform:/plugin/de.uni_jena.iaa.linktype.atomic.product/css/default.css">
</property>
<property
name="applicationXMI"
value="de.uni_jena.iaa.linktype.atomic/AtomicMixedMode.e4xmi">
</property>
<property
name="appName"
value="de.uni_jena.iaa.linktype.atomic.product">
</property>
</product>
</extension>
</plugin>
| <?xml version="1.0" encoding="UTF-8"?>
<plugin>
<extension
id="atomic_product_id"
point="org.eclipse.core.runtime.products">
<product
name="⚛ Atomic"
application="de.uni_jena.iaa.linktype.atomic.application">
<property
name="applicationCSS"
value="platform:/plugin/de.uni_jena.iaa.linktype.atomic.product/css/default.css">
</property>
<property
name="applicationXMI"
value="de.uni_jena.iaa.linktype.atomic/AtomicMixedMode.e4xmi">
</property>
<property
name="appName"
value="de.uni_jena.iaa.linktype.atomic.product">
</property>
</product>
</extension>
</plugin>
| Change window title to "⚛ Atomic" | Change window title to "⚛ Atomic" | XML | apache-2.0 | infraling/atomic,infraling/atomic | xml | ## Code Before:
<?xml version="1.0" encoding="UTF-8"?>
<plugin>
<extension
id="atomic_product_id"
point="org.eclipse.core.runtime.products">
<product
name="de.uni_jena.iaa.linktype.atomic.product"
application="de.uni_jena.iaa.linktype.atomic.application">
<property
name="applicationCSS"
value="platform:/plugin/de.uni_jena.iaa.linktype.atomic.product/css/default.css">
</property>
<property
name="applicationXMI"
value="de.uni_jena.iaa.linktype.atomic/AtomicMixedMode.e4xmi">
</property>
<property
name="appName"
value="de.uni_jena.iaa.linktype.atomic.product">
</property>
</product>
</extension>
</plugin>
## Instruction:
Change window title to "⚛ Atomic"
## Code After:
<?xml version="1.0" encoding="UTF-8"?>
<plugin>
<extension
id="atomic_product_id"
point="org.eclipse.core.runtime.products">
<product
name="⚛ Atomic"
application="de.uni_jena.iaa.linktype.atomic.application">
<property
name="applicationCSS"
value="platform:/plugin/de.uni_jena.iaa.linktype.atomic.product/css/default.css">
</property>
<property
name="applicationXMI"
value="de.uni_jena.iaa.linktype.atomic/AtomicMixedMode.e4xmi">
</property>
<property
name="appName"
value="de.uni_jena.iaa.linktype.atomic.product">
</property>
</product>
</extension>
</plugin>
|
2e4f8003b62e2085d074dfba901ff56297b96242 | lib/tasks/sample_data/group_factory.rb | lib/tasks/sample_data/group_factory.rb | require "tasks/sample_data/addressing"
require "tasks/sample_data/logging"
class GroupFactory
include Logging
include Addressing
def create_samples
log "Creating groups"
return if EnterpriseGroup.where(name: "Producer group").exists?
create_group(
name: "Producer group",
owner: enterprises.first.owner,
on_front_page: true,
description: "The seed producers",
address: "6 Rollings Road, Upper Ferntree Gully, 3156"
)
end
private
def create_group(params)
group = EnterpriseGroup.new(params)
group.address = address(params[:address])
group.enterprises = enterprises
group.save!
end
def enterprises
@enterprises ||= Enterprise.where(
name: [
"Fred's Farm",
"Freddy's Farm Shop",
"Fredo's Farm Hub"
]
)
end
end
| require "tasks/sample_data/addressing"
require "tasks/sample_data/logging"
class GroupFactory
include Logging
include Addressing
def create_samples
log "Creating groups"
return if EnterpriseGroup.where(name: "Producer group").exists?
create_group(
{
name: "Producer group",
owner: enterprises.first.owner,
on_front_page: true,
description: "The seed producers"
},
"6 Rollings Road, Upper Ferntree Gully, 3156"
)
end
private
def create_group(params, group_address)
group = EnterpriseGroup.new(params)
group.address = address(group_address)
group.enterprises = enterprises
group.save!
end
def enterprises
@enterprises ||= Enterprise.where(
name: [
"Fred's Farm",
"Freddy's Farm Shop",
"Fredo's Farm Hub"
]
)
end
end
| Fix group factory in rails 4 | Fix group factory in rails 4
params[:address] was breaking the creation of the EnterpriseGroup
| Ruby | agpl-3.0 | Matt-Yorkley/openfoodnetwork,mkllnk/openfoodnetwork,mkllnk/openfoodnetwork,Matt-Yorkley/openfoodnetwork,Matt-Yorkley/openfoodnetwork,lin-d-hop/openfoodnetwork,openfoodfoundation/openfoodnetwork,openfoodfoundation/openfoodnetwork,Matt-Yorkley/openfoodnetwork,mkllnk/openfoodnetwork,openfoodfoundation/openfoodnetwork,lin-d-hop/openfoodnetwork,openfoodfoundation/openfoodnetwork,lin-d-hop/openfoodnetwork,mkllnk/openfoodnetwork,lin-d-hop/openfoodnetwork | ruby | ## Code Before:
require "tasks/sample_data/addressing"
require "tasks/sample_data/logging"
class GroupFactory
include Logging
include Addressing
def create_samples
log "Creating groups"
return if EnterpriseGroup.where(name: "Producer group").exists?
create_group(
name: "Producer group",
owner: enterprises.first.owner,
on_front_page: true,
description: "The seed producers",
address: "6 Rollings Road, Upper Ferntree Gully, 3156"
)
end
private
def create_group(params)
group = EnterpriseGroup.new(params)
group.address = address(params[:address])
group.enterprises = enterprises
group.save!
end
def enterprises
@enterprises ||= Enterprise.where(
name: [
"Fred's Farm",
"Freddy's Farm Shop",
"Fredo's Farm Hub"
]
)
end
end
## Instruction:
Fix group factory in rails 4
params[:address] was breaking the creation of the EnterpriseGroup
## Code After:
require "tasks/sample_data/addressing"
require "tasks/sample_data/logging"
class GroupFactory
include Logging
include Addressing
def create_samples
log "Creating groups"
return if EnterpriseGroup.where(name: "Producer group").exists?
create_group(
{
name: "Producer group",
owner: enterprises.first.owner,
on_front_page: true,
description: "The seed producers"
},
"6 Rollings Road, Upper Ferntree Gully, 3156"
)
end
private
def create_group(params, group_address)
group = EnterpriseGroup.new(params)
group.address = address(group_address)
group.enterprises = enterprises
group.save!
end
def enterprises
@enterprises ||= Enterprise.where(
name: [
"Fred's Farm",
"Freddy's Farm Shop",
"Fredo's Farm Hub"
]
)
end
end
|
b24cec0adda72055ada6c2b1e6872d3ecab074fa | src/Microsoft.AspNet.Mvc.Common/project.json | src/Microsoft.AspNet.Mvc.Common/project.json | {
"version": "6.0.0-*",
"shared": "*.cs",
"dependencies": {
},
"frameworks": {
"aspnet50": {},
"aspnetcore50": {
"dependencies": {
"System.Reflection.Extensions": "4.0.0-beta-*",
"System.Text.Encoding.Extensions": "4.0.10-beta-*"
}
}
}
}
| {
"version": "6.0.0-*",
"shared": "*.cs",
"dependencies": {
},
"frameworks": {
"net45": { },
"aspnet50": { },
"aspnetcore50": {
"dependencies": {
"System.Reflection.Extensions": "4.0.0-beta-*",
"System.Text.Encoding.Extensions": "4.0.10-beta-*"
}
}
}
}
| Add net45 as a target framework for Microsoft.AspNet.Mvc.Common | Add net45 as a target framework for Microsoft.AspNet.Mvc.Common
| JSON | apache-2.0 | aspnet/AspNetCore,aspnet/AspNetCore,aspnet/AspNetCore,aspnet/AspNetCore,aspnet/AspNetCore,aspnet/AspNetCore,aspnet/AspNetCore,aspnet/AspNetCore,aspnet/AspNetCore | json | ## Code Before:
{
"version": "6.0.0-*",
"shared": "*.cs",
"dependencies": {
},
"frameworks": {
"aspnet50": {},
"aspnetcore50": {
"dependencies": {
"System.Reflection.Extensions": "4.0.0-beta-*",
"System.Text.Encoding.Extensions": "4.0.10-beta-*"
}
}
}
}
## Instruction:
Add net45 as a target framework for Microsoft.AspNet.Mvc.Common
## Code After:
{
"version": "6.0.0-*",
"shared": "*.cs",
"dependencies": {
},
"frameworks": {
"net45": { },
"aspnet50": { },
"aspnetcore50": {
"dependencies": {
"System.Reflection.Extensions": "4.0.0-beta-*",
"System.Text.Encoding.Extensions": "4.0.10-beta-*"
}
}
}
}
|
d5fb4ceba473d79cec822506a5322400f0a42a32 | doc/upstream-training/source/slides/howitsmade-communication.rst | doc/upstream-training/source/slides/howitsmade-communication.rst | ====================
Online Communication
====================
.. image:: ./_assets/os_background.png
:class: fill
:width: 100%
Learning Exercise
=================
* Open the `Communication
<https://docs.openstack.org/contributors/common/communication.html>`_ section
of the Contributor Guide
* Read the material
* Ask the mentors questions
* Get ready to go through the exercises
Exercise 1
==========
* Add your name and email address (and IRC nick if you have one already) to
the training etherpad
Exercise 2
==========
* Look through the list of mailing lists and subscribe to at least
one list of interest
* Locate the archives for that ML. What email thread has the most responses
this month?
| ====================
Online Communication
====================
.. image:: ./_assets/os_background.png
:class: fill
:width: 100%
Learning Exercise
=================
* Open the `Communication
<https://docs.openstack.org/contributors/common/communication.html>`_ section
of the Contributor Guide
* Read the material
* Ask the mentors questions
* Get ready to go through the exercises
Exercise 1
==========
* Add your name and email address (and IRC nick if you have one already) to
the training etherpad
Exercise 2
==========
* Subscribe to the openstack-discuss mailing list.
* Locate the archives for the openstack-discuss ML.
What email thread has the most responses this month?
| Replace "mailing lists" with "openstack-discuss" ML | Replace "mailing lists" with "openstack-discuss" ML
Change-Id: I73c49b88e0586db8b33e3f12813c2438772b99a7
Signed-off-by: Anda Nicolae <[email protected]>
| reStructuredText | apache-2.0 | openstack/training-guides,openstack/training-guides | restructuredtext | ## Code Before:
====================
Online Communication
====================
.. image:: ./_assets/os_background.png
:class: fill
:width: 100%
Learning Exercise
=================
* Open the `Communication
<https://docs.openstack.org/contributors/common/communication.html>`_ section
of the Contributor Guide
* Read the material
* Ask the mentors questions
* Get ready to go through the exercises
Exercise 1
==========
* Add your name and email address (and IRC nick if you have one already) to
the training etherpad
Exercise 2
==========
* Look through the list of mailing lists and subscribe to at least
one list of interest
* Locate the archives for that ML. What email thread has the most responses
this month?
## Instruction:
Replace "mailing lists" with "openstack-discuss" ML
Change-Id: I73c49b88e0586db8b33e3f12813c2438772b99a7
Signed-off-by: Anda Nicolae <[email protected]>
## Code After:
====================
Online Communication
====================
.. image:: ./_assets/os_background.png
:class: fill
:width: 100%
Learning Exercise
=================
* Open the `Communication
<https://docs.openstack.org/contributors/common/communication.html>`_ section
of the Contributor Guide
* Read the material
* Ask the mentors questions
* Get ready to go through the exercises
Exercise 1
==========
* Add your name and email address (and IRC nick if you have one already) to
the training etherpad
Exercise 2
==========
* Subscribe to the openstack-discuss mailing list.
* Locate the archives for the openstack-discuss ML.
What email thread has the most responses this month?
|
7b06d8532c6e62366c8f77ab3f9f91faa8013743 | dash/app/controllers/spree/admin/overview_controller.rb | dash/app/controllers/spree/admin/overview_controller.rb | module Spree
class Admin::OverviewController < Admin::BaseController
before_filter :check_last_jirafe_sync_time, :only => :index
JIRAFE_LOCALES = { :english => 'en_US',
:french => 'fr_FR',
:german => 'de_DE',
:japanese => 'ja_JA' }
def index
redirect_to admin_analytics_register_path unless Spree::Dash::Config.configured?
if JIRAFE_LOCALES.values.include? params[:locale]
Spree::Dash::Config.locale = params[:locale]
end
end
private
def check_last_jirafe_sync_time
if session[:last_jirafe_sync]
hours_since_last_sync = ((DateTime.now - session[:last_jirafe_sync]) * 24).to_i
redirect_to admin_analytics_sync_path if hours_since_last_sync > 24
else
redirect_to admin_analytics_sync_path
end
end
end
end
| module Spree
class Admin::OverviewController < Admin::BaseController
before_filter :check_last_jirafe_sync_time, :only => :index
JIRAFE_LOCALES = { :english => 'en_US',
:french => 'fr_FR',
:german => 'de_DE',
:japanese => 'ja_JA' }
def index
redirect_to admin_analytics_register_path unless Spree::Dash::Config.configured?
if JIRAFE_LOCALES.values.include? params[:locale]
Spree::Dash::Config.locale = params[:locale]
end
end
private
def check_last_jirafe_sync_time
if Spree::Dash.configured?
if session[:last_jirafe_sync]
hours_since_last_sync = ((DateTime.now - session[:last_jirafe_sync]) * 24).to_i
redirect_to admin_analytics_sync_path if hours_since_last_sync > 24
else
redirect_to admin_analytics_sync_path
end
end
end
end
end
| Check if Spree::Dash is configured before trying to sync with Jirafe | Check if Spree::Dash is configured before trying to sync with Jirafe
| Ruby | bsd-3-clause | lzcabrera/spree-1-3-stable,lzcabrera/spree-1-3-stable,lzcabrera/spree-1-3-stable | ruby | ## Code Before:
module Spree
class Admin::OverviewController < Admin::BaseController
before_filter :check_last_jirafe_sync_time, :only => :index
JIRAFE_LOCALES = { :english => 'en_US',
:french => 'fr_FR',
:german => 'de_DE',
:japanese => 'ja_JA' }
def index
redirect_to admin_analytics_register_path unless Spree::Dash::Config.configured?
if JIRAFE_LOCALES.values.include? params[:locale]
Spree::Dash::Config.locale = params[:locale]
end
end
private
def check_last_jirafe_sync_time
if session[:last_jirafe_sync]
hours_since_last_sync = ((DateTime.now - session[:last_jirafe_sync]) * 24).to_i
redirect_to admin_analytics_sync_path if hours_since_last_sync > 24
else
redirect_to admin_analytics_sync_path
end
end
end
end
## Instruction:
Check if Spree::Dash is configured before trying to sync with Jirafe
## Code After:
module Spree
class Admin::OverviewController < Admin::BaseController
before_filter :check_last_jirafe_sync_time, :only => :index
JIRAFE_LOCALES = { :english => 'en_US',
:french => 'fr_FR',
:german => 'de_DE',
:japanese => 'ja_JA' }
def index
redirect_to admin_analytics_register_path unless Spree::Dash::Config.configured?
if JIRAFE_LOCALES.values.include? params[:locale]
Spree::Dash::Config.locale = params[:locale]
end
end
private
def check_last_jirafe_sync_time
if Spree::Dash.configured?
if session[:last_jirafe_sync]
hours_since_last_sync = ((DateTime.now - session[:last_jirafe_sync]) * 24).to_i
redirect_to admin_analytics_sync_path if hours_since_last_sync > 24
else
redirect_to admin_analytics_sync_path
end
end
end
end
end
|
969bd131b47709f47c9262e93283db0edda1e5d4 | app/services/stocks.js | app/services/stocks.js | app.factory('stocks', ['$http', function($http) {
var endPoint = 'https://query.yahooapis.com/v1/public/yql?q=',
symbols = [
"W",
"FIT"
],
query = "select * from yahoo.finance.quotes where symbol IN(\"" + symbols.join('","') + "\")",
args = "&format=json&env=http://datatables.org/alltables.env",
url = endPoint + encodeURIComponent(query) + args;
// return $http.get(url)
// .success(function(data) {
// return data;
// })
// .error(function(data) {
// return data;
// });
// Used for no internet access only
return $http.get('http://localhost:8000/app/components/stocks.json');
}]); | app.factory('stocks', ['$http', function($http) {
var endPoint = 'https://query.yahooapis.com/v1/public/yql?q=',
symbols = [
"W",
"FIT"
],
query = "select * from yahoo.finance.quotes where symbol IN(\"" + symbols.join('","') + "\")",
args = "&format=json&env=http://datatables.org/alltables.env",
url = endPoint + encodeURIComponent(query) + args;
return $http.get('http://localhost:8000/app/components/stocks.json');
return $http.get(url)
.success(function(data) {
return data;
})
.error(function(data) {
return data;
});
}]); | Rearrange temp support for no internet | Rearrange temp support for no internet
| JavaScript | mit | zachloubier/stocks,zachloubier/stocks | javascript | ## Code Before:
app.factory('stocks', ['$http', function($http) {
var endPoint = 'https://query.yahooapis.com/v1/public/yql?q=',
symbols = [
"W",
"FIT"
],
query = "select * from yahoo.finance.quotes where symbol IN(\"" + symbols.join('","') + "\")",
args = "&format=json&env=http://datatables.org/alltables.env",
url = endPoint + encodeURIComponent(query) + args;
// return $http.get(url)
// .success(function(data) {
// return data;
// })
// .error(function(data) {
// return data;
// });
// Used for no internet access only
return $http.get('http://localhost:8000/app/components/stocks.json');
}]);
## Instruction:
Rearrange temp support for no internet
## Code After:
app.factory('stocks', ['$http', function($http) {
var endPoint = 'https://query.yahooapis.com/v1/public/yql?q=',
symbols = [
"W",
"FIT"
],
query = "select * from yahoo.finance.quotes where symbol IN(\"" + symbols.join('","') + "\")",
args = "&format=json&env=http://datatables.org/alltables.env",
url = endPoint + encodeURIComponent(query) + args;
return $http.get('http://localhost:8000/app/components/stocks.json');
return $http.get(url)
.success(function(data) {
return data;
})
.error(function(data) {
return data;
});
}]); |
89b323667c9a869c21bc8c0b5bef5b128a970a5f | api-script-client/int_daily_build/run.sh | api-script-client/int_daily_build/run.sh | set -e; # Stop on error
# Declare run specific parameters
effectiveDate="2015-01-31"
readmeEndDate="2014"
isFirstTime=false
headless=true
extensionName="SNOMED CT International Edition"
dataLocation=$1
previousPublishedPackageName="SnomedCT_Release_INT_20140731.zip"
productName="SNOMED CT Release"
buildName="Int Daily Build"
echo "Configuration set to pull export files from $1"
# Call api_client
source ../api_client.sh
| set -e; # Stop on error
# Declare run specific parameters
effectiveDate="2015-01-31"
readmeEndDate="2014"
isFirstTime=false
headless=true
extensionName="SNOMED CT International Edition"
dataLocation=$1
previousPublishedPackageName="SnomedCT_Release_INT_20140731.zip"
productName="SNOMED CT Release"
buildName="Int Daily Build"
packageName="Snomed Release Package
echo "Configuration set to pull export files from $1"
# Call api_client
source ../api_client.sh
| Package Name must match that being picked up by the daily comparison. | Package Name must match that being picked up by the daily comparison. | Shell | apache-2.0 | IHTSDO/snomed-release-service,IHTSDO/snomed-release-service | shell | ## Code Before:
set -e; # Stop on error
# Declare run specific parameters
effectiveDate="2015-01-31"
readmeEndDate="2014"
isFirstTime=false
headless=true
extensionName="SNOMED CT International Edition"
dataLocation=$1
previousPublishedPackageName="SnomedCT_Release_INT_20140731.zip"
productName="SNOMED CT Release"
buildName="Int Daily Build"
echo "Configuration set to pull export files from $1"
# Call api_client
source ../api_client.sh
## Instruction:
Package Name must match that being picked up by the daily comparison.
## Code After:
set -e; # Stop on error
# Declare run specific parameters
effectiveDate="2015-01-31"
readmeEndDate="2014"
isFirstTime=false
headless=true
extensionName="SNOMED CT International Edition"
dataLocation=$1
previousPublishedPackageName="SnomedCT_Release_INT_20140731.zip"
productName="SNOMED CT Release"
buildName="Int Daily Build"
packageName="Snomed Release Package
echo "Configuration set to pull export files from $1"
# Call api_client
source ../api_client.sh
|
d9c818c0c16aa9861744c3e4f63211aca6c7d1a6 | db_test.go | db_test.go | package hyperdb_test
import (
. "github.com/mysza/hyperdb"
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
)
var _ = Describe("DB", func() {
})
| package hyperdb_test
import (
"github.com/mysza/hyperdb"
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
"os"
)
var _ = Describe("DB", func() {
Describe("Opening the database", func() {
Context("with defaults", func() {
It("should open default database", func() {
// calling Open with empty string should use default file name
db, err := hyperdb.Open("")
_, pathErr := os.Stat(hyperdb.DefaultDataFileName)
defer func() {
db.Close()
os.Remove(hyperdb.DefaultDataFileName)
}()
Expect(db).ToNot(BeNil())
Expect(err).To(BeNil())
Expect(pathErr).To(BeNil())
})
It("should open named database", func() {
const dbname = "very_unique_name.db"
db, err := hyperdb.Open(dbname)
_, pathErr := os.Stat(dbname)
defer func() {
db.Close()
os.Remove(dbname)
}()
Expect(db).ToNot(BeNil())
Expect(err).To(BeNil())
Expect(pathErr).To(BeNil())
})
})
})
})
| Add test for opening default and named database | Add test for opening default and named database
| Go | mit | mysza/hyperdb | go | ## Code Before:
package hyperdb_test
import (
. "github.com/mysza/hyperdb"
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
)
var _ = Describe("DB", func() {
})
## Instruction:
Add test for opening default and named database
## Code After:
package hyperdb_test
import (
"github.com/mysza/hyperdb"
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
"os"
)
var _ = Describe("DB", func() {
Describe("Opening the database", func() {
Context("with defaults", func() {
It("should open default database", func() {
// calling Open with empty string should use default file name
db, err := hyperdb.Open("")
_, pathErr := os.Stat(hyperdb.DefaultDataFileName)
defer func() {
db.Close()
os.Remove(hyperdb.DefaultDataFileName)
}()
Expect(db).ToNot(BeNil())
Expect(err).To(BeNil())
Expect(pathErr).To(BeNil())
})
It("should open named database", func() {
const dbname = "very_unique_name.db"
db, err := hyperdb.Open(dbname)
_, pathErr := os.Stat(dbname)
defer func() {
db.Close()
os.Remove(dbname)
}()
Expect(db).ToNot(BeNil())
Expect(err).To(BeNil())
Expect(pathErr).To(BeNil())
})
})
})
})
|
e1ad944dc30d0546b7aa696a57439e0ccd794ba6 | examples/basic/src/actions/index.js | examples/basic/src/actions/index.js | import * as Rx from 'rxjs';
export const FETCH_USER_PENDING = 'FETCH_USER_PENDING';
export const FETCH_USER_FULFILLED = 'FETCH_USER_FULFILLED';
export const FETCH_USER_ABORTED = 'FETCH_USER_ABORTED';
export const fetchUser = () => (
(actions, store) => Rx.Observable.of({ id: 1, name: 'Bilbo Baggins', timestamp: new Date() })
// Delaying to emulate an async request, like Rx.Observable.ajax('/api/path')
.delay(1000)
// When our request comes back, we transform it into an action
// that is then automatically dispatched by the middleware
.map(
payload => ({ type: FETCH_USER_FULFILLED, payload })
)
// Abort fetching the user if someone dispatches an abort action
.takeUntil(
actions.ofType(FETCH_USER_ABORTED)
)
// Let's us immediately update the user's state so we can display
// loading messages to the user, etc.
.startWith({ type: FETCH_USER_PENDING })
);
// Plain old action
export const abortFetchUser = () => ({ type: FETCH_USER_ABORTED });
| import * as Rx from 'rxjs';
export const FETCH_USER_PENDING = 'FETCH_USER_PENDING';
export const FETCH_USER_FULFILLED = 'FETCH_USER_FULFILLED';
export const FETCH_USER_ABORTED = 'FETCH_USER_ABORTED';
export const fetchUser = () => (
(actions, store) => Rx.Observable.of({ id: 1, name: 'Bilbo Baggins', timestamp: new Date() })
// Delaying to emulate an async request, like Rx.Observable.ajax('/api/path')
.delay(1000)
// When our request comes back, we transform it into an action
// which the redux-observable middleware will then dispatch
.map(
payload => ({ type: FETCH_USER_FULFILLED, payload })
)
// Abort fetching the user if someone dispatches an abort action
.takeUntil(
actions.ofType(FETCH_USER_ABORTED)
)
    // Lets us immediately update the user's state so we can display
// loading messages to the user, etc.
.startWith({ type: FETCH_USER_PENDING })
);
// Plain old action
export const abortFetchUser = () => ({ type: FETCH_USER_ABORTED });
| Reduce confusion about when middleware dispatches emitted actions | docs(README): Reduce confusion about when middleware dispatches emitted actions
"automatically dispatch" is misleading/confusing. This event is dispatched because `dispatch(fetchUser())` is called elsewhere-- merely returning an action object from `.map` function does not cause the action to be dispatched. | JavaScript | mit | redux-observable/redux-observable,blesh/redux-observable,blesh/redux-observable,redux-observable/redux-observable,jesinity/redux-observable,jesinity/redux-observable,jesinity/redux-observable,redux-observable/redux-observable | javascript | ## Code Before:
import * as Rx from 'rxjs';
export const FETCH_USER_PENDING = 'FETCH_USER_PENDING';
export const FETCH_USER_FULFILLED = 'FETCH_USER_FULFILLED';
export const FETCH_USER_ABORTED = 'FETCH_USER_ABORTED';
export const fetchUser = () => (
(actions, store) => Rx.Observable.of({ id: 1, name: 'Bilbo Baggins', timestamp: new Date() })
// Delaying to emulate an async request, like Rx.Observable.ajax('/api/path')
.delay(1000)
// When our request comes back, we transform it into an action
// that is then automatically dispatched by the middleware
.map(
payload => ({ type: FETCH_USER_FULFILLED, payload })
)
// Abort fetching the user if someone dispatches an abort action
.takeUntil(
actions.ofType(FETCH_USER_ABORTED)
)
    // Lets us immediately update the user's state so we can display
// loading messages to the user, etc.
.startWith({ type: FETCH_USER_PENDING })
);
// Plain old action
export const abortFetchUser = () => ({ type: FETCH_USER_ABORTED });
## Instruction:
docs(README): Reduce confusion about when middleware dispatches emitted actions
"automatically dispatch" is misleading/confusing. This event is dispatched because `dispatch(fetchUser())` is called elsewhere-- merely returning an action object from `.map` function does not cause the action to be dispatched.
## Code After:
import * as Rx from 'rxjs';
export const FETCH_USER_PENDING = 'FETCH_USER_PENDING';
export const FETCH_USER_FULFILLED = 'FETCH_USER_FULFILLED';
export const FETCH_USER_ABORTED = 'FETCH_USER_ABORTED';
export const fetchUser = () => (
(actions, store) => Rx.Observable.of({ id: 1, name: 'Bilbo Baggins', timestamp: new Date() })
// Delaying to emulate an async request, like Rx.Observable.ajax('/api/path')
.delay(1000)
// When our request comes back, we transform it into an action
// which the redux-observable middleware will then dispatch
.map(
payload => ({ type: FETCH_USER_FULFILLED, payload })
)
// Abort fetching the user if someone dispatches an abort action
.takeUntil(
actions.ofType(FETCH_USER_ABORTED)
)
    // Lets us immediately update the user's state so we can display
// loading messages to the user, etc.
.startWith({ type: FETCH_USER_PENDING })
);
// Plain old action
export const abortFetchUser = () => ({ type: FETCH_USER_ABORTED });
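
A note on the wording above: the example file never calls `store.dispatch` itself, so the dispatch the message refers to happens in calling code that is not part of this diff. A minimal sketch of that calling side (hypothetical wiring; it assumes a Redux store with the redux-observable middleware of this era already applied):

```js
import { fetchUser, abortFetchUser } from './actions';

// `store` is assumed to be a Redux store with the redux-observable middleware
// applied. Dispatching fetchUser() hands its returned observable to the
// middleware, which subscribes and dispatches every action the stream emits:
// FETCH_USER_PENDING immediately (startWith), FETCH_USER_FULFILLED after the
// simulated 1s request.
export function loadUser(store) {
  store.dispatch(fetchUser());
}

// Dispatching the abort action while the request is still pending completes
// the stream through takeUntil, so FETCH_USER_FULFILLED never fires.
export function cancelLoad(store) {
  store.dispatch(abortFetchUser());
}
```
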
|
780ffaa86cb64913ace2c98a10890d7e40b528c4 | README.md | README.md | This is a simple python class which is using to filter out cards by different keys, the data source is exported JSON from Trello.
|function name | description |
-----------------------------|------------------------------------|
|viewCardsByKeys([key])       | Get values for remaining cards     |
|cardsFilterByClosed(boolean)| Filter out closed cards |
|cardsFilterByLastUpdateDate(int) | Filter out update dated over n days|
|cardsFilterByList(idList) | Filter by ID of List |
|cardsFilterByLabels | Filter by labels (TBD) |
|cardsFilterByMembers | Filter by members (TBD) |
# Usage example
```
>>> from trellofunnel import trellofunnel
>>> o = trellofunnel(file = '/tmp/trello.json')
>>> o.cardsFilterByClosed()
>>> o.cardsFilterByLastUpdateDate(7)
>>> print o.viewCardsByKeys(['idList', 'labels', 'name'])
ToDo [Dev] Improve the flow
Doing [Ops] Switch account
Done [Server] Upgrade package
```
 | This is a simple Python class used to filter out cards by different keys; the data source is JSON exported from Trello.
|function name | description |
-----------------------------|------------------------------------|
|viewCardsByKeys([keys])      | Get values for remaining cards     |
|cardsFilterByClosed(boolean)| Filter out closed cards |
|cardsFilterByLastUpdateDate(int) | Filter out update dated over n days|
|cardsFilterByList(idList) | Filter by ID of List |
|cardsFilterByLabels | Filter by labels (TBD) |
|cardsFilterByMembers | Filter by members (TBD) |
# Usage example
Use local file /tmp/trello.json
```
>>> from trellofunnel import trellofunnel
>>> o = trellofunnel(file = '/tmp/trello.json')
>>> o.cardsFilterByClosed()
>>> o.cardsFilterByLastUpdateDate(7)
>>> print o.viewCardsByKeys(['idList', 'labels', 'name'])
ToDo [Dev] Improve the flow
Doing [Ops] Switch account
Done [Server] Upgrade package
```
(Temporary) Use token to download JSON from Trello
```
>>> from trellofunnel import trellofunnel
>>> o = trellofunnel(url= 'https://trello.com/b/ruxunrrp.json', cookies = 'token=xxxxxxx')
>>> o.cardsFilterByClosed()
>>> o.cardsFilterByLastUpdateDate(7)
>>> print o.viewCardsByKeys(['idList', 'labels', 'name'])
ToDo [Dev] Improve the flow
Doing [Ops] Switch account
Done [Server] Upgrade package
``` | Update readme for temporary solution: use token to download trello JSON | Update readme for temporary solution: use token to download trello JSON
| Markdown | mit | cjy0125/trellofunnel | markdown | ## Code Before:
This is a simple Python class used to filter out cards by different keys; the data source is JSON exported from Trello.
|function name | description |
-----------------------------|------------------------------------|
|viewCardsByKeys([key])       | Get values for remaining cards     |
|cardsFilterByClosed(boolean)| Filter out closed cards |
|cardsFilterByLastUpdateDate(int) | Filter out update dated over n days|
|cardsFilterByList(idList) | Filter by ID of List |
|cardsFilterByLabels | Filter by labels (TBD) |
|cardsFilterByMembers | Filter by members (TBD) |
# Usage example
```
>>> from trellofunnel import trellofunnel
>>> o = trellofunnel(file = '/tmp/trello.json')
>>> o.cardsFilterByClosed()
>>> o.cardsFilterByLastUpdateDate(7)
>>> print o.viewCardsByKeys(['idList', 'labels', 'name'])
ToDo [Dev] Improve the flow
Doing [Ops] Switch account
Done [Server] Upgrade package
```
## Instruction:
Update readme for temporary solution: use token to download trello JSON
## Code After:
This is a simple Python class used to filter out cards by different keys; the data source is JSON exported from Trello.
|function name | description |
-----------------------------|------------------------------------|
|viewCardsByKeys([keys])      | Get values for remaining cards     |
|cardsFilterByClosed(boolean)| Filter out closed cards |
|cardsFilterByLastUpdateDate(int) | Filter out update dated over n days|
|cardsFilterByList(idList) | Filter by ID of List |
|cardsFilterByLabels | Filter by labels (TBD) |
|cardsFilterByMembers | Filter by members (TBD) |
# Usage example
Use local file /tmp/trello.json
```
>>> from trellofunnel import trellofunnel
>>> o = trellofunnel(file = '/tmp/trello.json')
>>> o.cardsFilterByClosed()
>>> o.cardsFilterByLastUpdateDate(7)
>>> print o.viewCardsByKeys(['idList', 'labels', 'name'])
ToDo [Dev] Improve the flow
Doing [Ops] Switch account
Done [Server] Upgrade package
```
(Temporary) Use token to download JSON from Trello
```
>>> from trellofunnel import trellofunnel
>>> o = trellofunnel(url= 'https://trello.com/b/ruxunrrp.json', cookies = 'token=xxxxxxx')
>>> o.cardsFilterByClosed()
>>> o.cardsFilterByLastUpdateDate(7)
>>> print o.viewCardsByKeys(['idList', 'labels', 'name'])
ToDo [Dev] Improve the flow
Doing [Ops] Switch account
Done [Server] Upgrade package
``` |
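
The new README section relies on the constructor's `url` and `cookies` arguments, but the commit does not show how the class turns them into a download. A plausible sketch of that step, assuming a typical `requests`-based fetch (the real trellofunnel internals may differ):

```python
import requests

def fetch_board_json(url, cookies):
    """Download a board export, e.g. url='https://trello.com/b/ruxunrrp.json',
    cookies='token=xxxxxxx'. The token string is sent as a raw Cookie header,
    mirroring what a logged-in browser would do."""
    response = requests.get(url, headers={'Cookie': cookies})
    response.raise_for_status()
    return response.json()
```
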
18c8be74d0f57490192ebcfb63400574e01adf87 | src/moves.ts | src/moves.ts | export function getMoves(grid: boolean[]): number[] {
return grid
.map((value, index) => value == undefined ? index : undefined)
.filter(value => value != undefined);
}
| import { Grid, Move } from './definitions';
export function getMoves(grid: Grid): Move[] {
return grid
.map((value, index) => value == undefined ? index : undefined)
.filter(value => value != undefined);
}
| Make getMoves function use Grid and Move | Make getMoves function use Grid and Move
| TypeScript | mit | artfuldev/tictactoe-ai,artfuldev/tictactoe-ai | typescript | ## Code Before:
export function getMoves(grid: boolean[]): number[] {
return grid
.map((value, index) => value == undefined ? index : undefined)
.filter(value => value != undefined);
}
## Instruction:
Make getMoves function use Grid and Move
## Code After:
import { Grid, Move } from './definitions';
export function getMoves(grid: Grid): Move[] {
return grid
.map((value, index) => value == undefined ? index : undefined)
.filter(value => value != undefined);
}
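
The `Grid` and `Move` aliases live in './definitions', which is not part of this diff. Judging only from the signature being replaced, `(grid: boolean[]): number[]`, they are presumably thin aliases along these lines (an assumption; the real definitions may be richer):

```typescript
// Hypothetical ./definitions, inferred from the old getMoves signature.
export type Move = number;      // index of a playable cell
export type Grid = boolean[];   // sparse per-cell occupancy; an unset entry means the cell is free
```

Typing the function against the aliases keeps the tic-tac-toe vocabulary in one place without changing runtime behaviour.
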
|
a7409f648898d312f7ef7c61ffd85df5a66165d2 | src/bd/settings/VmSettings.java | src/bd/settings/VmSettings.java | package bd.settings;
/**
* VmSettings are determined based on Java properties. They are used to configure VM-wide
* properties, for instance whether a tool is enabled or not.
*/
public class VmSettings {
public static final boolean DYNAMIC_METRICS;
static {
Settings s = getSettings();
DYNAMIC_METRICS = s.dynamicMetricsEnabled();
}
private static Settings getSettings() {
String className = System.getProperty("bd.settings");
if (className == null) {
return new AllDisabled();
}
try {
Class<?> clazz = VmSettings.class.getClassLoader().loadClass(className);
return (Settings) clazz.newInstance();
} catch (ClassNotFoundException | InstantiationException | IllegalAccessException e) {
// Checkstyle: stop
System.err.println("[BlackDiamonds] Could not load settings class: " + className);
e.printStackTrace();
return new AllDisabled();
// Checkstyle: resume
}
}
}
| package bd.settings;
/**
* VmSettings are determined based on Java properties. They are used to configure VM-wide
* properties, for instance whether a tool is enabled or not.
*/
public class VmSettings {
public static final boolean DYNAMIC_METRICS;
static {
Settings s = getSettings();
DYNAMIC_METRICS = s.dynamicMetricsEnabled();
}
private static Settings getSettings() {
String className = System.getProperty("bd.settings");
if (className == null) {
return new AllDisabled();
}
try {
Class<?> clazz = Class.forName(className);
return (Settings) clazz.newInstance();
} catch (ClassNotFoundException | InstantiationException | IllegalAccessException e) {
// Checkstyle: stop
System.err.println("[BlackDiamonds] Could not load settings class: " + className);
e.printStackTrace();
return new AllDisabled();
// Checkstyle: resume
}
}
}
| Use Class.forName instead of class loader | Use Class.forName instead of class loader
Class loader seems to be null on the bootclasspath
Signed-off-by: Stefan Marr <[email protected]>
| Java | mit | SOM-st/black-diamonds,SOM-st/black-diamonds | java | ## Code Before:
package bd.settings;
/**
* VmSettings are determined based on Java properties. They are used to configure VM-wide
* properties, for instance whether a tool is enabled or not.
*/
public class VmSettings {
public static final boolean DYNAMIC_METRICS;
static {
Settings s = getSettings();
DYNAMIC_METRICS = s.dynamicMetricsEnabled();
}
private static Settings getSettings() {
String className = System.getProperty("bd.settings");
if (className == null) {
return new AllDisabled();
}
try {
Class<?> clazz = VmSettings.class.getClassLoader().loadClass(className);
return (Settings) clazz.newInstance();
} catch (ClassNotFoundException | InstantiationException | IllegalAccessException e) {
// Checkstyle: stop
System.err.println("[BlackDiamonds] Could not load settings class: " + className);
e.printStackTrace();
return new AllDisabled();
// Checkstyle: resume
}
}
}
## Instruction:
Use Class.forName instead of class loader
Class loader seems to be null on the bootclasspath
Signed-off-by: Stefan Marr <[email protected]>
## Code After:
package bd.settings;
/**
* VmSettings are determined based on Java properties. They are used to configure VM-wide
* properties, for instance whether a tool is enabled or not.
*/
public class VmSettings {
public static final boolean DYNAMIC_METRICS;
static {
Settings s = getSettings();
DYNAMIC_METRICS = s.dynamicMetricsEnabled();
}
private static Settings getSettings() {
String className = System.getProperty("bd.settings");
if (className == null) {
return new AllDisabled();
}
try {
Class<?> clazz = Class.forName(className);
return (Settings) clazz.newInstance();
} catch (ClassNotFoundException | InstantiationException | IllegalAccessException e) {
// Checkstyle: stop
System.err.println("[BlackDiamonds] Could not load settings class: " + className);
e.printStackTrace();
return new AllDisabled();
// Checkstyle: resume
}
}
}
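
The rationale in the commit message rests on a documented JVM behaviour: `Class.getClassLoader()` returns null for classes defined by the bootstrap loader, so chaining `loadClass` off it can throw a NullPointerException, while `Class.forName(name)` resolves against the caller's defining loader and keeps working. A small standalone illustration (not part of the BlackDiamonds sources):

```java
public final class LoaderDemo {
  public static void main(String[] args) throws Exception {
    // Bootstrap-loaded classes report a null class loader ...
    System.out.println(String.class.getClassLoader());   // prints "null"

    // ... so VmSettings.class.getClassLoader().loadClass(name) would NPE when
    // VmSettings itself sits on the boot class path, whereas Class.forName
    // uses the caller's defining loader and still resolves the class.
    Class<?> clazz = Class.forName("java.util.ArrayList");
    System.out.println(clazz.getName());
  }
}
```
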
|
a8fe0e39e3b6e63656d161d8397b47a8f0a83d42 | app/controllers/reports_controller.rb | app/controllers/reports_controller.rb | class ReportsController < ApplicationController
before_action :authenticate_user
def index
@reports = Report.all
end
def show
end
def new
@report = Report.new
end
def create
@report = Report.build_report(report_params)
if @report.save
redirect_to reports_path, notice: "Report created"
else
render "new"
end
end
private
def report_params
params.require(:report).permit(:start_date, :end_date, :current)
end
end
| class ReportsController < ApplicationController
before_action :authenticate_user
def index
@reports = Report.all
end
def show
@report = Report.find(params[:id])
end
def new
@report = Report.new
end
def create
@report = Report.build_report(report_params)
if @report.save
redirect_to reports_path, notice: "Report created"
else
render "new"
end
end
private
def report_params
params.require(:report).permit(:start_date, :end_date, :current)
end
end
| Add show action to report controller | Add show action to report controller | Ruby | mit | danevron/hours-report-app,danevron/hours-report-app,danevron/hours-report-app | ruby | ## Code Before:
class ReportsController < ApplicationController
before_action :authenticate_user
def index
@reports = Report.all
end
def show
end
def new
@report = Report.new
end
def create
@report = Report.build_report(report_params)
if @report.save
redirect_to reports_path, notice: "Report created"
else
render "new"
end
end
private
def report_params
params.require(:report).permit(:start_date, :end_date, :current)
end
end
## Instruction:
Add show action to report controller
## Code After:
class ReportsController < ApplicationController
before_action :authenticate_user
def index
@reports = Report.all
end
def show
@report = Report.find(params[:id])
end
def new
@report = Report.new
end
def create
@report = Report.build_report(report_params)
if @report.save
redirect_to reports_path, notice: "Report created"
else
render "new"
end
end
private
def report_params
params.require(:report).permit(:start_date, :end_date, :current)
end
end
|
99177cdc64bdec740557007800b610bff07ce46a | shivyc.py | shivyc.py |
import argparse
def get_arguments():
"""Set up the argument parser and return an object storing the
argument values.
return - An object storing argument values, as returned by
argparse.parse_args()
"""
parser = argparse.ArgumentParser(description="Compile C files.")
# The file name of the C file to compile. The file name gets saved to the
# file_name attribute of the returned object, but this parameter appears as
# "filename" (no underscore) on the command line.
parser.add_argument("file_name", metavar="filename")
return parser.parse_args()
def compile_code(source: str) -> str:
"""Compile the provided source code into assembly.
source - The C source code to compile.
return - The asm output
"""
return source
def main():
"""Load the input files, and dispatch to the compile function for the main
processing.
"""
arguments = get_arguments()
try:
c_file = open(arguments.file_name)
except IOError:
print("shivyc: error: no such file or directory: '{}'"
.format(arguments.file_name))
else:
compile_code(c_file.read())
c_file.close()
if __name__ == "__main__":
main()
|
import argparse
def get_arguments():
"""Set up the argument parser and return an object storing the
argument values.
return - An object storing argument values, as returned by
argparse.parse_args()
"""
parser = argparse.ArgumentParser(description="Compile C files.")
# The file name of the C file to compile. The file name gets saved to the
# file_name attribute of the returned object, but this parameter appears as
# "filename" (no underscore) on the command line.
parser.add_argument("file_name", metavar="filename")
return parser.parse_args()
def compile_code(source: str) -> str:
"""Compile the provided source code into assembly.
source - The C source code to compile.
return - The asm output
"""
return source
def main():
"""Load the input files and dispatch to the compile function for the main
processing.
The main function handles interfacing with the user, like reading the
command line arguments, printing errors, and generating output files. The
compilation logic is in the compile_code function to facilitate testing.
"""
arguments = get_arguments()
try:
c_file = open(arguments.file_name)
except IOError:
print("shivyc: error: no such file or directory: '{}'"
.format(arguments.file_name))
else:
compile_code(c_file.read())
c_file.close()
if __name__ == "__main__":
main()
| Improve commenting on main function | Improve commenting on main function
| Python | mit | ShivamSarodia/ShivyC,ShivamSarodia/ShivyC,ShivamSarodia/ShivyC | python | ## Code Before:
import argparse
def get_arguments():
"""Set up the argument parser and return an object storing the
argument values.
return - An object storing argument values, as returned by
argparse.parse_args()
"""
parser = argparse.ArgumentParser(description="Compile C files.")
# The file name of the C file to compile. The file name gets saved to the
# file_name attribute of the returned object, but this parameter appears as
# "filename" (no underscore) on the command line.
parser.add_argument("file_name", metavar="filename")
return parser.parse_args()
def compile_code(source: str) -> str:
"""Compile the provided source code into assembly.
source - The C source code to compile.
return - The asm output
"""
return source
def main():
"""Load the input files, and dispatch to the compile function for the main
processing.
"""
arguments = get_arguments()
try:
c_file = open(arguments.file_name)
except IOError:
print("shivyc: error: no such file or directory: '{}'"
.format(arguments.file_name))
else:
compile_code(c_file.read())
c_file.close()
if __name__ == "__main__":
main()
## Instruction:
Improve commenting on main function
## Code After:
import argparse
def get_arguments():
"""Set up the argument parser and return an object storing the
argument values.
return - An object storing argument values, as returned by
argparse.parse_args()
"""
parser = argparse.ArgumentParser(description="Compile C files.")
# The file name of the C file to compile. The file name gets saved to the
# file_name attribute of the returned object, but this parameter appears as
# "filename" (no underscore) on the command line.
parser.add_argument("file_name", metavar="filename")
return parser.parse_args()
def compile_code(source: str) -> str:
"""Compile the provided source code into assembly.
source - The C source code to compile.
return - The asm output
"""
return source
def main():
"""Load the input files and dispatch to the compile function for the main
processing.
The main function handles interfacing with the user, like reading the
command line arguments, printing errors, and generating output files. The
compilation logic is in the compile_code function to facilitate testing.
"""
arguments = get_arguments()
try:
c_file = open(arguments.file_name)
except IOError:
print("shivyc: error: no such file or directory: '{}'"
.format(arguments.file_name))
else:
compile_code(c_file.read())
c_file.close()
if __name__ == "__main__":
main()
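
The expanded docstring says the split exists to facilitate testing: with `compile_code` taking a plain string, a test can exercise it without touching argv or the filesystem. A hypothetical example (not in this commit; the assertion reflects the current pass-through stub):

```python
import shivyc

def test_compile_code_passthrough():
    source = "int main() { return 0; }"
    # compile_code currently returns its input unchanged, so this holds until
    # real code generation lands.
    assert shivyc.compile_code(source) == source
```
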
|
c4ed0fcc474e9abcadbcbee2923c87247769a328 | test/function.js | test/function.js | (function(buster, fn, when) {
var assert = buster.assert;
function f(x, y) {
return x + y;
}
buster.testCase('when/function', {
'apply': {
'should return a promise': function() {
var result = fn.apply(f, [1, 2]);
assert(result && typeof result.then === 'function');
},
'should accept values for arguments': function() {
var result = fn.apply(f, [1, 2]);
return when(result, function(result) {
assert.equals(result, 3);
});
},
},
'call': {
'should return a promise': function() {
var result = fn.call(f, 1, 2);
assert(result && typeof result.then === 'function');
},
'should accept values for arguments': function() {
var result = fn.call(f, 1, 2);
return when(result, function(result) {
assert.equals(result, 3);
});
},
},
'bind': {
'should return a function': function() {
assert.isFunction(fn.bind(f, null));
}
}
});
})(
this.buster || require('buster'),
this.when_fn || require('../function'),
this.when || require('../when')
);
| (function(buster, fn, when) {
var assert = buster.assert;
function assertIsPromise(something) {
var message = 'Object is not a promise';
buster.assert(when.isPromise(something), message);
}
function f(x, y) {
return x + y;
}
buster.testCase('when/function', {
'apply': {
'should return a promise': function() {
var result = fn.apply(f, [1, 2]);
assertIsPromise(result);
},
'should accept values for arguments': function() {
var result = fn.apply(f, [1, 2]);
return when(result, function(result) {
assert.equals(result, 3);
});
},
},
'call': {
'should return a promise': function() {
var result = fn.call(f, 1, 2);
assertIsPromise(result);
},
'should accept values for arguments': function() {
var result = fn.call(f, 1, 2);
return when(result, function(result) {
assert.equals(result, 3);
});
},
},
'bind': {
'should return a function': function() {
assert.isFunction(fn.bind(f, null));
}
}
});
})(
this.buster || require('buster'),
this.when_fn || require('../function'),
this.when || require('../when')
);
| Remove duplication and extra knowledge | Remove duplication and extra knowledge
I guess we can trust when.isPromise here.
Maybe we should consider using buster.assertions.add('isPromise')
| JavaScript | mit | mlennon3/when,ning-github/when,petkaantonov/when,frank-weindel/when,ning-github/when,tkirda/when,mlennon3/when,DJDNS/when.js,frank-weindel/when,SourcePointUSA/when,stevage/when,anthonyvia/when,caporta/when,stevage/when,anthonyvia/when,SourcePointUSA/when,caporta/when,tkirda/when | javascript | ## Code Before:
(function(buster, fn, when) {
var assert = buster.assert;
function f(x, y) {
return x + y;
}
buster.testCase('when/function', {
'apply': {
'should return a promise': function() {
var result = fn.apply(f, [1, 2]);
assert(result && typeof result.then === 'function');
},
'should accept values for arguments': function() {
var result = fn.apply(f, [1, 2]);
return when(result, function(result) {
assert.equals(result, 3);
});
},
},
'call': {
'should return a promise': function() {
var result = fn.call(f, 1, 2);
assert(result && typeof result.then === 'function');
},
'should accept values for arguments': function() {
var result = fn.call(f, 1, 2);
return when(result, function(result) {
assert.equals(result, 3);
});
},
},
'bind': {
'should return a function': function() {
assert.isFunction(fn.bind(f, null));
}
}
});
})(
this.buster || require('buster'),
this.when_fn || require('../function'),
this.when || require('../when')
);
## Instruction:
Remove duplication and extra knowledge
I guess we can trust when.isPromise here.
Maybe we should consider using buster.assertions.add('isPromise')
## Code After:
(function(buster, fn, when) {
var assert = buster.assert;
function assertIsPromise(something) {
var message = 'Object is not a promise';
buster.assert(when.isPromise(something), message);
}
function f(x, y) {
return x + y;
}
buster.testCase('when/function', {
'apply': {
'should return a promise': function() {
var result = fn.apply(f, [1, 2]);
assertIsPromise(result);
},
'should accept values for arguments': function() {
var result = fn.apply(f, [1, 2]);
return when(result, function(result) {
assert.equals(result, 3);
});
},
},
'call': {
'should return a promise': function() {
var result = fn.call(f, 1, 2);
assertIsPromise(result);
},
'should accept values for arguments': function() {
var result = fn.call(f, 1, 2);
return when(result, function(result) {
assert.equals(result, 3);
});
},
},
'bind': {
'should return a function': function() {
assert.isFunction(fn.bind(f, null));
}
}
});
})(
this.buster || require('buster'),
this.when_fn || require('../function'),
this.when || require('../when')
);
|
233f50e92e64b5c210ef2a132ba358f674617dbc | tools/check-machine.sh | tools/check-machine.sh | EXIT=0
if ! ps -ef | grep pup*et; then
echo "puppet is not running on $HOSTNAME -- restarting"
# try to restart
/usr/bin/ruby /usr/sbin/puppetd --server=volt1 || EXIT=1
if ! ps -ef | grep pup*et; then
echo "puppet is still not running on $HOSTNAME"
EXIT=1
fi
fi
cd /tmp && ls -tr | grep volt_snapshot | head -n-10 | xargs rm
exit $EXIT
| hostname
EXIT=0
if ! ps -ef | grep pup*et; then
echo "puppet is not running on $HOSTNAME -- restarting"
# try to restart
/usr/bin/ruby /usr/sbin/puppetd --server=volt1 || EXIT=1
if ! ps -ef | grep pup*et; then
echo "puppet is still not running on $HOSTNAME"
EXIT=1
fi
fi
cd /tmp && ls -tr | grep volt_snapshot | head -n-10 | xargs rm -f
exit $EXIT
| Remove confusing output when no snapshot files need to be removed Output which machine is being checked, for easier diagnosis | Remove confusing output when no snapshot files need to be removed
Output which machine is being checked, for easier diagnosis
| Shell | agpl-3.0 | ingted/voltdb,VoltDB/voltdb,deerwalk/voltdb,ingted/voltdb,ingted/voltdb,wolffcm/voltdb,deerwalk/voltdb,kobronson/cs-voltdb,wolffcm/voltdb,deerwalk/voltdb,migue/voltdb,ingted/voltdb,flybird119/voltdb,kobronson/cs-voltdb,flybird119/voltdb,migue/voltdb,kobronson/cs-voltdb,zuowang/voltdb,kobronson/cs-voltdb,flybird119/voltdb,creative-quant/voltdb,kobronson/cs-voltdb,kobronson/cs-voltdb,paulmartel/voltdb,deerwalk/voltdb,VoltDB/voltdb,zuowang/voltdb,kumarrus/voltdb,VoltDB/voltdb,flybird119/voltdb,kumarrus/voltdb,ingted/voltdb,zuowang/voltdb,simonzhangsm/voltdb,flybird119/voltdb,creative-quant/voltdb,paulmartel/voltdb,migue/voltdb,flybird119/voltdb,kobronson/cs-voltdb,wolffcm/voltdb,zuowang/voltdb,simonzhangsm/voltdb,migue/voltdb,ingted/voltdb,kumarrus/voltdb,VoltDB/voltdb,simonzhangsm/voltdb,zuowang/voltdb,wolffcm/voltdb,VoltDB/voltdb,zuowang/voltdb,migue/voltdb,zuowang/voltdb,creative-quant/voltdb,creative-quant/voltdb,wolffcm/voltdb,flybird119/voltdb,migue/voltdb,VoltDB/voltdb,paulmartel/voltdb,paulmartel/voltdb,wolffcm/voltdb,VoltDB/voltdb,kumarrus/voltdb,paulmartel/voltdb,wolffcm/voltdb,simonzhangsm/voltdb,ingted/voltdb,flybird119/voltdb,simonzhangsm/voltdb,paulmartel/voltdb,wolffcm/voltdb,simonzhangsm/voltdb,deerwalk/voltdb,migue/voltdb,kobronson/cs-voltdb,kumarrus/voltdb,simonzhangsm/voltdb,kumarrus/voltdb,creative-quant/voltdb,simonzhangsm/voltdb,creative-quant/voltdb,creative-quant/voltdb,paulmartel/voltdb,ingted/voltdb,kumarrus/voltdb,deerwalk/voltdb,kumarrus/voltdb,deerwalk/voltdb,deerwalk/voltdb,paulmartel/voltdb,creative-quant/voltdb,zuowang/voltdb,migue/voltdb | shell | ## Code Before:
EXIT=0
if ! ps -ef | grep pup*et; then
echo "puppet is not running on $HOSTNAME -- restarting"
# try to restart
/usr/bin/ruby /usr/sbin/puppetd --server=volt1 || EXIT=1
if ! ps -ef | grep pup*et; then
echo "puppet is still not running on $HOSTNAME"
EXIT=1
fi
fi
cd /tmp && ls -tr | grep volt_snapshot | head -n-10 | xargs rm
exit $EXIT
## Instruction:
Remove confusing output when no snapshot files need to be removed
Output which machine is being checked, for easier diagnosis
## Code After:
hostname
EXIT=0
if ! ps -ef | grep pup*et; then
echo "puppet is not running on $HOSTNAME -- restarting"
# try to restart
/usr/bin/ruby /usr/sbin/puppetd --server=volt1 || EXIT=1
if ! ps -ef | grep pup*et; then
echo "puppet is still not running on $HOSTNAME"
EXIT=1
fi
fi
cd /tmp && ls -tr | grep volt_snapshot | head -n-10 | xargs rm -f
exit $EXIT
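
Why `-f` quiets things down: when fewer than ten snapshot files exist, `head -n-10` produces no output, yet plain `xargs rm` still runs `rm` once with no operands and it complains on stderr; `rm -f` treats an empty operand list as success. A quick illustration (typical GNU coreutils behaviour, not part of the VoltDB script):

```sh
printf '' | xargs rm      # rm is still invoked and reports a missing operand
printf '' | xargs rm -f   # rm -f exits 0 silently with nothing to delete
printf '' | xargs -r rm   # GNU xargs -r skips running rm at all
```
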
|
f878604de6113018d04662430ee413f33019e73e | index.js | index.js | var Sandbox = require('sandbox');
module.exports = exports = function(json, callback) {
// run the code in a sandbox to thwart evil people
var s = new Sandbox();
// in a self executing function set json equal to a variable and stringify the result
json = "(function() { var j ="+json+"; return JSON.stringify(j); })()";
s.run(json, function(output) {
//chop off the extra quotes
var out = output.result;
out = out.substring(1, out.length-1);
if (output.result[1] == '{') {
callback(null, out)
} else {
callback(out, null)
}
});
} | var Sandbox = require('sandbox');
module.exports = exports = function(json, callback) {
// run the code in a sandbox to thwart evil people
var s = new Sandbox();
// in a self executing function set json equal to a variable and stringify the result
json = "(function() { var j ="+json+"; return JSON.stringify(j); })()";
s.run(json, function(output) {
//chop off the extra quotes
var out = output.result;
out = out.substring(1, out.length-1);
if (output.result[1] == '{') {
callback(null, out) //no error as valid responses will start with a {
} else {
callback(out, null) //error
}
});
};
| Add better documentation to library | Add better documentation to library
| JavaScript | mit | TechplexEngineer/json_sanitizer | javascript | ## Code Before:
var Sandbox = require('sandbox');
module.exports = exports = function(json, callback) {
// run the code in a sandbox to thwart evil people
var s = new Sandbox();
// in a self executing function set json equal to a variable and stringify the result
json = "(function() { var j ="+json+"; return JSON.stringify(j); })()";
s.run(json, function(output) {
//chop off the extra quotes
var out = output.result;
out = out.substring(1, out.length-1);
if (output.result[1] == '{') {
callback(null, out)
} else {
callback(out, null)
}
});
}
## Instruction:
Add better documentation to library
## Code After:
var Sandbox = require('sandbox');
module.exports = exports = function(json, callback) {
// run the code in a sandbox to thwart evil people
var s = new Sandbox();
// in a self executing function set json equal to a variable and stringify the result
json = "(function() { var j ="+json+"; return JSON.stringify(j); })()";
s.run(json, function(output) {
//chop off the extra quotes
var out = output.result;
out = out.substring(1, out.length-1);
if (output.result[1] == '{') {
callback(null, out) //no error as valid responses will start with a {
} else {
callback(out, null) //error
}
});
};
|
0c2bb4e831da84baaeb837246912140787e0d4bd | firmware/examples/parse_hex.ino | firmware/examples/parse_hex.ino | // This provides a variable, colour, a function to set the variable from a hex
// string, set_colour, and programs the RGB LED on the spark core to reflect
// the RRGGBB value last programmed. The default on reboot is black.
// BEWARE: British spelling ahead!
#include "spark-parse.h"
static int colour = 0;
int set_colour(String args)
{
// parse_hex only likes uppercase
args.toUpperCase();
// Parse arg to colour settings
int val = parse_hex(args);
if (val != -1) {
colour = val;
}
// Returns the value, if it took, or the previous colour setting, if
// it didn't.
return colour;
}
void setup() {
RGB.control(true);
Spark.function("set_colour", set_colour);
Spark.variable("colour", &colour, INT);
}
void loop() {
// Colour value is the standard RRGGBB layout, which we break up here.
RGB.color((colour >> 16) & 255, (colour >> 8) & 255, colour & 255);
}
|
// This provides a variable, colour, a function to set the variable from a hex
// string, set_colour, and programs the RGB LED on the spark core to reflect
// the RRGGBB value last programmed. The default on reboot is black.
// BEWARE: British spelling ahead!
static int colour = 0;
int set_colour(String args)
{
// parse_hex only likes uppercase
args.toUpperCase();
// Parse arg to colour settings
int val = parse_hex(args);
if (val != -1) {
colour = val;
}
// Returns the value, if it took, or the previous colour setting, if
// it didn't.
return colour;
}
void setup() {
RGB.control(true);
Spark.function("set_colour", set_colour);
Spark.variable("colour", &colour, INT);
}
void loop() {
// Colour value is the standard RRGGBB layout, which we break up here.
RGB.color((colour >> 16) & 255, (colour >> 8) & 255, colour & 255);
}
| Correct the include path to the one the Spark IDE wants | Correct the include path to the one the Spark IDE wants
| Arduino | mit | iawells/spark-parse,iawells/spark-parse | arduino | ## Code Before:
// This provides a variable, colour, a function to set the variable from a hex
// string, set_colour, and programs the RGB LED on the spark core to reflect
// the RRGGBB value last programmed. The default on reboot is black.
// BEWARE: British spelling ahead!
#include "spark-parse.h"
static int colour = 0;
int set_colour(String args)
{
// parse_hex only likes uppercase
args.toUpperCase();
// Parse arg to colour settings
int val = parse_hex(args);
if (val != -1) {
colour = val;
}
// Returns the value, if it took, or the previous colour setting, if
// it didn't.
return colour;
}
void setup() {
RGB.control(true);
Spark.function("set_colour", set_colour);
Spark.variable("colour", &colour, INT);
}
void loop() {
// Colour value is the standard RRGGBB layout, which we break up here.
RGB.color((colour >> 16) & 255, (colour >> 8) & 255, colour & 255);
}
## Instruction:
Correct the include path to the one the Spark IDE wants
## Code After:
// This provides a variable, colour, a function to set the variable from a hex
// string, set_colour, and programs the RGB LED on the spark core to reflect
// the RRGGBB value last programmed. The default on reboot is black.
// BEWARE: British spelling ahead!
static int colour = 0;
int set_colour(String args)
{
// parse_hex only likes uppercase
args.toUpperCase();
// Parse arg to colour settings
int val = parse_hex(args);
if (val != -1) {
colour = val;
}
// Returns the value, if it took, or the previous colour setting, if
// it didn't.
return colour;
}
void setup() {
RGB.control(true);
Spark.function("set_colour", set_colour);
Spark.variable("colour", &colour, INT);
}
void loop() {
// Colour value is the standard RRGGBB layout, which we break up here.
RGB.color((colour >> 16) & 255, (colour >> 8) & 255, colour & 255);
}
|
e720307a1ba5790a6001a09e9a8f71247ec3241f | lib/views/general/_before_head_end.html.erb | lib/views/general/_before_head_end.html.erb | <script type="text/javascript" src="//use.typekit.net/csi1ugd.js"></script>
<script type="text/javascript">try{Typekit.load();}catch(e){}</script>
| <script type="text/javascript" src="//use.typekit.net/csi1ugd.js"></script>
<script type="text/javascript">try{Typekit.load();}catch(e){}</script>
<meta name="google-site-verification" content="DbAHEzh0igI0rZziSexQh5fTrbRfNPSw8BdmrmNY_70" />
| Add verification code for Webmaster tools. | Add verification code for Webmaster tools.
| HTML+ERB | mit | schlos/whatdotheyknow-theme,schlos/whatdotheyknow-theme,mysociety/whatdotheyknow-theme,mysociety/whatdotheyknow-theme,mysociety/whatdotheyknow-theme | html+erb | ## Code Before:
<script type="text/javascript" src="//use.typekit.net/csi1ugd.js"></script>
<script type="text/javascript">try{Typekit.load();}catch(e){}</script>
## Instruction:
Add verification code for Webmaster tools.
## Code After:
<script type="text/javascript" src="//use.typekit.net/csi1ugd.js"></script>
<script type="text/javascript">try{Typekit.load();}catch(e){}</script>
<meta name="google-site-verification" content="DbAHEzh0igI0rZziSexQh5fTrbRfNPSw8BdmrmNY_70" />
|
bf090bd2905fabe3346d1771618b99fa86dc588c | .travis.yml | .travis.yml | sudo: false
language: node_js
#cache:
# directories:
# - node_modules
notifications:
email: false
node_js:
- "6"
- "8"
env:
global:
- CXX=g++-4.8
- TWITTER_BEARER_TOKEN=123456789
matrix:
allow_failures:
- node_js: "6"
addons:
apt:
sources:
- ubuntu-toolchain-r-test
packages:
- g++-4.8
code_climate:
repo_token: ${CODECLIMATE_REPO_TOKEN}
# before_script:
# - npm install --global snyk
# - snyk auth ${SNYK_TOKEN}
script:
- npm run build
# - npm run security_check
- npm run lint
- npm run test
| sudo: false
language: node_js
#cache:
# directories:
# - node_modules
notifications:
email: false
services:
- redis-server
node_js:
- "8"
- "9"
env:
global:
- CXX=g++-4.8
- TWITTER_BEARER_TOKEN=123456789
matrix:
allow_failures:
- node_js: "9"
addons:
apt:
sources:
- ubuntu-toolchain-r-test
packages:
- g++-4.8
code_climate:
repo_token: ${CODECLIMATE_REPO_TOKEN}
rethinkdb: '2.3.6'
# before_script:
# - npm install --global snyk
# - snyk auth ${SNYK_TOKEN}
script:
- npm run build
# - npm run security_check
- npm run test
- npm run lint
| Add Redis and RethinkDB required services, move build to Node.js versions 8 and 9 rather than 6 and 8. Move linting step to after testing step so it's easier to see test results. | Add Redis and RethinkDB required services, move build to Node.js
versions 8 and 9 rather than 6 and 8.
Move linting step to after testing step so it's easier to see
test results.
| YAML | mit | ocean/higgins,ocean/higgins | yaml | ## Code Before:
sudo: false
language: node_js
#cache:
# directories:
# - node_modules
notifications:
email: false
node_js:
- "6"
- "8"
env:
global:
- CXX=g++-4.8
- TWITTER_BEARER_TOKEN=123456789
matrix:
allow_failures:
- node_js: "6"
addons:
apt:
sources:
- ubuntu-toolchain-r-test
packages:
- g++-4.8
code_climate:
repo_token: ${CODECLIMATE_REPO_TOKEN}
# before_script:
# - npm install --global snyk
# - snyk auth ${SNYK_TOKEN}
script:
- npm run build
# - npm run security_check
- npm run lint
- npm run test
## Instruction:
Add Redis and RethinkDB required services, move build to Node.js
versions 8 and 9 rather than 6 and 8.
Move linting step to after testing step so it's easier to see
test results.
## Code After:
sudo: false
language: node_js
#cache:
# directories:
# - node_modules
notifications:
email: false
services:
- redis-server
node_js:
- "8"
- "9"
env:
global:
- CXX=g++-4.8
- TWITTER_BEARER_TOKEN=123456789
matrix:
allow_failures:
- node_js: "9"
addons:
apt:
sources:
- ubuntu-toolchain-r-test
packages:
- g++-4.8
code_climate:
repo_token: ${CODECLIMATE_REPO_TOKEN}
rethinkdb: '2.3.6'
# before_script:
# - npm install --global snyk
# - snyk auth ${SNYK_TOKEN}
script:
- npm run build
# - npm run security_check
- npm run test
- npm run lint
|
8fe3ee1003d7de5915307594b613a9b34629d3ba | app/models/councillor.rb | app/models/councillor.rb | class Councillor < ActiveRecord::Base
belongs_to :ward
has_and_belongs_to_many :committees
has_many :motions
has_many :items, as: :origin
end
| class Councillor < ActiveRecord::Base
belongs_to :ward
has_and_belongs_to_many :committees
has_many :motions
has_many :councillor_vote
has_many :items, as: :origin
end
| Fix Councillor Vote Not Deleting | Fix Councillor Vote Not Deleting
Because the has_many association for councillor votes had not been set up.
| Ruby | mit | CivicHaxx/ok_councillr,CivicHaxx/ok_councillr,CivicHaxx/ok_councillr | ruby | ## Code Before:
class Councillor < ActiveRecord::Base
belongs_to :ward
has_and_belongs_to_many :committees
has_many :motions
has_many :items, as: :origin
end
## Instruction:
Fix Councillor Vote Not Deleting
Because the has_many association for councillor votes had not been set up.
## Code After:
class Councillor < ActiveRecord::Base
belongs_to :ward
has_and_belongs_to_many :committees
has_many :motions
has_many :councillor_vote
has_many :items, as: :origin
end
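
An aside on the fix: Rails convention would pluralise the association name, and removing votes together with a councillor normally also needs `dependent: :destroy`; whether that is handled elsewhere is not visible in this diff. The conventional spelling would be:

```ruby
class Councillor < ActiveRecord::Base
  # Plural name maps to the CouncillorVote model; dependent: :destroy removes
  # the votes when the councillor record is destroyed.
  has_many :councillor_votes, dependent: :destroy
end
```
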
|
4bc31a4091f8dcd2e8aca5dcf6168c4f8c524637 | priv/public/ui/app/mn_admin/mn_indexes/mn_views/mn_views.html | priv/public/ui/app/mn_admin/mn_indexes/mn_views/mn_views.html | <div ng-if="!viewsCtl.mnPoolDefault.value.isKvNode">
Views are not supported on this server node as it does not have a Data service. You may access views functionality on any Data service node in the cluster, such as: <a id="js_healthy_kv_node_link" href="{{::viewsCtl.getKvNodeLink()}}" target="_blank">{{::viewsCtl.getKvNodeLink()}}</a>
</div>
<div ng-if="viewsCtl.mnPoolDefault.value.isKvNode" id="js_views" class="views">
<h1 style="float:left;">
<span >
<ui-select
ng-model="viewsCtl.state.bucketsNames.selected"
on-select="viewsCtl.onSelectBucket($item)"
theme="selectize"
style="display: inline-block; vertical-align: middle;">
<ui-select-match>{{$select.selected}}</ui-select-match>
<ui-select-choices repeat="name in viewsCtl.state.bucketsNames | filter: $select.search">
<span class="select-item ellipsis" ng-bind-html="name | highlight: $select.search" ></span>
</ui-select-choices>
</ui-select>
<span style="vertical-align: middle;">
> <a style="vertical-align: middle;" ui-sref="app.admin.indexes.views.list">Views</a>
</span>
</span>
</h1>
<ui-view></ui-view>
</div> | <div ng-if="!viewsCtl.mnPoolDefault.value.isKvNode">
The views interface is only available on nodes running the Data service. You may access the interface here:
<a id="js_healthy_kv_node_link" href="{{::viewsCtl.getKvNodeLink()}}" target="_blank">{{::viewsCtl.getKvNodeLink()}}</a>
</div>
<div ng-if="viewsCtl.mnPoolDefault.value.isKvNode" id="js_views" class="views">
<h1 style="float:left;">
<span >
<ui-select
ng-model="viewsCtl.state.bucketsNames.selected"
on-select="viewsCtl.onSelectBucket($item)"
theme="selectize"
style="display: inline-block; vertical-align: middle;">
<ui-select-match>{{$select.selected}}</ui-select-match>
<ui-select-choices repeat="name in viewsCtl.state.bucketsNames | filter: $select.search">
<span class="select-item ellipsis" ng-bind-html="name | highlight: $select.search" ></span>
</ui-select-choices>
</ui-select>
<span style="vertical-align: middle;">
> <a style="vertical-align: middle;" ui-sref="app.admin.indexes.views.list">Views</a>
</span>
</span>
</h1>
<ui-view></ui-view>
</div> | Fix for MB-17875 - Make redirection links more consistent. | Fix for MB-17875 - Make redirection links more consistent.
After some discussion with Dave F., Rob, Perry, and myself, we chose a
consistent message for Query, View, and FTS. This change updates it
for View.
Change-Id: Ia4e0a4083e1cac2a283dc7f5e4f59bd2db16b96f
Reviewed-on: http://review.couchbase.org/62501
Reviewed-by: Pavel Blagodov <[email protected]>
Tested-by: Pavel Blagodov <[email protected]>
| HTML | apache-2.0 | membase/ns_server,ceejatec/ns_server,ceejatec/ns_server,membase/ns_server,membase/ns_server,ceejatec/ns_server,ceejatec/ns_server,ceejatec/ns_server,ceejatec/ns_server,membase/ns_server,membase/ns_server,membase/ns_server | html | ## Code Before:
<div ng-if="!viewsCtl.mnPoolDefault.value.isKvNode">
Views are not supported on this server node as it does not have a Data service. You may access views functionality on any Data service node in the cluster, such as: <a id="js_healthy_kv_node_link" href="{{::viewsCtl.getKvNodeLink()}}" target="_blank">{{::viewsCtl.getKvNodeLink()}}</a>
</div>
<div ng-if="viewsCtl.mnPoolDefault.value.isKvNode" id="js_views" class="views">
<h1 style="float:left;">
<span >
<ui-select
ng-model="viewsCtl.state.bucketsNames.selected"
on-select="viewsCtl.onSelectBucket($item)"
theme="selectize"
style="display: inline-block; vertical-align: middle;">
<ui-select-match>{{$select.selected}}</ui-select-match>
<ui-select-choices repeat="name in viewsCtl.state.bucketsNames | filter: $select.search">
<span class="select-item ellipsis" ng-bind-html="name | highlight: $select.search" ></span>
</ui-select-choices>
</ui-select>
<span style="vertical-align: middle;">
> <a style="vertical-align: middle;" ui-sref="app.admin.indexes.views.list">Views</a>
</span>
</span>
</h1>
<ui-view></ui-view>
</div>
## Instruction:
Fix for MB-17875 - Make redirection links more consistent.
After some discussion with Dave F., Rob, Perry, and myself, we chose a
consistent message for Query, View, and FTS. This change updates it
for View.
Change-Id: Ia4e0a4083e1cac2a283dc7f5e4f59bd2db16b96f
Reviewed-on: http://review.couchbase.org/62501
Reviewed-by: Pavel Blagodov <[email protected]>
Tested-by: Pavel Blagodov <[email protected]>
## Code After:
<div ng-if="!viewsCtl.mnPoolDefault.value.isKvNode">
The views interface is only available on nodes running the Data service. You may access the interface here:
<a id="js_healthy_kv_node_link" href="{{::viewsCtl.getKvNodeLink()}}" target="_blank">{{::viewsCtl.getKvNodeLink()}}</a>
</div>
<div ng-if="viewsCtl.mnPoolDefault.value.isKvNode" id="js_views" class="views">
<h1 style="float:left;">
<span >
<ui-select
ng-model="viewsCtl.state.bucketsNames.selected"
on-select="viewsCtl.onSelectBucket($item)"
theme="selectize"
style="display: inline-block; vertical-align: middle;">
<ui-select-match>{{$select.selected}}</ui-select-match>
<ui-select-choices repeat="name in viewsCtl.state.bucketsNames | filter: $select.search">
<span class="select-item ellipsis" ng-bind-html="name | highlight: $select.search" ></span>
</ui-select-choices>
</ui-select>
<span style="vertical-align: middle;">
> <a style="vertical-align: middle;" ui-sref="app.admin.indexes.views.list">Views</a>
</span>
</span>
</h1>
<ui-view></ui-view>
</div> |
80e8535442d75b511741e5f09badf52e9c5ca3b4 | examples/hellomessage/hello_message.go | examples/hellomessage/hello_message.go | package hellomessage // import "myitcv.io/react/examples/hellomessage"
import (
r "myitcv.io/react"
)
//go:generate reactGen
// HelloMessageDef is the definition of the HelloMessage component
type HelloMessageDef struct {
r.ComponentDef
}
// HelloMessageProps is the props type for the HelloMessage component
type HelloMessageProps struct {
Name string
}
// HelloMessage creates instances of the HelloMessage component
func HelloMessage(p HelloMessageProps) *HelloMessageElem {
return &HelloMessageElem{
Element: r.CreateElement(buildCmp, p),
}
}
func buildCmp(elem r.ComponentDef) r.Component {
return HelloMessageDef{ComponentDef: elem}
}
// Render renders the HelloMessage component
func (h HelloMessageDef) Render() r.Element {
return r.Div(nil,
r.S("Hello "+h.Props().Name),
)
}
| package hellomessage // import "myitcv.io/react/examples/hellomessage"
import (
r "myitcv.io/react"
)
//go:generate reactGen
// HelloMessageDef is the definition of the HelloMessage component
type HelloMessageDef struct {
r.ComponentDef
}
// HelloMessageProps is the props type for the HelloMessage component
type HelloMessageProps struct {
Name string
}
// HelloMessage creates instances of the HelloMessage component
func HelloMessage(p HelloMessageProps) *HelloMessageElem {
return buildHelloMessageElem(p)
}
// Render renders the HelloMessage component
func (h HelloMessageDef) Render() r.Element {
return r.Div(nil,
r.S("Hello "+h.Props().Name),
)
}
| Remove old code from hello message example | Remove old code from hello message example
| Go | bsd-3-clause | myitcv/react,myitcv/react,myitcv/react,myitcv/react | go | ## Code Before:
package hellomessage // import "myitcv.io/react/examples/hellomessage"
import (
r "myitcv.io/react"
)
//go:generate reactGen
// HelloMessageDef is the definition of the HelloMessage component
type HelloMessageDef struct {
r.ComponentDef
}
// HelloMessageProps is the props type for the HelloMessage component
type HelloMessageProps struct {
Name string
}
// HelloMessage creates instances of the HelloMessage component
func HelloMessage(p HelloMessageProps) *HelloMessageElem {
return &HelloMessageElem{
Element: r.CreateElement(buildCmp, p),
}
}
func buildCmp(elem r.ComponentDef) r.Component {
return HelloMessageDef{ComponentDef: elem}
}
// Render renders the HelloMessage component
func (h HelloMessageDef) Render() r.Element {
return r.Div(nil,
r.S("Hello "+h.Props().Name),
)
}
## Instruction:
Remove old code from hello message example
## Code After:
package hellomessage // import "myitcv.io/react/examples/hellomessage"
import (
r "myitcv.io/react"
)
//go:generate reactGen
// HelloMessageDef is the definition of the HelloMessage component
type HelloMessageDef struct {
r.ComponentDef
}
// HelloMessageProps is the props type for the HelloMessage component
type HelloMessageProps struct {
Name string
}
// HelloMessage creates instances of the HelloMessage component
func HelloMessage(p HelloMessageProps) *HelloMessageElem {
return buildHelloMessageElem(p)
}
// Render renders the HelloMessage component
func (h HelloMessageDef) Render() r.Element {
return r.Div(nil,
r.S("Hello "+h.Props().Name),
)
}
|
15f95832b24a88bbeb6440a9a5234f900e7d51e7 | app/models/concerns/job/job_result.rb | app/models/concerns/job/job_result.rb | class Job < ActiveRecord::Base
module JobResult
extend ActiveSupport::Concern
def calculate_result
# Default to errored if no results have been reported
self.result = 'errored'
# Set initial result using the exit value
if self.exit_value
if self.exit_value == 0
self.result = 'passed'
else
self.result = 'failed'
end
end
# If we've got counts, attempt to be a bit smarter
if self.errored_count.to_i > 0
self.result = 'errored'
elsif self.failed_count.to_i > 0
self.result = 'failed'
elsif self.passed_count.to_i > 0
self.result = 'passed'
end
self.queued_count = 0
self.running_count = 0
self.save
end
def move_queued_to_running
self.running_count = self.queued_count
self.queued_count = 0
self.save
end
def move_all_to_errored
self.errored_count = self.queued_count.to_i + self.running_count.to_i
self.running_count = 0
self.queued_count = 0
self.save
end
end
end
| class Job < ActiveRecord::Base
module JobResult
extend ActiveSupport::Concern
def calculate_result
# Default to errored if no results have been reported
self.result = 'errored'
# Set initial result using the exit value
if self.exit_value
if self.exit_value == 0
self.result = 'passed'
else
self.result = 'failed'
end
end
# If we've got counts, attempt to be a bit smarter
if self.errored_count.to_i > 0
self.result = 'errored'
elsif self.failed_count.to_i > 0
self.result = 'failed'
elsif self.passed_count.to_i > 0
self.result = 'passed'
end
self.queued_count = 0
self.running_count = 0
self.save
end
def move_queued_to_running
if test_results
test_results.each { |tr| tr.update(status: 'running') }
end
self.running_count = self.queued_count
self.queued_count = 0
self.save
end
def move_all_to_errored
if test_results
test_results.each { |tr| tr.update(status: 'errored') }
end
self.errored_count = self.queued_count.to_i + self.running_count.to_i
self.running_count = 0
self.queued_count = 0
self.save
end
end
end
| Update test_result statuses upon state changes | Update test_result statuses upon state changes
| Ruby | mit | bbc/hive-scheduler,bbc/hive-scheduler,bbc/hive-scheduler | ruby | ## Code Before:
class Job < ActiveRecord::Base
module JobResult
extend ActiveSupport::Concern
def calculate_result
# Default to errored if no results have been reported
self.result = 'errored'
# Set initial result using the exit value
if self.exit_value
if self.exit_value == 0
self.result = 'passed'
else
self.result = 'failed'
end
end
# If we've got counts, attempt to be a bit smarter
if self.errored_count.to_i > 0
self.result = 'errored'
elsif self.failed_count.to_i > 0
self.result = 'failed'
elsif self.passed_count.to_i > 0
self.result = 'passed'
end
self.queued_count = 0
self.running_count = 0
self.save
end
def move_queued_to_running
self.running_count = self.queued_count
self.queued_count = 0
self.save
end
def move_all_to_errored
self.errored_count = self.queued_count.to_i + self.running_count.to_i
self.running_count = 0
self.queued_count = 0
self.save
end
end
end
## Instruction:
Update test_result statuses upon state changes
## Code After:
class Job < ActiveRecord::Base
module JobResult
extend ActiveSupport::Concern
def calculate_result
# Default to errored if no results have been reported
self.result = 'errored'
# Set initial result using the exit value
if self.exit_value
if self.exit_value == 0
self.result = 'passed'
else
self.result = 'failed'
end
end
# If we've got counts, attempt to be a bit smarter
if self.errored_count.to_i > 0
self.result = 'errored'
elsif self.failed_count.to_i > 0
self.result = 'failed'
elsif self.passed_count.to_i > 0
self.result = 'passed'
end
self.queued_count = 0
self.running_count = 0
self.save
end
def move_queued_to_running
if test_results
test_results.each { |tr| tr.update(status: 'running') }
end
self.running_count = self.queued_count
self.queued_count = 0
self.save
end
def move_all_to_errored
if test_results
test_results.each { |tr| tr.update(status: 'errored') }
end
self.errored_count = self.queued_count.to_i + self.running_count.to_i
self.running_count = 0
self.queued_count = 0
self.save
end
end
end
|
e81a54a5df2aaafae230084fe3b4d59c5b4f61cc | parallel/runner.py | parallel/runner.py | import Queue
from parallel import worker
class Runner(object):
def __init__(self, num_workers=4):
self.in_queue = Queue.Queue()
self.out_queue = Queue.Queue()
self.num_workers = num_workers
self.workers = None
self._start_workers()
def _start_workers(self):
self.workers = [worker.Worker(self.in_queue, self.out_queue)
for i in range(self.num_workers)]
def add_task(self, task, *args, **kwargs):
self.in_queue.put((task, args, kwargs))
def results(self):
self.in_queue.join()
return self.out_queue.queue
| import Queue
from parallel import config
from parallel import worker
class Runner(object):
def __init__(self, num_workers=config.NUM_WORKERS):
self.in_queue = Queue.Queue()
self.out_queue = Queue.Queue()
self.num_workers = num_workers
self.workers = None
self._start_workers()
def _start_workers(self):
self.workers = [worker.Worker(self.in_queue, self.out_queue)
for i in range(self.num_workers)]
def add_task(self, task, *args, **kwargs):
self.in_queue.put((task, args, kwargs))
def results(self):
self.in_queue.join()
return self.out_queue.queue
| Use default number of workers from config | Use default number of workers from config
config does a better job at figuring out the optimal number of workers,
so it will be used instead.
| Python | mit | andersonvom/mparallel | python | ## Code Before:
import Queue
from parallel import worker
class Runner(object):
def __init__(self, num_workers=4):
self.in_queue = Queue.Queue()
self.out_queue = Queue.Queue()
self.num_workers = num_workers
self.workers = None
self._start_workers()
def _start_workers(self):
self.workers = [worker.Worker(self.in_queue, self.out_queue)
for i in range(self.num_workers)]
def add_task(self, task, *args, **kwargs):
self.in_queue.put((task, args, kwargs))
def results(self):
self.in_queue.join()
return self.out_queue.queue
## Instruction:
Use default number of workers from config
config does a better job at figuring out the optimal number of workers,
so it will be used instead.
## Code After:
import Queue
from parallel import config
from parallel import worker
class Runner(object):
def __init__(self, num_workers=config.NUM_WORKERS):
self.in_queue = Queue.Queue()
self.out_queue = Queue.Queue()
self.num_workers = num_workers
self.workers = None
self._start_workers()
def _start_workers(self):
self.workers = [worker.Worker(self.in_queue, self.out_queue)
for i in range(self.num_workers)]
def add_task(self, task, *args, **kwargs):
self.in_queue.put((task, args, kwargs))
def results(self):
self.in_queue.join()
return self.out_queue.queue
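
`parallel.config` itself is not in this diff, so the value of NUM_WORKERS is an open question here. A minimal sketch of what such a module usually looks like (an assumption, kept Python 2 friendly to match the `import Queue` above):

```python
# Hypothetical parallel/config.py -- not part of this commit.
import multiprocessing

try:
    # One worker per CPU is the usual default for this kind of pool.
    NUM_WORKERS = multiprocessing.cpu_count()
except NotImplementedError:
    # cpu_count() may be unavailable on some platforms; keep the old default.
    NUM_WORKERS = 4
```
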
|
e637e5f53990709ed654b661465685ad9d05a182 | api/spawner/templates/constants.py | api/spawner/templates/constants.py | from __future__ import absolute_import, division, print_function
from django.conf import settings
JOB_NAME = 'plxjob-{task_type}{task_idx}-{experiment_uuid}'
DEFAULT_PORT = 2222
ENV_VAR_TEMPLATE = '{name: "{var_name}", value: "{var_value}"}'
VOLUME_NAME = 'pv-{vol_name}'
VOLUME_CLAIM_NAME = 'pvc-{vol_name}'
CLUSTER_CONFIG_MAP_NAME = 'plxcluster-{experiment_uuid}'
CLUSTER_CONFIG_MAP_KEY_NAME = 'plxcluster_{experiment_uuid}_{task_type}'
POD_CONTAINER_PROJECT_NAME = 'plxproject-{project_uuid}-{name}'
DEPLOYMENT_NAME = 'plxproject-{project_uuid}-{name}'
def SIDECAR_ARGS_FN(pod_id):
return ["python3", "api/manage.py", "start_sidecar", pod_id,
"--log_sleep_interval={}".format(settings.JOB_SIDECAR_LOG_SLEEP_INTERVAL),
"--persist={}".format(settings.JOB_SIDECAR_PERSIST)]
DATA_VOLUME = 'data'
OUTPUTS_VOLUME = 'outputs'
| from __future__ import absolute_import, division, print_function
from django.conf import settings
JOB_NAME = 'plxjob-{task_type}{task_idx}-{experiment_uuid}'
DEFAULT_PORT = 2222
ENV_VAR_TEMPLATE = '{name: "{var_name}", value: "{var_value}"}'
VOLUME_NAME = 'pv-{vol_name}'
VOLUME_CLAIM_NAME = 'pvc-{vol_name}'
CLUSTER_CONFIG_MAP_NAME = 'plxcluster-{experiment_uuid}'
CLUSTER_CONFIG_MAP_KEY_NAME = 'plxcluster_{task_type}'
POD_CONTAINER_PROJECT_NAME = 'plxproject-{project_uuid}-{name}'
DEPLOYMENT_NAME = 'plxproject-{project_uuid}-{name}'
def SIDECAR_ARGS_FN(pod_id):
return ["python3", "api/manage.py", "start_sidecar", pod_id,
"--log_sleep_interval={}".format(settings.JOB_SIDECAR_LOG_SLEEP_INTERVAL),
"--persist={}".format(settings.JOB_SIDECAR_PERSIST)]
DATA_VOLUME = 'data'
OUTPUTS_VOLUME = 'outputs'
| Update cluster config map key format | Update cluster config map key format
| Python | apache-2.0 | polyaxon/polyaxon,polyaxon/polyaxon,polyaxon/polyaxon | python | ## Code Before:
from __future__ import absolute_import, division, print_function
from django.conf import settings
JOB_NAME = 'plxjob-{task_type}{task_idx}-{experiment_uuid}'
DEFAULT_PORT = 2222
ENV_VAR_TEMPLATE = '{name: "{var_name}", value: "{var_value}"}'
VOLUME_NAME = 'pv-{vol_name}'
VOLUME_CLAIM_NAME = 'pvc-{vol_name}'
CLUSTER_CONFIG_MAP_NAME = 'plxcluster-{experiment_uuid}'
CLUSTER_CONFIG_MAP_KEY_NAME = 'plxcluster_{experiment_uuid}_{task_type}'
POD_CONTAINER_PROJECT_NAME = 'plxproject-{project_uuid}-{name}'
DEPLOYMENT_NAME = 'plxproject-{project_uuid}-{name}'
def SIDECAR_ARGS_FN(pod_id):
return ["python3", "api/manage.py", "start_sidecar", pod_id,
"--log_sleep_interval={}".format(settings.JOB_SIDECAR_LOG_SLEEP_INTERVAL),
"--persist={}".format(settings.JOB_SIDECAR_PERSIST)]
DATA_VOLUME = 'data'
OUTPUTS_VOLUME = 'outputs'
## Instruction:
Update cluster config map key format
## Code After:
from __future__ import absolute_import, division, print_function
from django.conf import settings
JOB_NAME = 'plxjob-{task_type}{task_idx}-{experiment_uuid}'
DEFAULT_PORT = 2222
ENV_VAR_TEMPLATE = '{name: "{var_name}", value: "{var_value}"}'
VOLUME_NAME = 'pv-{vol_name}'
VOLUME_CLAIM_NAME = 'pvc-{vol_name}'
CLUSTER_CONFIG_MAP_NAME = 'plxcluster-{experiment_uuid}'
CLUSTER_CONFIG_MAP_KEY_NAME = 'plxcluster_{task_type}'
POD_CONTAINER_PROJECT_NAME = 'plxproject-{project_uuid}-{name}'
DEPLOYMENT_NAME = 'plxproject-{project_uuid}-{name}'
def SIDECAR_ARGS_FN(pod_id):
return ["python3", "api/manage.py", "start_sidecar", pod_id,
"--log_sleep_interval={}".format(settings.JOB_SIDECAR_LOG_SLEEP_INTERVAL),
"--persist={}".format(settings.JOB_SIDECAR_PERSIST)]
DATA_VOLUME = 'data'
OUTPUTS_VOLUME = 'outputs'
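For readers skimming the constants above, a tiny sketch of how such naming templates are filled in with `str.format()`; the concrete values are invented and only show the shape of the resulting Kubernetes names after the key format change.

```python
JOB_NAME = 'plxjob-{task_type}{task_idx}-{experiment_uuid}'
CLUSTER_CONFIG_MAP_KEY_NAME = 'plxcluster_{task_type}'  # experiment uuid no longer part of the key

job_name = JOB_NAME.format(task_type='worker', task_idx=3, experiment_uuid='a1b2c3')
key_name = CLUSTER_CONFIG_MAP_KEY_NAME.format(task_type='worker')

print(job_name)  # plxjob-worker3-a1b2c3
print(key_name)  # plxcluster_worker
```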
|
6c960d4d67feec85378ab885218c4d10746615c2 | addon/helpers/register-select-helper.js | addon/helpers/register-select-helper.js | import Ember from 'ember';
export default function() {
Ember.Test.registerAsyncHelper('select', function(app, selector, text) {
const $el = app.testHelpers.findWithAssert(`${selector} option:contains("${text}")`);
$el.each(function() {
Ember.run(() => {
this.selected = true;
Ember.$(this).trigger('change');
});
});
return app.testHelpers.wait();
});
}
| import Ember from 'ember';
export default function() {
Ember.Test.registerAsyncHelper('select', function(app, selector, text) {
var $el = app.testHelpers.findWithAssert(selector + "option:contains('" + text + "')");
$el.each(function() {
var _this = this;
Ember.run(function() {
_this.selected = true;
Ember.$(_this).trigger('change');
});
});
return app.testHelpers.wait();
});
}
| Stop using ES6 due to PhantomJS issues | Stop using ES6 due to PhantomJS issues
| JavaScript | mit | arenoir/emberx-select,steveklabnik/emberx-select,thefrontside/emberx-select,gte451f/emberx-select,stefanpenner/emberx-select,tejasmanohar/emberx-select,martypenner/emberx-select,jeffreybiles/emberx-select,martypenner/emberx-select,knownasilya/emberx-select,tejasmanohar/emberx-select,knownasilya/emberx-select,stefanpenner/emberx-select,steveklabnik/emberx-select,jeffreybiles/emberx-select,arenoir/emberx-select,gte451f/emberx-select,thefrontside/emberx-select | javascript | ## Code Before:
import Ember from 'ember';
export default function() {
Ember.Test.registerAsyncHelper('select', function(app, selector, text) {
const $el = app.testHelpers.findWithAssert(`${selector} option:contains("${text}")`);
$el.each(function() {
Ember.run(() => {
this.selected = true;
Ember.$(this).trigger('change');
});
});
return app.testHelpers.wait();
});
}
## Instruction:
Stop using ES6 due to PhantomJS issues
## Code After:
import Ember from 'ember';
export default function() {
Ember.Test.registerAsyncHelper('select', function(app, selector, text) {
var $el = app.testHelpers.findWithAssert(selector + "option:contains('" + text + "')");
$el.each(function() {
var _this = this;
Ember.run(function() {
_this.selected = true;
Ember.$(_this).trigger('change');
});
});
return app.testHelpers.wait();
});
}
|
077c136788e70ae9f8c616c4018cff633fcf1d7a | experimental/example/README.md | experimental/example/README.md |
To run,
```
make all
./run.sh
```
# "architecture"
```
curl -> frontend --> app --> searchapp -> elasticsearch
(nginx) |
--> qotd -> internet
|
--> redis
```
|
To run a on a Mac, run:
```
$ docker-machine create -d virtualbox --virtualbox-memory=4096
$ eval $(docker-machine env scope-tastic)
$ sudo curl -L git.io/weave -o /usr/local/bin/weave
$ sudo chmod +x /usr/local/bin/weave
$ weave launch
$ curl -o run.sh https://...
$ ./run.sh
$ sudo wget -O /usr/local/bin/scope https://github.com/weaveworks/scope/releases/download/latest_release/scope
$ sudo chmod a+x /usr/local/bin/scope
$ scope launch
```
# "architecture"
```
curl -> frontend --> app --> searchapp -> elasticsearch
(nginx) |
--> qotd -> internet
|
--> redis
```
| Update instruction on running demo. | Update instruction on running demo.
| Markdown | apache-2.0 | alban/scope,weaveworks/scope,alban/scope,paulbellamy/scope,dilgerma/scope,kinvolk/scope,dilgerma/scope,kinvolk/scope,kinvolk/scope,weaveworks/scope,weaveworks/scope,weaveworks/scope,paulbellamy/scope,kinvolk/scope,kinvolk/scope,alban/scope,paulbellamy/scope,weaveworks/scope,alban/scope,alban/scope,paulbellamy/scope,paulbellamy/scope,dilgerma/scope,kinvolk/scope,dilgerma/scope,weaveworks/scope,dilgerma/scope,alban/scope,paulbellamy/scope,dilgerma/scope | markdown | ## Code Before:
To run,
```
make all
./run.sh
```
# "architecture"
```
curl -> frontend --> app --> searchapp -> elasticsearch
(nginx) |
--> qotd -> internet
|
--> redis
```
## Instruction:
Update instruction on running demo.
## Code After:
To run a on a Mac, run:
```
$ docker-machine create -d virtualbox --virtualbox-memory=4096
$ eval $(docker-machine env scope-tastic)
$ sudo curl -L git.io/weave -o /usr/local/bin/weave
$ sudo chmod +x /usr/local/bin/weave
$ weave launch
$ curl -o run.sh https://...
$ ./run.sh
$ sudo wget -O /usr/local/bin/scope https://github.com/weaveworks/scope/releases/download/latest_release/scope
$ sudo chmod a+x /usr/local/bin/scope
$ scope launch
```
# "architecture"
```
curl -> frontend --> app --> searchapp -> elasticsearch
(nginx) |
--> qotd -> internet
|
--> redis
```
|
18edcf2d00886d975c37ac8afd0e2c75c330c6ca | spec/build/dependencygrapher/print_dependencies_spec.rb | spec/build/dependencygrapher/print_dependencies_spec.rb | require File.dirname(__FILE__) + '/../../spec_helper'
require 'rakelib/dependency_grapher'
describe "DependencyGrapher#print_dependencies" do
before :each do
@stdout, $stdout = $stdout, IOStub.new
@grapher = DependencyGrapher.new []
@grapher.should_receive(:get_system_defines)
end
it "prints the dependencies for all object files" do
@grapher.file_names = [fixture(__FILE__, "c.cpp"), fixture(__FILE__, "d.cpp")]
@grapher.directories = [fixture(__FILE__)]
@grapher.process
@grapher.print_dependencies $stdout
path = fixture(__FILE__)
$stdout.should == <<EOS
#{path}/c.o: \\
#{path}/c.cpp \\
#{path}/c.hpp \\
#{path}/d.hpp
#{path}/d.o: \\
#{path}/d.cpp \\
#{path}/d.hpp \\
#{path}/e.hpp
EOS
end
end
| require File.dirname(__FILE__) + '/../../spec_helper'
require 'rakelib/dependency_grapher'
describe "DependencyGrapher#print_dependencies" do
before :each do
@stdout, $stdout = $stdout, IOStub.new
@grapher = DependencyGrapher.new []
@grapher.should_receive(:get_system_defines)
end
after :each do
$stdout = @stdout
end
it "prints the dependencies for all object files" do
@grapher.file_names = [fixture(__FILE__, "c.cpp"), fixture(__FILE__, "d.cpp")]
@grapher.directories = [fixture(__FILE__)]
@grapher.process
@grapher.print_dependencies $stdout
path = fixture(__FILE__)
$stdout.should == <<EOS
#{path}/c.o: \\
#{path}/c.cpp \\
#{path}/c.hpp \\
#{path}/d.hpp
#{path}/d.o: \\
#{path}/d.cpp \\
#{path}/d.hpp \\
#{path}/e.hpp
EOS
end
end
| Make sure $stdout is reassigned in :after action. | Make sure $stdout is reassigned in :after action.
| Ruby | bsd-3-clause | digitalextremist/rubinius,heftig/rubinius,Wirachmat/rubinius,sferik/rubinius,travis-repos/rubinius,pH14/rubinius,digitalextremist/rubinius,dblock/rubinius,dblock/rubinius,slawosz/rubinius,slawosz/rubinius,benlovell/rubinius,Azizou/rubinius,ngpestelos/rubinius,slawosz/rubinius,ruipserra/rubinius,heftig/rubinius,dblock/rubinius,pH14/rubinius,Azizou/rubinius,jsyeo/rubinius,kachick/rubinius,Azizou/rubinius,mlarraz/rubinius,slawosz/rubinius,dblock/rubinius,travis-repos/rubinius,kachick/rubinius,heftig/rubinius,benlovell/rubinius,jemc/rubinius,jsyeo/rubinius,Azizou/rubinius,sferik/rubinius,kachick/rubinius,benlovell/rubinius,ngpestelos/rubinius,jemc/rubinius,heftig/rubinius,kachick/rubinius,digitalextremist/rubinius,lgierth/rubinius,ruipserra/rubinius,kachick/rubinius,slawosz/rubinius,Azizou/rubinius,travis-repos/rubinius,ngpestelos/rubinius,jsyeo/rubinius,travis-repos/rubinius,jemc/rubinius,ruipserra/rubinius,ngpestelos/rubinius,pH14/rubinius,dblock/rubinius,Wirachmat/rubinius,travis-repos/rubinius,pH14/rubinius,kachick/rubinius,heftig/rubinius,pH14/rubinius,heftig/rubinius,mlarraz/rubinius,jemc/rubinius,jsyeo/rubinius,sferik/rubinius,jemc/rubinius,pH14/rubinius,mlarraz/rubinius,slawosz/rubinius,Wirachmat/rubinius,mlarraz/rubinius,jemc/rubinius,sferik/rubinius,benlovell/rubinius,benlovell/rubinius,Azizou/rubinius,ruipserra/rubinius,ngpestelos/rubinius,Wirachmat/rubinius,benlovell/rubinius,Azizou/rubinius,digitalextremist/rubinius,Wirachmat/rubinius,ruipserra/rubinius,digitalextremist/rubinius,mlarraz/rubinius,ruipserra/rubinius,pH14/rubinius,mlarraz/rubinius,kachick/rubinius,ruipserra/rubinius,dblock/rubinius,sferik/rubinius,digitalextremist/rubinius,benlovell/rubinius,sferik/rubinius,heftig/rubinius,slawosz/rubinius,Wirachmat/rubinius,ngpestelos/rubinius,sferik/rubinius,kachick/rubinius,lgierth/rubinius,lgierth/rubinius,jsyeo/rubinius,lgierth/rubinius,mlarraz/rubinius,Wirachmat/rubinius,travis-repos/rubinius,dblock/rubinius,digitalextremist/rubinius,jemc/rubinius,lgierth/rubinius,jsyeo/rubinius,jsyeo/rubinius,ngpestelos/rubinius,travis-repos/rubinius,lgierth/rubinius,lgierth/rubinius | ruby | ## Code Before:
require File.dirname(__FILE__) + '/../../spec_helper'
require 'rakelib/dependency_grapher'
describe "DependencyGrapher#print_dependencies" do
before :each do
@stdout, $stdout = $stdout, IOStub.new
@grapher = DependencyGrapher.new []
@grapher.should_receive(:get_system_defines)
end
it "prints the dependencies for all object files" do
@grapher.file_names = [fixture(__FILE__, "c.cpp"), fixture(__FILE__, "d.cpp")]
@grapher.directories = [fixture(__FILE__)]
@grapher.process
@grapher.print_dependencies $stdout
path = fixture(__FILE__)
$stdout.should == <<EOS
#{path}/c.o: \\
#{path}/c.cpp \\
#{path}/c.hpp \\
#{path}/d.hpp
#{path}/d.o: \\
#{path}/d.cpp \\
#{path}/d.hpp \\
#{path}/e.hpp
EOS
end
end
## Instruction:
Make sure $stdout is reassigned in :after action.
## Code After:
require File.dirname(__FILE__) + '/../../spec_helper'
require 'rakelib/dependency_grapher'
describe "DependencyGrapher#print_dependencies" do
before :each do
@stdout, $stdout = $stdout, IOStub.new
@grapher = DependencyGrapher.new []
@grapher.should_receive(:get_system_defines)
end
after :each do
$stdout = @stdout
end
it "prints the dependencies for all object files" do
@grapher.file_names = [fixture(__FILE__, "c.cpp"), fixture(__FILE__, "d.cpp")]
@grapher.directories = [fixture(__FILE__)]
@grapher.process
@grapher.print_dependencies $stdout
path = fixture(__FILE__)
$stdout.should == <<EOS
#{path}/c.o: \\
#{path}/c.cpp \\
#{path}/c.hpp \\
#{path}/d.hpp
#{path}/d.o: \\
#{path}/d.cpp \\
#{path}/d.hpp \\
#{path}/e.hpp
EOS
end
end
|
bee3daac70c2f3c56628203a93b2442fd93b10a5 | ksql-workshop/README.md | ksql-workshop/README.md | 
# Overview
This KSQL ratings demo showcases Kafka stream processing using KSQL. This demo was initially created for a KSQL workshop.
As with the other demos in this repo, you may run the entire demo end-to-end with `./start.sh`, and it runs on your local Confluent Platform install instead of Docker. Alternatively, you may follow the [step-by-step guide](ksql-workshop.adoc), and that instruction is based on Docker instead of your local Confluent Platform install.

# Prerequisites
* [Common demo prerequisites](https://github.com/confluentinc/quickstart-demos#prerequisites)
* [Confluent Platform 5.0](https://www.confluent.io/download/)
* If you are running the [step-by-step guide](ksql-workshop.adoc)
* Docker
* Docker Compose
* 8GB+ RAM
# What Should I see?
After you run `./start.sh`:
* If you are running Confluent Enterprise, open your browser and navigate to the Control Center web interface Monitoring -> Data streams tab at http://localhost:9021/monitoring/streams to see throughput and latency performance of the KSQL queries
* If you are running Confluent Enterprise, use Control Center to view and create KSQL queries. Otherwise, run the KSQL CLI `ksql http://localhost:8088`.
| 
# Overview
This KSQL ratings demo showcases Kafka stream processing using KSQL. This demo was initially created for a KSQL workshop.
----
**You can follow the [step-by-step guide](ksql-workshop.adoc) workshop instructions here. These instructions are based on Docker instead of your local Confluent Platform install.**
----
As with the other demos in this repo, you may run the entire demo end-to-end with `./start.sh`, and it runs on your local Confluent Platform install instead of Docker.

# Prerequisites
* [Common demo prerequisites](https://github.com/confluentinc/quickstart-demos#prerequisites)
* [Confluent Platform 5.0](https://www.confluent.io/download/)
* If you are running the [step-by-step guide](ksql-workshop.adoc)
* Docker
* Docker Compose
* 8GB+ RAM
# What Should I see?
After you run `./start.sh`:
* If you are running Confluent Enterprise, open your browser and navigate to the Control Center web interface Monitoring -> Data streams tab at http://localhost:9021/monitoring/streams to see throughput and latency performance of the KSQL queries
* If you are running Confluent Enterprise, use Control Center to view and create KSQL queries. Otherwise, run the KSQL CLI `ksql http://localhost:8088`.
| Make workshop instructions more obvious | Make workshop instructions more obvious
| Markdown | apache-2.0 | confluentinc/examples,confluentinc/examples,confluentinc/examples,confluentinc/examples,confluentinc/examples,confluentinc/examples,confluentinc/examples,confluentinc/examples,confluentinc/examples,confluentinc/examples | markdown | ## Code Before:

# Overview
This KSQL ratings demo showcases Kafka stream processing using KSQL. This demo was initially created for a KSQL workshop.
As with the other demos in this repo, you may run the entire demo end-to-end with `./start.sh`, and it runs on your local Confluent Platform install instead of Docker. Alternatively, you may follow the [step-by-step guide](ksql-workshop.adoc), and that instruction is based on Docker instead of your local Confluent Platform install.

# Prerequisites
* [Common demo prerequisites](https://github.com/confluentinc/quickstart-demos#prerequisites)
* [Confluent Platform 5.0](https://www.confluent.io/download/)
* If you are running the [step-by-step guide](ksql-workshop.adoc)
* Docker
* Docker Compose
* 8GB+ RAM
# What Should I see?
After you run `./start.sh`:
* If you are running Confluent Enterprise, open your browser and navigate to the Control Center web interface Monitoring -> Data streams tab at http://localhost:9021/monitoring/streams to see throughput and latency performance of the KSQL queries
* If you are running Confluent Enterprise, use Control Center to view and create KSQL queries. Otherwise, run the KSQL CLI `ksql http://localhost:8088`.
## Instruction:
Make workshop instructions more obvious
## Code After:

# Overview
This KSQL ratings demo showcases Kafka stream processing using KSQL. This demo was initially created for a KSQL workshop.
----
**You can follow the [step-by-step guide](ksql-workshop.adoc) workshop instructions here. These instructions are based on Docker instead of your local Confluent Platform install.**
----
As with the other demos in this repo, you may run the entire demo end-to-end with `./start.sh`, and it runs on your local Confluent Platform install instead of Docker.

# Prerequisites
* [Common demo prerequisites](https://github.com/confluentinc/quickstart-demos#prerequisites)
* [Confluent Platform 5.0](https://www.confluent.io/download/)
* If you are running the [step-by-step guide](ksql-workshop.adoc)
* Docker
* Docker Compose
* 8GB+ RAM
# What Should I see?
After you run `./start.sh`:
* If you are running Confluent Enterprise, open your browser and navigate to the Control Center web interface Monitoring -> Data streams tab at http://localhost:9021/monitoring/streams to see throughput and latency performance of the KSQL queries
* If you are running Confluent Enterprise, use Control Center to view and create KSQL queries. Otherwise, run the KSQL CLI `ksql http://localhost:8088`.
|
f57b40f1b360fc5b1d6e2ae0575a0ed73ce9fe31 | README.md | README.md | humanize-bytes -- Utilities to convert "MiB" etc to raw numbers
===============================================================
Install with
go get github.com/tv42/humanize-bytes/cmd/bytes2human github.com/tv42/humanize-bytes/cmd/human2bytes
Use like this:
$ human2bytes 42GiB
45097156608
$ human2bytes 42GB
42000000000
For example, sort your `du`, but still get readable results:
$ du * | sort -nr | head -5 | bytes2human
| humanize-bytes -- Utilities to convert "MiB" etc to raw numbers
===============================================================
Install with
go get github.com/tv42/humanize-bytes/cmd/bytes2human github.com/tv42/humanize-bytes/cmd/human2bytes
Use like this:
$ human2bytes 42GiB
45097156608
$ human2bytes 42GB
42000000000
For example, sort your `du`, but still get readable results:
$ du * | sort -nr | head -5 | bytes2human
| Indent more for proper markdown layout. | Indent more for proper markdown layout.
| Markdown | mit | tv42/humanize-bytes,tv42/humanize-bytes | markdown | ## Code Before:
humanize-bytes -- Utilities to convert "MiB" etc to raw numbers
===============================================================
Install with
go get github.com/tv42/humanize-bytes/cmd/bytes2human github.com/tv42/humanize-bytes/cmd/human2bytes
Use like this:
$ human2bytes 42GiB
45097156608
$ human2bytes 42GB
42000000000
For example, sort your `du`, but still get readable results:
$ du * | sort -nr | head -5 | bytes2human
## Instruction:
Indent more for proper markdown layout.
## Code After:
humanize-bytes -- Utilities to convert "MiB" etc to raw numbers
===============================================================
Install with
go get github.com/tv42/humanize-bytes/cmd/bytes2human github.com/tv42/humanize-bytes/cmd/human2bytes
Use like this:
$ human2bytes 42GiB
45097156608
$ human2bytes 42GB
42000000000
For example, sort your `du`, but still get readable results:
$ du * | sort -nr | head -5 | bytes2human
|
46ab176c20494fce8f66d9042645e4110d3198aa | test/integration/default/default_spec.rb | test/integration/default/default_spec.rb |
describe service('W3SVC') do
it { should be_installed }
it { should be_running }
end
# Unless we are on a 'polluted' machine, the default website should
# be present if the IIS Role was freshly installed. All our vagrant
# configurations install with the system drive at C:\
describe iis_site('Default Web Site') do
it { should exist }
it { should be_running }
it { should have_app_pool('DefaultAppPool') }
end
|
describe service('W3SVC') do
it { should be_installed }
it { should be_running }
its ('startmode') { should be 'Auto'}
end
# Unless we are on a 'polluted' machine, the default website should
# be present if the IIS Role was freshly installed. All our vagrant
# configurations install with the system drive at C:\
describe iis_site('Default Web Site') do
it { should exist }
it { should be_running }
it { should have_app_pool('DefaultAppPool') }
end
| Make sure IIS is set to auto | Make sure IIS is set to auto
Signed-off-by: Tim Smith <[email protected]>
| Ruby | apache-2.0 | jonathanmorley/iis,chef-cookbooks/iis,opscode-cookbooks/iis | ruby | ## Code Before:
describe service('W3SVC') do
it { should be_installed }
it { should be_running }
end
# Unless we are on a 'polluted' machine, the default website should
# be present if the IIS Role was freshly installed. All our vagrant
# configurations install with the system drive at C:\
describe iis_site('Default Web Site') do
it { should exist }
it { should be_running }
it { should have_app_pool('DefaultAppPool') }
end
## Instruction:
Make sure IIS is set to auto
Signed-off-by: Tim Smith <[email protected]>
## Code After:
describe service('W3SVC') do
it { should be_installed }
it { should be_running }
its ('startmode') { should be 'Auto'}
end
# Unless we are on a 'polluted' machine, the default website should
# be present if the IIS Role was freshly installed. All our vagrant
# configurations install with the system drive at C:\
describe iis_site('Default Web Site') do
it { should exist }
it { should be_running }
it { should have_app_pool('DefaultAppPool') }
end
|
b42d12c40223f19c0c26d4ec7c64f3ab6e144c6c | src/model/mixin/colorPalette.js | src/model/mixin/colorPalette.js | import {makeInner} from '../../util/model';
var inner = makeInner();
export default {
clearColorPalette: function () {
inner(this).colorIdx = 0;
inner(this).colorNameMap = {};
},
getColorFromPalette: function (name, scope) {
scope = scope || this;
var scopeFields = inner(scope);
var colorIdx = scopeFields.colorIdx || 0;
var colorNameMap = scopeFields.colorNameMap = scopeFields.colorNameMap || {};
// Use `hasOwnProperty` to avoid conflict with Object.prototype.
if (colorNameMap.hasOwnProperty(name)) {
return colorNameMap[name];
}
var colorPalette = this.get('color', true) || [];
if (!colorPalette.length) {
return;
}
var color = colorPalette[colorIdx];
if (name) {
colorNameMap[name] = color;
}
scopeFields.colorIdx = (colorIdx + 1) % colorPalette.length;
return color;
}
};
| import {makeInner, normalizeToArray} from '../../util/model';
var inner = makeInner();
export default {
clearColorPalette: function () {
inner(this).colorIdx = 0;
inner(this).colorNameMap = {};
},
getColorFromPalette: function (name, scope) {
scope = scope || this;
var scopeFields = inner(scope);
var colorIdx = scopeFields.colorIdx || 0;
var colorNameMap = scopeFields.colorNameMap = scopeFields.colorNameMap || {};
// Use `hasOwnProperty` to avoid conflict with Object.prototype.
if (colorNameMap.hasOwnProperty(name)) {
return colorNameMap[name];
}
var colorPalette = normalizeToArray(this.get('color', true));
if (!colorPalette.length) {
return;
}
var color = colorPalette[colorIdx];
if (name) {
colorNameMap[name] = color;
}
scopeFields.colorIdx = (colorIdx + 1) % colorPalette.length;
return color;
}
};
| Support string value for color. | Support string value for color.
| JavaScript | apache-2.0 | chenfwind/echarts,ecomfe/echarts,apache/incubator-echarts,100star/echarts,ecomfe/echarts,chenfwind/echarts,100star/echarts,apache/incubator-echarts | javascript | ## Code Before:
import {makeInner} from '../../util/model';
var inner = makeInner();
export default {
clearColorPalette: function () {
inner(this).colorIdx = 0;
inner(this).colorNameMap = {};
},
getColorFromPalette: function (name, scope) {
scope = scope || this;
var scopeFields = inner(scope);
var colorIdx = scopeFields.colorIdx || 0;
var colorNameMap = scopeFields.colorNameMap = scopeFields.colorNameMap || {};
// Use `hasOwnProperty` to avoid conflict with Object.prototype.
if (colorNameMap.hasOwnProperty(name)) {
return colorNameMap[name];
}
var colorPalette = this.get('color', true) || [];
if (!colorPalette.length) {
return;
}
var color = colorPalette[colorIdx];
if (name) {
colorNameMap[name] = color;
}
scopeFields.colorIdx = (colorIdx + 1) % colorPalette.length;
return color;
}
};
## Instruction:
Support string value for color.
## Code After:
import {makeInner, normalizeToArray} from '../../util/model';
var inner = makeInner();
export default {
clearColorPalette: function () {
inner(this).colorIdx = 0;
inner(this).colorNameMap = {};
},
getColorFromPalette: function (name, scope) {
scope = scope || this;
var scopeFields = inner(scope);
var colorIdx = scopeFields.colorIdx || 0;
var colorNameMap = scopeFields.colorNameMap = scopeFields.colorNameMap || {};
// Use `hasOwnProperty` to avoid conflict with Object.prototype.
if (colorNameMap.hasOwnProperty(name)) {
return colorNameMap[name];
}
var colorPalette = normalizeToArray(this.get('color', true));
if (!colorPalette.length) {
return;
}
var color = colorPalette[colorIdx];
if (name) {
colorNameMap[name] = color;
}
scopeFields.colorIdx = (colorIdx + 1) % colorPalette.length;
return color;
}
};
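The fix above relies on a `normalizeToArray` helper so that a single color string is treated like a one-element palette. A rough Python analogue of that idea, purely for illustration (this is not ECharts code):

```python
def normalize_to_array(value):
    """None becomes an empty list; a bare scalar becomes a one-element list."""
    if value is None:
        return []
    return value if isinstance(value, list) else [value]

def color_from_palette(palette, index):
    palette = normalize_to_array(palette)
    if not palette:
        return None
    return palette[index % len(palette)]   # cycle through the palette

assert color_from_palette('#c23531', 5) == '#c23531'             # a bare string now works
assert color_from_palette(['#c23531', '#2f4554'], 3) == '#2f4554'
```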
|
3c6a6677be6aca24d73182505772dbc6f2b49684 | Sources/TAR/DataWithPointer+Tar.swift | Sources/TAR/DataWithPointer+Tar.swift | // Copyright (c) 2017 Timofey Solomko
// Licensed under MIT License
//
// See LICENSE for license information
import Foundation
extension DataWithPointer {
func nullEndedBuffer(cutoff: Int) -> [UInt8] {
let startIndex = index
var buffer = [UInt8]()
while index - startIndex < cutoff {
let byte = self.byte()
if byte == 0 {
index -= 1
break
}
buffer.append(byte)
}
index += cutoff - (index - startIndex)
return buffer
}
func nullEndedAsciiString(cutoff: Int) throws -> String {
if let string = String(bytes: self.nullEndedBuffer(cutoff: cutoff), encoding: .ascii) {
return string
} else {
throw TarError.notAsciiString
}
}
func nullSpaceEndedBuffer(cutoff: Int) -> [UInt8] {
let startIndex = index
var buffer = [UInt8]()
while index - startIndex < cutoff {
let byte = self.byte()
if byte == 0 || byte == 0x20 {
index -= 1
break
}
buffer.append(byte)
}
index += cutoff - (index - startIndex)
return buffer
}
func nullSpaceEndedAsciiString(cutoff: Int) throws -> String {
if let string = String(bytes: self.nullSpaceEndedBuffer(cutoff: cutoff), encoding: .ascii) {
return string
} else {
throw TarError.notAsciiString
}
}
}
| // Copyright (c) 2017 Timofey Solomko
// Licensed under MIT License
//
// See LICENSE for license information
import Foundation
extension DataWithPointer {
private func nullEndedBuffer(cutoff: Int) -> [UInt8] {
let startIndex = index
var buffer = [UInt8]()
while index - startIndex < cutoff {
let byte = self.byte()
if byte == 0 {
index -= 1
break
}
buffer.append(byte)
}
index += cutoff - (index - startIndex)
return buffer
}
func nullEndedAsciiString(cutoff: Int) throws -> String {
if let string = String(bytes: self.nullEndedBuffer(cutoff: cutoff), encoding: .ascii) {
return string
} else {
throw TarError.notAsciiString
}
}
private func nullSpaceEndedBuffer(cutoff: Int) -> [UInt8] {
let startIndex = index
var buffer = [UInt8]()
while index - startIndex < cutoff {
let byte = self.byte()
if byte == 0 || byte == 0x20 {
index -= 1
break
}
buffer.append(byte)
}
index += cutoff - (index - startIndex)
return buffer
}
func nullSpaceEndedAsciiString(cutoff: Int) throws -> String {
if let string = String(bytes: self.nullSpaceEndedBuffer(cutoff: cutoff), encoding: .ascii) {
return string
} else {
throw TarError.notAsciiString
}
}
}
| Make private couple of dwp+tar functions | Make private couple of dwp+tar functions
| Swift | mit | tsolomko/SWCompression,tsolomko/SWCompression,tsolomko/SWCompression | swift | ## Code Before:
// Copyright (c) 2017 Timofey Solomko
// Licensed under MIT License
//
// See LICENSE for license information
import Foundation
extension DataWithPointer {
func nullEndedBuffer(cutoff: Int) -> [UInt8] {
let startIndex = index
var buffer = [UInt8]()
while index - startIndex < cutoff {
let byte = self.byte()
if byte == 0 {
index -= 1
break
}
buffer.append(byte)
}
index += cutoff - (index - startIndex)
return buffer
}
func nullEndedAsciiString(cutoff: Int) throws -> String {
if let string = String(bytes: self.nullEndedBuffer(cutoff: cutoff), encoding: .ascii) {
return string
} else {
throw TarError.notAsciiString
}
}
func nullSpaceEndedBuffer(cutoff: Int) -> [UInt8] {
let startIndex = index
var buffer = [UInt8]()
while index - startIndex < cutoff {
let byte = self.byte()
if byte == 0 || byte == 0x20 {
index -= 1
break
}
buffer.append(byte)
}
index += cutoff - (index - startIndex)
return buffer
}
func nullSpaceEndedAsciiString(cutoff: Int) throws -> String {
if let string = String(bytes: self.nullSpaceEndedBuffer(cutoff: cutoff), encoding: .ascii) {
return string
} else {
throw TarError.notAsciiString
}
}
}
## Instruction:
Make private couple of dwp+tar functions
## Code After:
// Copyright (c) 2017 Timofey Solomko
// Licensed under MIT License
//
// See LICENSE for license information
import Foundation
extension DataWithPointer {
private func nullEndedBuffer(cutoff: Int) -> [UInt8] {
let startIndex = index
var buffer = [UInt8]()
while index - startIndex < cutoff {
let byte = self.byte()
if byte == 0 {
index -= 1
break
}
buffer.append(byte)
}
index += cutoff - (index - startIndex)
return buffer
}
func nullEndedAsciiString(cutoff: Int) throws -> String {
if let string = String(bytes: self.nullEndedBuffer(cutoff: cutoff), encoding: .ascii) {
return string
} else {
throw TarError.notAsciiString
}
}
private func nullSpaceEndedBuffer(cutoff: Int) -> [UInt8] {
let startIndex = index
var buffer = [UInt8]()
while index - startIndex < cutoff {
let byte = self.byte()
if byte == 0 || byte == 0x20 {
index -= 1
break
}
buffer.append(byte)
}
index += cutoff - (index - startIndex)
return buffer
}
func nullSpaceEndedAsciiString(cutoff: Int) throws -> String {
if let string = String(bytes: self.nullSpaceEndedBuffer(cutoff: cutoff), encoding: .ascii) {
return string
} else {
throw TarError.notAsciiString
}
}
}
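The two Swift helpers above follow a common TAR-header idiom: read at most `cutoff` bytes of a fixed-width field, stop at the first NUL (or space), but always advance the cursor by the full field width. A loose Python rendering of the same idea, given only as an illustration:

```python
def null_ended_ascii(data, offset, cutoff, stop_at_space=False):
    """Decode data[offset:offset+cutoff] up to the first NUL byte
    (optionally also a space); the caller always advances by cutoff."""
    out = bytearray()
    for b in data[offset:offset + cutoff]:
        if b == 0 or (stop_at_space and b == 0x20):
            break
        out.append(b)
    return out.decode('ascii'), offset + cutoff

name, next_offset = null_ended_ascii(b'hello.txt\x00\x00\x00', 0, 12)
assert name == 'hello.txt' and next_offset == 12
```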
|
ad72a44f6bf9df675b659eefd04aaa3d425825be | spec/classes/rsyslog_spec.rb | spec/classes/rsyslog_spec.rb |
require 'spec_helper'
describe 'rsyslog', :type => :class do
let(:title) { 'rsyslog' }
it { should contain_package('rsyslog').with_ensure('installed') }
it { should contain_service('rsyslog').with_ensure('running') }
end
|
require 'spec_helper'
describe 'rsyslog', :type => :class do
context "On an Ubuntu install install rsyslog" do
let :facts do {
:operatingsystem => 'Debian'
} end
let(:title) { 'rsyslog' }
it { should contain_package('rsyslog').with_ensure('installed') }
it { should contain_service('rsyslog').with_ensure('running') }
end
context "On an RHEL install install rsyslog" do
let :facts do {
:operatingsystem => 'RedHat'
} end
let(:title) { 'rsyslog' }
it { should contain_package('rsyslog').with_ensure('installed') }
it { should contain_service('rsyslog').with_ensure('running') }
end
end
| Fix test cases by introducing facts to rspec | Fix test cases by introducing facts to rspec
| Ruby | apache-2.0 | Brainsware/puppet-rsyslog,Brainsware/puppet-rsyslog | ruby | ## Code Before:
require 'spec_helper'
describe 'rsyslog', :type => :class do
let(:title) { 'rsyslog' }
it { should contain_package('rsyslog').with_ensure('installed') }
it { should contain_service('rsyslog').with_ensure('running') }
end
## Instruction:
Fix test cases by introducing facts to rspec
## Code After:
require 'spec_helper'
describe 'rsyslog', :type => :class do
context "On an Ubuntu install install rsyslog" do
let :facts do {
:operatingsystem => 'Debian'
} end
let(:title) { 'rsyslog' }
it { should contain_package('rsyslog').with_ensure('installed') }
it { should contain_service('rsyslog').with_ensure('running') }
end
context "On an RHEL install install rsyslog" do
let :facts do {
:operatingsystem => 'RedHat'
} end
let(:title) { 'rsyslog' }
it { should contain_package('rsyslog').with_ensure('installed') }
it { should contain_service('rsyslog').with_ensure('running') }
end
end
|
fb0f1b16b1d71ef6e353d768ed00eeffb973b715 | app/views/systems/_selection.html.haml | app/views/systems/_selection.html.haml | %h3= t('selection')
.row
.col-sm-5.sortableHospitals
- @hospitals.each do |hospital|
.selection.bordered-well{ 'data-hospital-id': hospital.id }
= hospital.name
%span.pull-right.icon.icon--trash.removeHospital{ 'aria-hidden': 'true', style: 'color:#757575', title: t('remove_from_comparison'), 'data-hospital-id': hospital.id }
.col-sm-7.sortableCodes
- @codes.each do |code|
.selection.bordered{ 'data-code': code.code }
= code.code_display + ' '
%span.small{style: "color:#757575" }= code.text locale
%span.pull-right.icon.icon--trash.removeCode{ 'aria-hidden': 'true', style: 'color:#757575', title: t('remove_from_comparison'), 'data-code': code.code }
| %h3= t('selection')
.row
.col-sm-5.sortableHospitals
- @hospitals.each do |hospital|
.selection.bordered-well{ 'data-hospital-id': hospital.id }
= hospital.name
%span.pull-right.removeHospital{ 'aria-hidden': 'true', style: 'color:#757575', title: t('remove_from_comparison'), 'data-hospital-id': hospital.id }= fa_icon('remove')
.col-sm-7.sortableCodes
- @codes.each do |code|
.selection.bordered{ 'data-code': code.code }
= code.code_display + ' '
%span.small{style: "color:#757575" }= code.text locale
%span.pull-right.removeCode{ 'aria-hidden': 'true', style: 'color:#757575', title: t('remove_from_comparison'), 'data-code': code.code }= fa_icon('remove')
| Replace trash with removal icon. | Replace trash with removal icon.
| Haml | mit | eonum/drg-search,eonum/drg-search,eonum/drg-search | haml | ## Code Before:
%h3= t('selection')
.row
.col-sm-5.sortableHospitals
- @hospitals.each do |hospital|
.selection.bordered-well{ 'data-hospital-id': hospital.id }
= hospital.name
%span.pull-right.icon.icon--trash.removeHospital{ 'aria-hidden': 'true', style: 'color:#757575', title: t('remove_from_comparison'), 'data-hospital-id': hospital.id }
.col-sm-7.sortableCodes
- @codes.each do |code|
.selection.bordered{ 'data-code': code.code }
= code.code_display + ' '
%span.small{style: "color:#757575" }= code.text locale
%span.pull-right.icon.icon--trash.removeCode{ 'aria-hidden': 'true', style: 'color:#757575', title: t('remove_from_comparison'), 'data-code': code.code }
## Instruction:
Replace trash with removal icon.
## Code After:
%h3= t('selection')
.row
.col-sm-5.sortableHospitals
- @hospitals.each do |hospital|
.selection.bordered-well{ 'data-hospital-id': hospital.id }
= hospital.name
%span.pull-right.removeHospital{ 'aria-hidden': 'true', style: 'color:#757575', title: t('remove_from_comparison'), 'data-hospital-id': hospital.id }= fa_icon('remove')
.col-sm-7.sortableCodes
- @codes.each do |code|
.selection.bordered{ 'data-code': code.code }
= code.code_display + ' '
%span.small{style: "color:#757575" }= code.text locale
%span.pull-right.removeCode{ 'aria-hidden': 'true', style: 'color:#757575', title: t('remove_from_comparison'), 'data-code': code.code }= fa_icon('remove')
|
663c9486a2482a84a02552c25ddfb1b1a58e2349 | src/main/bash/gvm-help.sh | src/main/bash/gvm-help.sh |
function __gvmtool_help {
echo ""
echo "Usage: gvm <command> <candidate> [version]"
echo " gvm offline <enable|disable>"
echo ""
echo " command : install, uninstall, list, use, current, version, default, selfupdate, broadcast, help, offline"
echo " candidate : groovy, grails, griffon, gradle, lazybones, vertx"
echo " version : optional, defaults to latest stable if not provided"
echo ""
echo "eg: gvm install groovy"
}
|
function __gvmtool_help {
echo ""
echo "Usage: gvm <command> <candidate> [version]"
echo " gvm offline <enable|disable>"
echo ""
echo " commands:"
echo " install or i [candidate] [version]"
echo " uninstall or rm [candidate] <version>"
echo " list or ls [candidate]"
echo " use or u [candidate] <version>"
echo " default or d [candidate] <version>"
echo " current or c <candidate>"
echo " version or v"
echo " broadcast or b"
echo " help or h"
echo " offline <enable|disable>"
echo " selfupdate"
echo ""
echo " candidate : groovy, grails, griffon, gradle, lazybones, vertx"
echo " version : where optional, defaults to latest stable if not provided"
echo ""
echo "eg: gvm install groovy"
}
| Update help to reflect new mnemonics. | Update help to reflect new mnemonics.
| Shell | apache-2.0 | skpal/sdkman-cli,DealerDotCom/gvm-cli,DealerDotCom/gvm-cli,GsusRecovery/sdkman-cli,skpal/sdkman-cli,gvmtool/gvm-cli,nobeans/gvm-cli,GsusRecovery/sdkman-cli,jbovet/gvm,nobeans/gvm-cli,sdkman/sdkman-cli,busches/gvm-cli,shanman190/sdkman-cli | shell | ## Code Before:
function __gvmtool_help {
echo ""
echo "Usage: gvm <command> <candidate> [version]"
echo " gvm offline <enable|disable>"
echo ""
echo " command : install, uninstall, list, use, current, version, default, selfupdate, broadcast, help, offline"
echo " candidate : groovy, grails, griffon, gradle, lazybones, vertx"
echo " version : optional, defaults to latest stable if not provided"
echo ""
echo "eg: gvm install groovy"
}
## Instruction:
Update help to reflect new mnemonics.
## Code After:
function __gvmtool_help {
echo ""
echo "Usage: gvm <command> <candidate> [version]"
echo " gvm offline <enable|disable>"
echo ""
echo " commands:"
echo " install or i [candidate] [version]"
echo " uninstall or rm [candidate] <version>"
echo " list or ls [candidate]"
echo " use or u [candidate] <version>"
echo " default or d [candidate] <version>"
echo " current or c <candidate>"
echo " version or v"
echo " broadcast or b"
echo " help or h"
echo " offline <enable|disable>"
echo " selfupdate"
echo ""
echo " candidate : groovy, grails, griffon, gradle, lazybones, vertx"
echo " version : where optional, defaults to latest stable if not provided"
echo ""
echo "eg: gvm install groovy"
}
|
63b68df9f3a8a94acdf73b7457ccf7dd5c65744d | .github/workflows/unity-build-cafemap.yml | .github/workflows/unity-build-cafemap.yml | name: Build CafeMap
on:
workflow_dispatch:
jobs:
activate:
runs-on: ubuntu-latest
steps:
- name: Checkout Repository
uses: actions/checkout@v1
- uses: actions/[email protected]
with:
path: cafe-map/client/unity/Library
key: Library-CafeMap-Android
restore-keys: |
Library-CafeMap-
Library-
- name: Build for Android
uses: webbertakken/unity-builder@1
env:
UNITY_LICENSE: ${{ secrets.UNITY_LICENSE }}
with:
projectPath: cafe-map/client/unity
unityVersion: 2019.4.12f1
targetPlatform: Android
- name: Upload build
uses: actions/upload-artifact@v2
with:
name: Build
path: build
| name: Build CafeMap
on:
workflow_dispatch:
jobs:
build:
runs-on: ubuntu-latest
steps:
- name: Checkout Repository
uses: actions/checkout@v1
- uses: actions/[email protected]
with:
path: cafe-map/client/unity/Library
key: Library-CafeMap-Android
restore-keys: |
Library-CafeMap-
Library-
- name: Build for Android
uses: webbertakken/unity-builder@v1
env:
UNITY_LICENSE: ${{ secrets.UNITY_LICENSE_UBUNTU }}
with:
projectPath: cafe-map/client/unity
unityVersion: 2019.4.12f1
targetPlatform: Android
androidKeystoreBase64: ${{ secrets.CAFEMAP_ANDROID_KEYSTORE_BASE64 }}
androidKeystorePass: ${{ secrets.CAFEMAP_ANDROID_KEYSTORE_PASS }}
androidKeyaliasName: cafemap-release
androidKeyaliasPass: ${{ secrets.CAFEMAP_ANDROID_KEYSTORE_PASS }}
- name: Upload build
uses: actions/upload-artifact@v2
with:
name: Build
path: build
| Fix build.yml and add android keys | Fix build.yml and add android keys
| YAML | mit | curioswitch/curiostack,curioswitch/curiostack,curioswitch/curiostack,curioswitch/curiostack,curioswitch/curiostack,curioswitch/curiostack,curioswitch/curiostack,curioswitch/curiostack,curioswitch/curiostack,curioswitch/curiostack | yaml | ## Code Before:
name: Build CafeMap
on:
workflow_dispatch:
jobs:
activate:
runs-on: ubuntu-latest
steps:
- name: Checkout Repository
uses: actions/checkout@v1
- uses: actions/[email protected]
with:
path: cafe-map/client/unity/Library
key: Library-CafeMap-Android
restore-keys: |
Library-CafeMap-
Library-
- name: Build for Android
uses: webbertakken/unity-builder@1
env:
UNITY_LICENSE: ${{ secrets.UNITY_LICENSE }}
with:
projectPath: cafe-map/client/unity
unityVersion: 2019.4.12f1
targetPlatform: Android
- name: Upload build
uses: actions/upload-artifact@v2
with:
name: Build
path: build
## Instruction:
Fix build.yml and add android keys
## Code After:
name: Build CafeMap
on:
workflow_dispatch:
jobs:
build:
runs-on: ubuntu-latest
steps:
- name: Checkout Repository
uses: actions/checkout@v1
- uses: actions/[email protected]
with:
path: cafe-map/client/unity/Library
key: Library-CafeMap-Android
restore-keys: |
Library-CafeMap-
Library-
- name: Build for Android
uses: webbertakken/unity-builder@v1
env:
UNITY_LICENSE: ${{ secrets.UNITY_LICENSE_UBUNTU }}
with:
projectPath: cafe-map/client/unity
unityVersion: 2019.4.12f1
targetPlatform: Android
androidKeystoreBase64: ${{ secrets.CAFEMAP_ANDROID_KEYSTORE_BASE64 }}
androidKeystorePass: ${{ secrets.CAFEMAP_ANDROID_KEYSTORE_PASS }}
androidKeyaliasName: cafemap-release
androidKeyaliasPass: ${{ secrets.CAFEMAP_ANDROID_KEYSTORE_PASS }}
- name: Upload build
uses: actions/upload-artifact@v2
with:
name: Build
path: build
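The job above expects the Android keystore to arrive as a base64-encoded repository secret. How that value gets produced is outside the workflow itself, but a generic sketch (file name invented for the example) looks like this:

```python
import base64

# Encode the binary keystore as a single-line base64 string that can be
# pasted into a CI secret such as CAFEMAP_ANDROID_KEYSTORE_BASE64.
with open("cafemap-release.keystore", "rb") as f:
    encoded = base64.b64encode(f.read()).decode("ascii")

print(encoded)
```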
|
84de65fd751d874602dadcec006d8f5f5e4029d5 | .travis.yml | .travis.yml | os: osx
osx_image: xcode8.3
after_success:
- bash <(curl -s https://codecov.io/bash) -t $CODECOV_TOKEN
branches:
only:
- release
- dev
script:
- fastlane beta
| os: osx
osx_image: xcode8.3
before_install:
- sudo gem install xcov
after_success:
- bash <(curl -s https://codecov.io/bash) -t $CODECOV_TOKEN
branches:
only:
- release
- dev
script:
- fastlane beta
| Install xcove tool before project is build | CI: Install xcove tool before project is build
| YAML | mit | 3DprintFIT/octoprint-ios-client,3DprintFIT/octoprint-ios-client,3DprintFIT/octoprint-ios-client,3DprintFIT/octoprint-ios-client,3DprintFIT/octoprint-ios-client | yaml | ## Code Before:
os: osx
osx_image: xcode8.3
after_success:
- bash <(curl -s https://codecov.io/bash) -t $CODECOV_TOKEN
branches:
only:
- release
- dev
script:
- fastlane beta
## Instruction:
CI: Install xcove tool before project is build
## Code After:
os: osx
osx_image: xcode8.3
before_install:
- sudo gem install xcov
after_success:
- bash <(curl -s https://codecov.io/bash) -t $CODECOV_TOKEN
branches:
only:
- release
- dev
script:
- fastlane beta
|
4dd316f42c39479a673aa171a1ea376bd87087c7 | README.md | README.md |
The official Jekyll version of the Clean Blog theme by [Start Bootstrap](http://startbootstrap.com/).
###[View Live Demo →](http://ironsummitmedia.github.io/startbootstrap-clean-blog-jekyll/)
## Before You Begin
In the _config.yml file, the base URL is set to /startbootstrap-clean-blog-jekyll which is this themes gh-pages preview. It's recommended that you remove the base URL before working with this theme locally!
It should look like this:
`baseurl: ""`
## What's Included
A full Jekyll environment is included with this theme. If you have Jekyll installed, simply run `jekyll serve` in your command line and preview the build in your browser. You can use `jekyll serve --watch` to watch for changes in the source files as well.
A Grunt environment is also included. There are a number of tasks it performs like minification of the JavaScript, compiling of the LESS files, adding banners to keep the Apache 2.0 license intact, and watching for changes. Run the grunt default task by entering `grunt` into your command line which will build the files. You can use `grunt watch` if you are working on the JavaScript or the LESS.
You can run `jekyll serve --watch` and `grunt watch` at the same time to watch for changes and then build them all at once.
## Support
Visit Clean Blog's template overview page on Start Bootstrap at http://startbootstrap.com/template-overviews/clean-blog/ and leave a comment, email [email protected], or open an issue here on GitHub for support. |
The official Jekyll version of the Clean Blog theme by [Start Bootstrap](http://startbootstrap.com/).
###[View Live Demo →](http://caseman72.github.io/startbootstrap-clean-blog-jekyll/)
## Before You Begin
In the _config.yml file, the base URL is set to /startbootstrap-clean-blog-jekyll which is this themes gh-pages preview. It's recommended that you remove the base URL before working with this theme locally!
It should look like this:
`baseurl: ""`
## What's Included
A full Jekyll environment is included with this theme. If you have Jekyll installed, simply run `jekyll serve` in your command line and preview the build in your browser. You can use `jekyll serve --watch` to watch for changes in the source files as well.
A Grunt environment is also included. There are a number of tasks it performs like minification of the JavaScript, compiling of the LESS files, adding banners to keep the Apache 2.0 license intact, and watching for changes. Run the grunt default task by entering `grunt` into your command line which will build the files. You can use `grunt watch` if you are working on the JavaScript or the LESS.
You can run `jekyll serve --watch` and `grunt watch` at the same time to watch for changes and then build them all at once.
## Support
Visit Clean Blog's template overview page on Start Bootstrap at http://startbootstrap.com/template-overviews/clean-blog/ and leave a comment, email [email protected], or open an issue here on GitHub for support. | Fix Demo link to mine | Fix Demo link to mine
| Markdown | apache-2.0 | caseman72/startbootstrap-clean-blog-jekyll,caseman72/startbootstrap-clean-blog-jekyll,caseman72/startbootstrap-clean-blog-jekyll | markdown | ## Code Before:
The official Jekyll version of the Clean Blog theme by [Start Bootstrap](http://startbootstrap.com/).
###[View Live Demo →](http://ironsummitmedia.github.io/startbootstrap-clean-blog-jekyll/)
## Before You Begin
In the _config.yml file, the base URL is set to /startbootstrap-clean-blog-jekyll which is this themes gh-pages preview. It's recommended that you remove the base URL before working with this theme locally!
It should look like this:
`baseurl: ""`
## What's Included
A full Jekyll environment is included with this theme. If you have Jekyll installed, simply run `jekyll serve` in your command line and preview the build in your browser. You can use `jekyll serve --watch` to watch for changes in the source files as well.
A Grunt environment is also included. There are a number of tasks it performs like minification of the JavaScript, compiling of the LESS files, adding banners to keep the Apache 2.0 license intact, and watching for changes. Run the grunt default task by entering `grunt` into your command line which will build the files. You can use `grunt watch` if you are working on the JavaScript or the LESS.
You can run `jekyll serve --watch` and `grunt watch` at the same time to watch for changes and then build them all at once.
## Support
Visit Clean Blog's template overview page on Start Bootstrap at http://startbootstrap.com/template-overviews/clean-blog/ and leave a comment, email [email protected], or open an issue here on GitHub for support.
## Instruction:
Fix Demo link to mine
## Code After:
The official Jekyll version of the Clean Blog theme by [Start Bootstrap](http://startbootstrap.com/).
###[View Live Demo →](http://caseman72.github.io/startbootstrap-clean-blog-jekyll/)
## Before You Begin
In the _config.yml file, the base URL is set to /startbootstrap-clean-blog-jekyll which is this themes gh-pages preview. It's recommended that you remove the base URL before working with this theme locally!
It should look like this:
`baseurl: ""`
## What's Included
A full Jekyll environment is included with this theme. If you have Jekyll installed, simply run `jekyll serve` in your command line and preview the build in your browser. You can use `jekyll serve --watch` to watch for changes in the source files as well.
A Grunt environment is also included. There are a number of tasks it performs like minification of the JavaScript, compiling of the LESS files, adding banners to keep the Apache 2.0 license intact, and watching for changes. Run the grunt default task by entering `grunt` into your command line which will build the files. You can use `grunt watch` if you are working on the JavaScript or the LESS.
You can run `jekyll serve --watch` and `grunt watch` at the same time to watch for changes and then build them all at once.
## Support
Visit Clean Blog's template overview page on Start Bootstrap at http://startbootstrap.com/template-overviews/clean-blog/ and leave a comment, email [email protected], or open an issue here on GitHub for support. |
bc0b635ae47d05b3f83e73c6f0645fff9f76d5c7 | app/components/comment-thread/template.hbs | app/components/comment-thread/template.hbs | <div local-class="root">
{{#each (filter-by 'blockId' blockId comments) as |comment|}}
<div local-class="item">
{{comment-thread/item comment=comment removeComment=(perform removeComment comment)}}
</div>
{{/each}}
{{comment-thread/create-item createComment=(perform createComment)}}
</div> | <div local-class="root">
{{#each blockComments as |comment|}}
<div local-class="item">
{{comment-thread/item comment=comment removeComment=(perform removeComment comment)}}
</div>
{{/each}}
{{comment-thread/create-item createComment=(perform createComment)}}
</div> | Use passed down blockComments instead of computing the value at comment-thread | Use passed down blockComments instead of computing the value at comment-thread
| Handlebars | apache-2.0 | usecanvas/web-v2,usecanvas/web-v2,usecanvas/web-v2 | handlebars | ## Code Before:
<div local-class="root">
{{#each (filter-by 'blockId' blockId comments) as |comment|}}
<div local-class="item">
{{comment-thread/item comment=comment removeComment=(perform removeComment comment)}}
</div>
{{/each}}
{{comment-thread/create-item createComment=(perform createComment)}}
</div>
## Instruction:
Use passed down blockComments instead of computing the value at comment-thread
## Code After:
<div local-class="root">
{{#each blockComments as |comment|}}
<div local-class="item">
{{comment-thread/item comment=comment removeComment=(perform removeComment comment)}}
</div>
{{/each}}
{{comment-thread/create-item createComment=(perform createComment)}}
</div> |
f15d43080d44ae207640061a4e37752e161574b1 | .travis.yml | .travis.yml | sudo:
false
addons:
postgresql: '9.4'
sauce_connect: true
language:
python
python:
- 3.5
services:
- postgresql
- redis-server
env:
global:
- TESTING=True
- BASE_URL=http://localhost:5000
- TEST_DATABASE_URL=postgres://postgres@localhost:5432/goodtables_test
- BROKER_URL=redis://localhost:6379/10
- RESULT_BACKEND=redis://localhost:6379/11
- FLASK_SECRET_KEY=test-key
- GITHUB_HOOK_SECRET=test-github-hook-secret
- GITHUB_CLIENT_ID=test-github-client-id
- GITHUB_CLIENT_SECRET=test-github-client-secret
- S3_GT_ACCESS_KEY_ID=test
- S3_GT_SECRET_ACCESS_KEY=test
- S3_GT_AWS_REGION=test
- S3_GT_ACCOUNT_ID=test
- S3_LAMBDA_ARN=test
- S3_LAMBDA_HOOK_SECRET=test
before_install:
- nvm install 6
- nvm use 6
install:
- npm run init
- psql -c 'create database goodtables_test;' -U postgres
- npm run migrate
script:
- npm run build:dev
- npm run test
- npm run spec
after_success:
- pip install coveralls
- coveralls
cache:
directories:
- $HOME/.cache/pip
| sudo:
false
addons:
postgresql: '9.4'
language:
python
python:
- 3.5
services:
- postgresql
- redis-server
env:
global:
- TESTING=True
- BASE_URL=http://localhost:5000
- TEST_DATABASE_URL=postgres://postgres@localhost:5432/goodtables_test
- BROKER_URL=redis://localhost:6379/10
- RESULT_BACKEND=redis://localhost:6379/11
- FLASK_SECRET_KEY=test-key
- GITHUB_HOOK_SECRET=test-github-hook-secret
- GITHUB_CLIENT_ID=test-github-client-id
- GITHUB_CLIENT_SECRET=test-github-client-secret
- S3_GT_ACCESS_KEY_ID=test
- S3_GT_SECRET_ACCESS_KEY=test
- S3_GT_AWS_REGION=test
- S3_GT_ACCOUNT_ID=test
- S3_LAMBDA_ARN=test
- S3_LAMBDA_HOOK_SECRET=test
before_install:
- nvm install 6
- nvm use 6
install:
- npm run init
- psql -c 'create database goodtables_test;' -U postgres
- npm run migrate
script:
- npm run build:dev
- npm run test
after_success:
- pip install coveralls
- coveralls
cache:
directories:
- $HOME/.cache/pip
| Disable sauce labs for now | Disable sauce labs for now
| YAML | agpl-3.0 | frictionlessdata/goodtables.io,frictionlessdata/goodtables.io,frictionlessdata/goodtables.io,frictionlessdata/goodtables.io | yaml | ## Code Before:
sudo:
false
addons:
postgresql: '9.4'
sauce_connect: true
language:
python
python:
- 3.5
services:
- postgresql
- redis-server
env:
global:
- TESTING=True
- BASE_URL=http://localhost:5000
- TEST_DATABASE_URL=postgres://postgres@localhost:5432/goodtables_test
- BROKER_URL=redis://localhost:6379/10
- RESULT_BACKEND=redis://localhost:6379/11
- FLASK_SECRET_KEY=test-key
- GITHUB_HOOK_SECRET=test-github-hook-secret
- GITHUB_CLIENT_ID=test-github-client-id
- GITHUB_CLIENT_SECRET=test-github-client-secret
- S3_GT_ACCESS_KEY_ID=test
- S3_GT_SECRET_ACCESS_KEY=test
- S3_GT_AWS_REGION=test
- S3_GT_ACCOUNT_ID=test
- S3_LAMBDA_ARN=test
- S3_LAMBDA_HOOK_SECRET=test
before_install:
- nvm install 6
- nvm use 6
install:
- npm run init
- psql -c 'create database goodtables_test;' -U postgres
- npm run migrate
script:
- npm run build:dev
- npm run test
- npm run spec
after_success:
- pip install coveralls
- coveralls
cache:
directories:
- $HOME/.cache/pip
## Instruction:
Disable sauce labs for now
## Code After:
sudo:
false
addons:
postgresql: '9.4'
language:
python
python:
- 3.5
services:
- postgresql
- redis-server
env:
global:
- TESTING=True
- BASE_URL=http://localhost:5000
- TEST_DATABASE_URL=postgres://postgres@localhost:5432/goodtables_test
- BROKER_URL=redis://localhost:6379/10
- RESULT_BACKEND=redis://localhost:6379/11
- FLASK_SECRET_KEY=test-key
- GITHUB_HOOK_SECRET=test-github-hook-secret
- GITHUB_CLIENT_ID=test-github-client-id
- GITHUB_CLIENT_SECRET=test-github-client-secret
- S3_GT_ACCESS_KEY_ID=test
- S3_GT_SECRET_ACCESS_KEY=test
- S3_GT_AWS_REGION=test
- S3_GT_ACCOUNT_ID=test
- S3_LAMBDA_ARN=test
- S3_LAMBDA_HOOK_SECRET=test
before_install:
- nvm install 6
- nvm use 6
install:
- npm run init
- psql -c 'create database goodtables_test;' -U postgres
- npm run migrate
script:
- npm run build:dev
- npm run test
after_success:
- pip install coveralls
- coveralls
cache:
directories:
- $HOME/.cache/pip
|
13a79e69dcda03307798f8bfa7f260b057574ace | docs/modules.rst | docs/modules.rst | ========
Modules
========
regressors.plots
----------------
.. autosummary::
:toctree: generated/
regressors.plots.plot_residuals
regressors.stats
----------------
.. autosummary::
:toctree: generated/
regressors.stats.residuals
| ========
Modules
========
regressors.plots
----------------
.. autosummary::
:toctree: generated/
regressors.plots.plot_residuals
regressors.stats
----------------
.. autosummary::
:toctree: generated/
regressors.stats.residuals
regressors.regressors
---------------------
.. autosummary::
:toctree: generated/
regressors.regressors.PCR
| Update Modules doc to include PCR class | Update Modules doc to include PCR class
| reStructuredText | isc | nsh87/regressors | restructuredtext | ## Code Before:
========
Modules
========
regressors.plots
----------------
.. autosummary::
:toctree: generated/
regressors.plots.plot_residuals
regressors.stats
----------------
.. autosummary::
:toctree: generated/
regressors.stats.residuals
## Instruction:
Update Modules doc to include PCR class
## Code After:
========
Modules
========
regressors.plots
----------------
.. autosummary::
:toctree: generated/
regressors.plots.plot_residuals
regressors.stats
----------------
.. autosummary::
:toctree: generated/
regressors.stats.residuals
regressors.regressors
---------------------
.. autosummary::
:toctree: generated/
regressors.regressors.PCR
|
8bb4bf3c9e944239e4c69a096db14390c512079b | lib/postmark-rails.rb | lib/postmark-rails.rb | require 'action_mailer'
require 'postmark'
require 'postmark-rails/delivery_method'
module PostmarkRails
extend self
def auto_detect_and_install
if ActionMailer::Base.respond_to?(:add_delivery_method)
install_in_rails
else
install_in_legacy_rails
end
end
def install_in_legacy_rails
ActionMailer::Base.send(:include, PostmarkRails::DeliveryMethod)
end
def install_in_rails
ActionMailer::Base.add_delivery_method :postmark, Mail::Postmark, :api_key => nil
end
end
PostmarkRails.auto_detect_and_install
| require 'action_mailer'
require 'postmark'
module PostmarkRails
extend ActiveSupport::Autoload
extend self
autoload :DeliveryMethod, 'postmark-rails/delivery_method'
def auto_detect_and_install
if ActionMailer::Base.respond_to?(:add_delivery_method)
install_in_rails
else
install_in_legacy_rails
end
end
def install_in_legacy_rails
ActionMailer::Base.send(:include, PostmarkRails::DeliveryMethod)
end
def install_in_rails
ActionMailer::Base.add_delivery_method :postmark, Mail::Postmark, :api_key => nil
end
end
PostmarkRails.auto_detect_and_install
| Use ActiveSupport::Autoload so that we don't even load the PostmarkRails::DeliveryMethod module in newer versions. | Use ActiveSupport::Autoload so that we don't even load the PostmarkRails::DeliveryMethod module in newer versions.
| Ruby | mit | mattbrictson/postmark-rails,mattbrictson/postmark-rails,wildbit/postmark-rails,wildbit/postmark-rails | ruby | ## Code Before:
require 'action_mailer'
require 'postmark'
require 'postmark-rails/delivery_method'
module PostmarkRails
extend self
def auto_detect_and_install
if ActionMailer::Base.respond_to?(:add_delivery_method)
install_in_rails
else
install_in_legacy_rails
end
end
def install_in_legacy_rails
ActionMailer::Base.send(:include, PostmarkRails::DeliveryMethod)
end
def install_in_rails
ActionMailer::Base.add_delivery_method :postmark, Mail::Postmark, :api_key => nil
end
end
PostmarkRails.auto_detect_and_install
## Instruction:
Use ActiveSupport::Autoload so that we don't even load the PostmarkRails::DeliveryMethod module in newer versions.
## Code After:
require 'action_mailer'
require 'postmark'
module PostmarkRails
extend ActiveSupport::Autoload
extend self
autoload :DeliveryMethod, 'postmark-rails/delivery_method'
def auto_detect_and_install
if ActionMailer::Base.respond_to?(:add_delivery_method)
install_in_rails
else
install_in_legacy_rails
end
end
def install_in_legacy_rails
ActionMailer::Base.send(:include, PostmarkRails::DeliveryMethod)
end
def install_in_rails
ActionMailer::Base.add_delivery_method :postmark, Mail::Postmark, :api_key => nil
end
end
PostmarkRails.auto_detect_and_install
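The rationale above is lazy loading: the delivery-method file should not even be read until something asks for it. As a cross-language illustration only (this is not part of the gem), the same deferred-import idea in Python can be written with the standard library's `importlib.util.LazyLoader`; the `json` module below is just a stand-in for any dependency worth deferring.

```python
import importlib.util
import sys

def lazy_import(name):
    """Return a module whose real import runs only on first attribute access."""
    spec = importlib.util.find_spec(name)
    loader = importlib.util.LazyLoader(spec.loader)
    spec.loader = loader
    module = importlib.util.module_from_spec(spec)
    sys.modules[name] = module
    loader.exec_module(module)
    return module

json = lazy_import("json")           # nothing from json has executed yet
print(json.dumps({"loaded": True}))  # first attribute access triggers the real import
```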
|
a6b136c11ac805f1b0fbcee53eb1cb74c946e329 | content/showcase/nike-just-do-it.md | content/showcase/nike-just-do-it.md | ---
title: Nike Just Do It
date: 2018-09-14 16:20:09 -0400
related_tools:
- tool/gatsby.md
website: https://justdoit.nike.com/
repo: ''
creator: []
image_path: "/uploads/showcase/"
images:
- "/uploads/showcase-nike-gatsby.jpg"
---
| ---
title: Nike Just Do It
date: 2018-09-14 16:20:09 -0400
related_tools:
- tool/gatsby.md
- tool/datocms.md
website: https://justdoit.nike.com/
repo: ''
creator: []
image_path: "/uploads/showcase/"
images:
- "/uploads/showcase-nike-gatsby.jpg"
---
| Add dato to showcase item | Add dato to showcase item
| Markdown | mit | budparr/thenewdynamic,budparr/thenewdynamic,budparr/thenewdynamic,thenewdynamic-org/thenewdynamic.org,thenewdynamic-org/thenewdynamic.org | markdown | ## Code Before:
---
title: Nike Just Do It
date: 2018-09-14 16:20:09 -0400
related_tools:
- tool/gatsby.md
website: https://justdoit.nike.com/
repo: ''
creator: []
image_path: "/uploads/showcase/"
images:
- "/uploads/showcase-nike-gatsby.jpg"
---
## Instruction:
Add dato to showcase item
## Code After:
---
title: Nike Just Do It
date: 2018-09-14 16:20:09 -0400
related_tools:
- tool/gatsby.md
- tool/datocms.md
website: https://justdoit.nike.com/
repo: ''
creator: []
image_path: "/uploads/showcase/"
images:
- "/uploads/showcase-nike-gatsby.jpg"
---
|
a01bd737ffaa5bea4524b5dc65e27f7d43d9435e | README.rst | README.rst | ===============
python-webuntis
===============
.. image:: https://travis-ci.org/untitaker/python-webuntis.png?branch=master
:target: https://travis-ci.org/untitaker/python-webuntis
.. image:: https://coveralls.io/repos/untitaker/python-webuntis/badge.png
:target: https://coveralls.io/r/untitaker/python-webuntis
Bindings for WebUntis API
=========================
::
import webuntis
s = webuntis.Session(
username='api',
password='api',
server='webuntis.grupet.at:8080',
school='demo_inf'
).login()
for klasse in s.klassen():
print(klasse.name)
`read more... <http://python-webuntis.readthedocs.org/en/latest/>`_
Installation
============
Latest version (this is the normal way)
+++++++++++++++++++++++++++++++++++++++
::
pip install webuntis
| =====================================
This project is no longer maintained.
=====================================
I moved on and do no longer use it. See `the relevant ticket
<https://github.com/untitaker/python-webuntis/issues/3>`_.
|
|
|
|
|
|
===============
python-webuntis
===============
.. image:: https://travis-ci.org/untitaker/python-webuntis.png?branch=master
:target: https://travis-ci.org/untitaker/python-webuntis
.. image:: https://coveralls.io/repos/untitaker/python-webuntis/badge.png
:target: https://coveralls.io/r/untitaker/python-webuntis
Bindings for WebUntis API
=========================
::
import webuntis
s = webuntis.Session(
username='api',
password='api',
server='webuntis.grupet.at:8080',
school='demo_inf'
).login()
for klasse in s.klassen():
print(klasse.name)
`read more... <http://python-webuntis.readthedocs.org/en/latest/>`_
Installation
============
Latest version (this is the normal way)
+++++++++++++++++++++++++++++++++++++++
::
pip install webuntis
| Add huge warning about maintenance | Add huge warning about maintenance
| reStructuredText | bsd-3-clause | maphy-psd/python-webuntis,untitaker/python-webuntis | restructuredtext | ## Code Before:
===============
python-webuntis
===============
.. image:: https://travis-ci.org/untitaker/python-webuntis.png?branch=master
:target: https://travis-ci.org/untitaker/python-webuntis
.. image:: https://coveralls.io/repos/untitaker/python-webuntis/badge.png
:target: https://coveralls.io/r/untitaker/python-webuntis
Bindings for WebUntis API
=========================
::
import webuntis
s = webuntis.Session(
username='api',
password='api',
server='webuntis.grupet.at:8080',
school='demo_inf'
).login()
for klasse in s.klassen():
print(klasse.name)
`read more... <http://python-webuntis.readthedocs.org/en/latest/>`_
Installation
============
Latest version (this is the normal way)
+++++++++++++++++++++++++++++++++++++++
::
pip install webuntis
## Instruction:
Add huge warning about maintenance
## Code After:
=====================================
This project is no longer maintained.
=====================================
I moved on and do no longer use it. See `the relevant ticket
<https://github.com/untitaker/python-webuntis/issues/3>`_.
|
|
|
|
|
|
===============
python-webuntis
===============
.. image:: https://travis-ci.org/untitaker/python-webuntis.png?branch=master
:target: https://travis-ci.org/untitaker/python-webuntis
.. image:: https://coveralls.io/repos/untitaker/python-webuntis/badge.png
:target: https://coveralls.io/r/untitaker/python-webuntis
Bindings for WebUntis API
=========================
::
import webuntis
s = webuntis.Session(
username='api',
password='api',
server='webuntis.grupet.at:8080',
school='demo_inf'
).login()
for klasse in s.klassen():
print(klasse.name)
`read more... <http://python-webuntis.readthedocs.org/en/latest/>`_
Installation
============
Latest version (this is the normal way)
+++++++++++++++++++++++++++++++++++++++
::
pip install webuntis
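The session example earlier in this README leaves the connection open. A small extension of that same snippet, closing the session when done, is sketched below; it assumes the session object exposes a `logout()` method, and everything else is copied from the example above.

```python
import webuntis

# Same demo credentials as in the README snippet above.
s = webuntis.Session(
    username='api',
    password='api',
    server='webuntis.grupet.at:8080',
    school='demo_inf'
).login()

try:
    for klasse in s.klassen():
        print(klasse.name)
finally:
    s.logout()  # assumed to be available; ends the session on the WebUntis server
```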
|
1e8f9a95badc1e2b558bae7570ef9bc23f26a0df | pyhaystack/info.py | pyhaystack/info.py |
__author__ = 'Christian Tremblay, @sjlongland, @sudo-Whateverman, Igor'
__author_email__ = '[email protected]'
__version__ = '0.71.1.8.2'
__license__ = 'LGPL'
|
__author__ = 'Christian Tremblay, Stuart J. Longland, @sudo-Whateverman, Igor'
__author_email__ = '[email protected]'
__version__ = '0.72'
__license__ = 'LGPL'
| Modify version to 0.72 to mark change | Modify version to 0.72 to mark change
Signed-off-by: Christian Tremblay <[email protected]>
| Python | apache-2.0 | ChristianTremblay/pyhaystack,vrtsystems/pyhaystack,ChristianTremblay/pyhaystack | python | ## Code Before:
__author__ = 'Christian Tremblay, @sjlongland, @sudo-Whateverman, Igor'
__author_email__ = '[email protected]'
__version__ = '0.71.1.8.2'
__license__ = 'LGPL'
## Instruction:
Modify version to 0.72 to mark change
Signed-off-by: Christian Tremblay <[email protected]>
## Code After:
__author__ = 'Christian Tremblay, Stuart J. Longland, @sudo-Whateverman, Igor'
__author_email__ = '[email protected]'
__version__ = '0.72'
__license__ = 'LGPL'
|
25f63f33d03b5c9141557b17cac0c442c19bbcec | WebSearch/src/main/res/layout/import_layout.xml | WebSearch/src/main/res/layout/import_layout.xml | <?xml version="1.0" encoding="utf-8"?>
<RelativeLayout
xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="match_parent"
android:layout_height="match_parent">
<ProgressBar
android:id="@id/detail_progressBar"
android:visibility="gone"
android:layout_centerInParent="true"
android:layout_width="wrap_content" android:layout_height="wrap_content" />
<EditText
android:id="@id/detail_fieldImportFromUrl"
android:hint="@string/hint_import_url"
android:inputType="text|textUri"
android:layout_marginTop="@dimen/field_vertical_margin"
android:layout_width="match_parent" android:layout_height="wrap_content" />
</RelativeLayout> | <?xml version="1.0" encoding="utf-8"?>
<RelativeLayout
xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="match_parent"
android:layout_height="match_parent">
<EditText
android:id="@id/detail_fieldImportFromUrl"
android:hint="@string/hint_import_url"
android:inputType="text|textUri"
android:layout_marginTop="@dimen/field_vertical_margin"
android:layout_width="match_parent" android:layout_height="wrap_content" />
<ProgressBar
android:id="@id/detail_progressBar"
android:visibility="invisible"
android:indeterminateOnly="true"
style="@android:style/Widget.ProgressBar.Horizontal"
android:layout_below="@id/detail_fieldImportFromUrl"
android:layout_width="match_parent" android:layout_height="wrap_content" />
</RelativeLayout> | Change progressbar style in import layout | Change progressbar style in import layout
| XML | apache-2.0 | balesz/android-WebSearch,balesz/android-WebSearch | xml | ## Code Before:
<?xml version="1.0" encoding="utf-8"?>
<RelativeLayout
xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="match_parent"
android:layout_height="match_parent">
<ProgressBar
android:id="@id/detail_progressBar"
android:visibility="gone"
android:layout_centerInParent="true"
android:layout_width="wrap_content" android:layout_height="wrap_content" />
<EditText
android:id="@id/detail_fieldImportFromUrl"
android:hint="@string/hint_import_url"
android:inputType="text|textUri"
android:layout_marginTop="@dimen/field_vertical_margin"
android:layout_width="match_parent" android:layout_height="wrap_content" />
</RelativeLayout>
## Instruction:
Change progressbar style in import layout
## Code After:
<?xml version="1.0" encoding="utf-8"?>
<RelativeLayout
xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="match_parent"
android:layout_height="match_parent">
<EditText
android:id="@id/detail_fieldImportFromUrl"
android:hint="@string/hint_import_url"
android:inputType="text|textUri"
android:layout_marginTop="@dimen/field_vertical_margin"
android:layout_width="match_parent" android:layout_height="wrap_content" />
<ProgressBar
android:id="@id/detail_progressBar"
android:visibility="invisible"
android:indeterminateOnly="true"
style="@android:style/Widget.ProgressBar.Horizontal"
android:layout_below="@id/detail_fieldImportFromUrl"
android:layout_width="match_parent" android:layout_height="wrap_content" />
</RelativeLayout> |
3a845a9a45d7b6d9c31664c50ee9b27cd5cbd670 | mod/pythonauto/sendcode.php | mod/pythonauto/sendcode.php | <?php
require_once "../../config.php";
require_once $CFG->dirroot."/pdo.php";
// Sanity checks
$LTI = requireData(array('user_id', 'link_id', 'role','context_id'));
$instructor = isInstructor($LTI);
$user_id = $LTI['user_id'];
$grade = 1.0;
$code = $_POST['code'];
$json = json_encode(array("code" => $code));
$retval = updateGradeJSON($pdo, $json);
$retval = Array("status" => "success");
echo json_encode($retval);
| <?php
require_once "../../config.php";
require_once $CFG->dirroot."/pdo.php";
// Sanity checks
$LTI = requireData(array('user_id', 'link_id', 'role','context_id'));
$instructor = isInstructor($LTI);
$user_id = $LTI['user_id'];
$grade = 1.0;
if ( ! isset($_POST['code']) ) {
echo(json_encode(array("error" => "Missing code")));
return;
}
$code = $_POST['code'];
$json = json_encode(array("code" => $code));
$retval = updateGradeJSON($pdo, $json);
$retval = Array("status" => "success");
echo json_encode($retval);
| Check for missing code variable. | Check for missing code variable.
| PHP | apache-2.0 | reysantos/tsugi,komizutama/tsugi,tsugiproject/tsugi,tsugiproject/tsugi,komizutama/tsugi,reysantos/tsugi,reysantos/tsugi,cvanlent/tsugi,JANQLIANGTSAI/tsugi,tsugiproject/tsugi,cvanlent/tsugi,komizutama/tsugi,tsugiproject/tsugi,cvanlent/tsugi,JANQLIANGTSAI/tsugi,JANQLIANGTSAI/tsugi,csev/tsugi | php | ## Code Before:
<?php
require_once "../../config.php";
require_once $CFG->dirroot."/pdo.php";
// Sanity checks
$LTI = requireData(array('user_id', 'link_id', 'role','context_id'));
$instructor = isInstructor($LTI);
$user_id = $LTI['user_id'];
$grade = 1.0;
$code = $_POST['code'];
$json = json_encode(array("code" => $code));
$retval = updateGradeJSON($pdo, $json);
$retval = Array("status" => "success");
echo json_encode($retval);
## Instruction:
Check for missing code variable.
## Code After:
<?php
require_once "../../config.php";
require_once $CFG->dirroot."/pdo.php";
// Sanity checks
$LTI = requireData(array('user_id', 'link_id', 'role','context_id'));
$instructor = isInstructor($LTI);
$user_id = $LTI['user_id'];
$grade = 1.0;
if ( ! isset($_POST['code']) ) {
echo(json_encode(array("error" => "Missing code")));
return;
}
$code = $_POST['code'];
$json = json_encode(array("code" => $code));
$retval = updateGradeJSON($pdo, $json);
$retval = Array("status" => "success");
echo json_encode($retval);
|
f8054636ef5cf071e855a8bc6897dd7f4e5f8cfa | JDBC/SelectValues.java | JDBC/SelectValues.java | import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
public class SelectValues {
public static void main(String[] args) throws SQLException{
Connection con=DBInfo2.getConnection();
String query="SELECT * FROM login";
PreparedStatement stmt=con.prepareStatement(query);
ResultSet rs=stmt.executeQuery();
// getting meta data
System.out.println("Getting Meta Data\n---------------------------------------");
ResultSetMetaData rsmd=rs.getMetaData();
int count=rsmd.getColumnCount();
System.out.println("Column count : "+count);
for(int i=1;i<=count;i++){
}
}
}
| import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
public class SelectValues {
public static void main(String[] args) throws SQLException{
Connection con=DBInfo2.getConnection();
String query="SELECT * FROM login";
PreparedStatement stmt=con.prepareStatement(query);
ResultSet rs=stmt.executeQuery();
// getting meta data
System.out.println("Getting Meta Data\n---------------------------------------");
ResultSetMetaData rsmd=rs.getMetaData();
int count=rsmd.getColumnCount();
System.out.println("Column count : "+count);
for(int i=1;i<=count;i++){
System.out.println(rsmd.getColumnName(i)+":::"+rsmd.getColumnDisplaySize(i)+":::"+rsmd.getColumnTypeName(i));
}
System.out.println("---------------------------------------\nGetting Records Data\n---------------------------------------");
// getting records data
while(rs.next()){
System.out.println(rs.getInt(1)+":::"+rs.getString(2)+":::"+rs.getString(3)+":::"+rs.getString(4));
}
rs.close();
con.close();
stmt.close();
}
} | Select querry implementation with meta data | Select querry implementation with meta data
| Java | mit | AlphaBAT69/Java-Programs | java | ## Code Before:
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
public class SelectValues {
public static void main(String[] args) throws SQLException{
Connection con=DBInfo2.getConnection();
String query="SELECT * FROM login";
PreparedStatement stmt=con.prepareStatement(query);
ResultSet rs=stmt.executeQuery();
// getting meta data
System.out.println("Getting Meta Data\n---------------------------------------");
ResultSetMetaData rsmd=rs.getMetaData();
int count=rsmd.getColumnCount();
System.out.println("Column count : "+count);
for(int i=1;i<=count;i++){
}
}
}
## Instruction:
Select querry implementation with meta data
## Code After:
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
public class SelectValues {
public static void main(String[] args) throws SQLException{
Connection con=DBInfo2.getConnection();
String query="SELECT * FROM login";
PreparedStatement stmt=con.prepareStatement(query);
ResultSet rs=stmt.executeQuery();
// getting meta data
System.out.println("Getting Meta Data\n---------------------------------------");
ResultSetMetaData rsmd=rs.getMetaData();
int count=rsmd.getColumnCount();
System.out.println("Column count : "+count);
for(int i=1;i<=count;i++){
System.out.println(rsmd.getColumnName(i)+":::"+rsmd.getColumnDisplaySize(i)+":::"+rsmd.getColumnTypeName(i));
}
System.out.println("---------------------------------------\nGetting Records Data\n---------------------------------------");
// getting records data
while(rs.next()){
System.out.println(rs.getInt(1)+":::"+rs.getString(2)+":::"+rs.getString(3)+":::"+rs.getString(4));
}
rs.close();
con.close();
stmt.close();
}
} |
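The Java program above reads the column metadata first and then walks the rows. For comparison only — this is not part of the Java example — the same two-step pattern in Python's DB-API uses `cursor.description`; `sqlite3` is used purely because it ships with Python, and the in-memory table is a stand-in for the `login` table.

```python
import sqlite3

con = sqlite3.connect(":memory:")
con.execute("CREATE TABLE login (id INTEGER, username TEXT, email TEXT, password TEXT)")
con.execute("INSERT INTO login VALUES (1, 'demo', '[email protected]', 'secret')")

cur = con.execute("SELECT * FROM login")

# cursor.description plays the role of ResultSetMetaData: one 7-tuple per column,
# with the column name in position 0 (sqlite3 leaves most other fields as None).
print("Column count :", len(cur.description))
for column in cur.description:
    print(column[0])

for row in cur.fetchall():
    print(":::".join(str(value) for value in row))

con.close()
```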
201fc9a9d6f81ed3ae39f9d0512bb2221f241d8d | lib/active_job/stats/callbacks.rb | lib/active_job/stats/callbacks.rb | module ActiveJob
module Stats
module Callbacks
extend ActiveSupport::Concern
included do
before_enqueue :after_enqueue_stats, if: :monitored
after_enqueue :after_enqueue_stats, if: :monitored
before_perform :before_perform_stats, if: :monitored
after_perform :after_perform_stats, if: :monitored
around_perform :benchmark_stats, if: :benchmarked
private
def benchmark_stats
require 'active_support/core_ext/benchmark'
benchmark = Benchmark.ms { yield }
ActiveJob::Stats.reporter.timing("#{self.class.queue_name}.processed", benchmark)
ActiveJob::Stats.reporter.timing("#{self.class}.processed", benchmark)
end
def before_perform_stats
ActiveJob::Stats.reporter.increment("#{self.class.queue_name}.started")
ActiveJob::Stats.reporter.increment("#{self.class}.started")
ActiveJob::Stats.reporter.increment('total.started')
end
def after_enqueue_stats
ActiveJob::Stats.reporter.increment("#{self.class.queue_name}.enqueued")
ActiveJob::Stats.reporter.increment("#{self.class}.enqueued")
ActiveJob::Stats.reporter.increment('total.enqueued')
end
def after_perform_stats
ActiveJob::Stats.reporter.increment("#{self.class.queue_name}.finished")
ActiveJob::Stats.reporter.increment("#{self.class}.finished")
ActiveJob::Stats.reporter.increment('total.finished')
end
delegate :benchmarked, :monitored, to: :class
end
end
end
end
| module ActiveJob
module Stats
module Callbacks
extend ActiveSupport::Concern
included do
before_enqueue :after_enqueue_stats, if: :monitored
after_enqueue :after_enqueue_stats, if: :monitored
before_perform :before_perform_stats, if: :monitored
after_perform :after_perform_stats, if: :monitored
around_perform :benchmark_stats, if: :benchmarked
private
def benchmark_stats
require 'active_support/core_ext/benchmark'
benchmark = Benchmark.ms { yield }
ActiveJob::Stats.reporter.timing("#{queue_name}.processed", benchmark)
ActiveJob::Stats.reporter.timing("#{self.class}.processed", benchmark)
end
def before_perform_stats
ActiveJob::Stats.reporter.increment("#{queue_name}.started")
ActiveJob::Stats.reporter.increment("#{self.class}.started")
ActiveJob::Stats.reporter.increment('total.started')
end
def after_enqueue_stats
ActiveJob::Stats.reporter.increment("#{queue_name}.enqueued")
ActiveJob::Stats.reporter.increment("#{self.class}.enqueued")
ActiveJob::Stats.reporter.increment('total.enqueued')
end
def after_perform_stats
ActiveJob::Stats.reporter.increment("#{queue_name}.finished")
ActiveJob::Stats.reporter.increment("#{self.class}.finished")
ActiveJob::Stats.reporter.increment('total.finished')
end
delegate :benchmarked, :monitored, to: :class
end
end
end
end
| Use job instance's queue_name instead of class's | fix:dev: Use job instance's queue_name instead of class's
Two good reasons:
- The job instance can be enqueued with a different queue_name than its
class normally has.
- The job class can pass a Proc to queue_name to dynamically determine
the name. The queue_name is still set on the job instance.
| Ruby | mit | seuros/activejob-stats | ruby | ## Code Before:
module ActiveJob
module Stats
module Callbacks
extend ActiveSupport::Concern
included do
before_enqueue :after_enqueue_stats, if: :monitored
after_enqueue :after_enqueue_stats, if: :monitored
before_perform :before_perform_stats, if: :monitored
after_perform :after_perform_stats, if: :monitored
around_perform :benchmark_stats, if: :benchmarked
private
def benchmark_stats
require 'active_support/core_ext/benchmark'
benchmark = Benchmark.ms { yield }
ActiveJob::Stats.reporter.timing("#{self.class.queue_name}.processed", benchmark)
ActiveJob::Stats.reporter.timing("#{self.class}.processed", benchmark)
end
def before_perform_stats
ActiveJob::Stats.reporter.increment("#{self.class.queue_name}.started")
ActiveJob::Stats.reporter.increment("#{self.class}.started")
ActiveJob::Stats.reporter.increment('total.started')
end
def after_enqueue_stats
ActiveJob::Stats.reporter.increment("#{self.class.queue_name}.enqueued")
ActiveJob::Stats.reporter.increment("#{self.class}.enqueued")
ActiveJob::Stats.reporter.increment('total.enqueued')
end
def after_perform_stats
ActiveJob::Stats.reporter.increment("#{self.class.queue_name}.finished")
ActiveJob::Stats.reporter.increment("#{self.class}.finished")
ActiveJob::Stats.reporter.increment('total.finished')
end
delegate :benchmarked, :monitored, to: :class
end
end
end
end
## Instruction:
fix:dev: Use job instance's queue_name instead of class's
Two good reasons:
- The job instance can be enqueued with a different queue_name than its
class normally has.
- The job class can pass a Proc to queue_name to dynamically determine
the name. The queue_name is still set on the job instance.
## Code After:
module ActiveJob
module Stats
module Callbacks
extend ActiveSupport::Concern
included do
before_enqueue :after_enqueue_stats, if: :monitored
after_enqueue :after_enqueue_stats, if: :monitored
before_perform :before_perform_stats, if: :monitored
after_perform :after_perform_stats, if: :monitored
around_perform :benchmark_stats, if: :benchmarked
private
def benchmark_stats
require 'active_support/core_ext/benchmark'
benchmark = Benchmark.ms { yield }
ActiveJob::Stats.reporter.timing("#{queue_name}.processed", benchmark)
ActiveJob::Stats.reporter.timing("#{self.class}.processed", benchmark)
end
def before_perform_stats
ActiveJob::Stats.reporter.increment("#{queue_name}.started")
ActiveJob::Stats.reporter.increment("#{self.class}.started")
ActiveJob::Stats.reporter.increment('total.started')
end
def after_enqueue_stats
ActiveJob::Stats.reporter.increment("#{queue_name}.enqueued")
ActiveJob::Stats.reporter.increment("#{self.class}.enqueued")
ActiveJob::Stats.reporter.increment('total.enqueued')
end
def after_perform_stats
ActiveJob::Stats.reporter.increment("#{queue_name}.finished")
ActiveJob::Stats.reporter.increment("#{self.class}.finished")
ActiveJob::Stats.reporter.increment('total.finished')
end
delegate :benchmarked, :monitored, to: :class
end
end
end
end
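The reasoning above hinges on the difference between a class-level default and a per-instance value. The Python sketch below is only an analogy — the names are invented and this is not the ActiveJob API — but it shows why reading the queue name off the instance is what lets per-enqueue overrides and dynamically resolved names show up in the metric keys.

```python
class Job:
    queue_name = "default"          # class-level default queue

    def metric_key(self, suffix):
        # Looking the attribute up on the instance picks up any per-instance
        # override, falling back to the class default otherwise.
        return f"{self.queue_name}.{suffix}"

job = Job()
job.queue_name = "urgent"           # this particular job was enqueued onto another queue

print(Job.queue_name)               # -> default
print(job.metric_key("processed"))  # -> urgent.processed
```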
|
a5ab6b608f87bb22705f7ba99f74082f5c38b1a1 | tests/Transaction/TransactionInputStateTest.php | tests/Transaction/TransactionInputStateTest.php | <?php
namespace BitWasp\Bitcoin\Tests\Transaction;
class TransactionInputStateTest {
} | <?php
namespace BitWasp\Bitcoin\Tests\Transaction;
use BitWasp\Bitcoin\Bitcoin;
use BitWasp\Bitcoin\Crypto\EcAdapter\EcAdapterFactory;
use BitWasp\Bitcoin\Key\PrivateKeyFactory;
use BitWasp\Bitcoin\Script\ScriptFactory;
use BitWasp\Bitcoin\Tests\AbstractTestCase;
use BitWasp\Bitcoin\Transaction\TransactionBuilderInputState;
class TransactionInputStateTest extends AbstractTestCase
{
public function getRedeemScript()
{
$script = ScriptFactory::multisig(2, [
PrivateKeyFactory::create()->getPublicKey(),
PrivateKeyFactory::create()->getPublicKey(),
PrivateKeyFactory::create()->getPublicKey()
]);
return $script;
}
public function getOutputScript()
{
$script = ScriptFactory::scriptPubKey()
->payToAddress(PrivateKeyFactory::create()->getAddress());
return $script;
}
public function testCreateState()
{
$math = Bitcoin::getMath();
$G = Bitcoin::getGenerator();
$ecAdapter = EcAdapterFactory::getAdapter($math, $G);
$rs = $this->getRedeemScript();
$outputScript = $rs->getOutputScript();
$state = new TransactionBuilderInputState($ecAdapter, $outputScript, $rs);
$this->assertSame($outputScript, $state->getPrevOutScript());
$this->assertSame($rs, $state->getRedeemScript());
}
} | Add initial test of TransactionBuilderInputState | Add initial test of TransactionBuilderInputState
| PHP | unlicense | Bit-Wasp/bitcoin-php,Bit-Wasp/bitcoin-php,afk11/bitcoin-php,blocktrail/bitcoin-php,afk11/bitcoin-php | php | ## Code Before:
<?php
namespace BitWasp\Bitcoin\Tests\Transaction;
class TransactionInputStateTest {
}
## Instruction:
Add initial test of TransactionBuilderInputState
## Code After:
<?php
namespace BitWasp\Bitcoin\Tests\Transaction;
use BitWasp\Bitcoin\Bitcoin;
use BitWasp\Bitcoin\Crypto\EcAdapter\EcAdapterFactory;
use BitWasp\Bitcoin\Key\PrivateKeyFactory;
use BitWasp\Bitcoin\Script\ScriptFactory;
use BitWasp\Bitcoin\Tests\AbstractTestCase;
use BitWasp\Bitcoin\Transaction\TransactionBuilderInputState;
class TransactionInputStateTest extends AbstractTestCase
{
public function getRedeemScript()
{
$script = ScriptFactory::multisig(2, [
PrivateKeyFactory::create()->getPublicKey(),
PrivateKeyFactory::create()->getPublicKey(),
PrivateKeyFactory::create()->getPublicKey()
]);
return $script;
}
public function getOutputScript()
{
$script = ScriptFactory::scriptPubKey()
->payToAddress(PrivateKeyFactory::create()->getAddress());
return $script;
}
public function testCreateState()
{
$math = Bitcoin::getMath();
$G = Bitcoin::getGenerator();
$ecAdapter = EcAdapterFactory::getAdapter($math, $G);
$rs = $this->getRedeemScript();
$outputScript = $rs->getOutputScript();
$state = new TransactionBuilderInputState($ecAdapter, $outputScript, $rs);
$this->assertSame($outputScript, $state->getPrevOutScript());
$this->assertSame($rs, $state->getRedeemScript());
}
} |
1f0050d57fdd72992ee9e6cc87e6fdf76b9f3c93 | Build-Images.ps1 | Build-Images.ps1 | Param(
[Parameter(Mandatory=$true)]
[string] $Repo,
[Parameter()]
[string] $Version = '1.0.0-dev'
)
$Components = @(
'api',
'sql-executor',
'provisioning',
'ui'
)
$docker = Get-Command docker
Write-Host 'Building images...'
ForEach ($Component in $Components) {
& $docker build -t "${Repo}/${Component}:${Version}" -f Dockerfile.${Component} .
}
Write-Host 'Pushing images...'
ForEach ($Component in $Components) {
& $docker push "${Repo}/${Component}:${Version}"
}
| Param(
[Parameter(Mandatory=$true)]
[string] $Repo,
[Parameter()]
[string] $Version = '1.0.0-dev',
[Parameter()]
[switch] $Deploy
)
$Components = @(
'api',
'sql-executor',
'provisioning',
'ui'
)
$docker = Get-Command docker
Write-Host 'Building images...'
ForEach ($Component in $Components) {
& $docker build -t "${Repo}/${Component}:${Version}" -f Dockerfile.${Component} .
}
Write-Host 'Pushing images...'
ForEach ($Component in $Components) {
& $docker push "${Repo}/${Component}:${Version}"
}
If ($Deploy) {
$kubectl = Get-Command kubectl
$manifestDirectory = Join-Path $PSScriptRoot 'deploy\k8s'
Write-Host 'Deploying application...'
ForEach ($Component in $Components) {
$manifestFile = Join-Path $manifestDirectory "$Component.yml"
& $kubectl delete -f $manifestFile
& $kubectl apply -f $manifestFile
}
}
| Add deployment support to image-build script. | Add deployment support to image-build script.
| PowerShell | mit | DimensionDataResearch/daas-demo,DimensionDataResearch/daas-demo,DimensionDataResearch/daas-demo,DimensionDataResearch/daas-demo,DimensionDataResearch/daas-demo,DimensionDataResearch/daas-demo,DimensionDataResearch/daas-demo | powershell | ## Code Before:
Param(
[Parameter(Mandatory=$true)]
[string] $Repo,
[Parameter()]
[string] $Version = '1.0.0-dev'
)
$Components = @(
'api',
'sql-executor',
'provisioning',
'ui'
)
$docker = Get-Command docker
Write-Host 'Building images...'
ForEach ($Component in $Components) {
& $docker build -t "${Repo}/${Component}:${Version}" -f Dockerfile.${Component} .
}
Write-Host 'Pushing images...'
ForEach ($Component in $Components) {
& $docker push "${Repo}/${Component}:${Version}"
}
## Instruction:
Add deployment support to image-build script.
## Code After:
Param(
[Parameter(Mandatory=$true)]
[string] $Repo,
[Parameter()]
[string] $Version = '1.0.0-dev',
[Parameter()]
[switch] $Deploy
)
$Components = @(
'api',
'sql-executor',
'provisioning',
'ui'
)
$docker = Get-Command docker
Write-Host 'Building images...'
ForEach ($Component in $Components) {
& $docker build -t "${Repo}/${Component}:${Version}" -f Dockerfile.${Component} .
}
Write-Host 'Pushing images...'
ForEach ($Component in $Components) {
& $docker push "${Repo}/${Component}:${Version}"
}
If ($Deploy) {
$kubectl = Get-Command kubectl
$manifestDirectory = Join-Path $PSScriptRoot 'deploy\k8s'
Write-Host 'Deploying application...'
ForEach ($Component in $Components) {
$manifestFile = Join-Path $manifestDirectory "$Component.yml"
& $kubectl delete -f $manifestFile
& $kubectl apply -f $manifestFile
}
}
|
576af3b35d80de6b07c93bb4c5259431687da83c | examples/settings.json | examples/settings.json | {
"type": "settings",
"deviceTime": "2014-02-17T12:12:12",
"activeBasalSchedule": "standard",
"basalSchedules": {
"standard": [
{ "rate": 0.8, "start": 0 },
{ "rate": 0.75, "start": 3600000 },
...
],
"pattern a": [
{ "rate": 0.95, "start": 0 },
{ "rate": 0.9, "start": 3600000 },
...
},
"carbRatio" : [
{ "amount": 12, "units": "grams", "start": 0 },
{ "amount": 10, "units": "grams", "start": 21600000 },
...
],
"insulinSensitivity" : [
{ "amount": 65, "start": 0 },
{ "amount": 45, "start": 18000000 },
...
],
"bgTarget": [
{ "low": 100, "high": 120, "start": 0 },
{ "low": 90, "high": 110, "start": 18000000 },
...
]
}
| {
"type": "settings",
"deviceTime": "2014-02-17T12:12:12",
"activeBasalSchedule": "standard",
"basalSchedules": {
"standard": [
{ "rate": 0.8, "start": 0 },
{ "rate": 0.75, "start": 3600000 },
...
],
"pattern a": [
{ "rate": 0.95, "start": 0 },
{ "rate": 0.9, "start": 3600000 },
...
]
},
"carbRatio" : [
{ "amount": 12, "units": "grams", "start": 0 },
{ "amount": 10, "units": "grams", "start": 21600000 },
...
],
"insulinSensitivity" : [
{ "amount": 65, "units": "mg dL", "start": 0 },
{ "amount": 45, "units": "mg dL", "start": 18000000 },
...
],
"bgTarget": [
{ "low": 100, "high": 120, "units": "mg dL", "start": 0 },
{ "low": 90, "high": 110, "units": "mg dL", "start": 18000000 },
...
]
}
| Add units where units are provided | Add units where units are provided
| JSON | bsd-2-clause | tidepool-org/deprecated-data-model,tidepool-org/deprecated-data-model,tidepool-org/deprecated-data-model,tidepool-org/deprecated-data-model | json | ## Code Before:
{
"type": "settings",
"deviceTime": "2014-02-17T12:12:12",
"activeBasalSchedule": "standard",
"basalSchedules": {
"standard": [
{ "rate": 0.8, "start": 0 },
{ "rate": 0.75, "start": 3600000 },
...
],
"pattern a": [
{ "rate": 0.95, "start": 0 },
{ "rate": 0.9, "start": 3600000 },
...
},
"carbRatio" : [
{ "amount": 12, "units": "grams", "start": 0 },
{ "amount": 10, "units": "grams", "start": 21600000 },
...
],
"insulinSensitivity" : [
{ "amount": 65, "start": 0 },
{ "amount": 45, "start": 18000000 },
...
],
"bgTarget": [
{ "low": 100, "high": 120, "start": 0 },
{ "low": 90, "high": 110, "start": 18000000 },
...
]
}
## Instruction:
Add units where units are provided
## Code After:
{
"type": "settings",
"deviceTime": "2014-02-17T12:12:12",
"activeBasalSchedule": "standard",
"basalSchedules": {
"standard": [
{ "rate": 0.8, "start": 0 },
{ "rate": 0.75, "start": 3600000 },
...
],
"pattern a": [
{ "rate": 0.95, "start": 0 },
{ "rate": 0.9, "start": 3600000 },
...
]
},
"carbRatio" : [
{ "amount": 12, "units": "grams", "start": 0 },
{ "amount": 10, "units": "grams", "start": 21600000 },
...
],
"insulinSensitivity" : [
{ "amount": 65, "units": "mg dL", "start": 0 },
{ "amount": 45, "units": "mg dL", "start": 18000000 },
...
],
"bgTarget": [
{ "low": 100, "high": 120, "units": "mg dL", "start": 0 },
{ "low": 90, "high": 110, "units": "mg dL", "start": 18000000 },
...
]
}
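The `start` values in these schedules read as millisecond offsets from the start of the day (3600000 is one hour, 21600000 is six). Under that assumption, a few lines of Python turn the two `carbRatio` segments spelled out above into clock times; nothing beyond those two segments is used.

```python
from datetime import timedelta

# The two carbRatio segments shown in the example above.
carb_ratio = [
    {"amount": 12, "units": "grams", "start": 0},
    {"amount": 10, "units": "grams", "start": 21600000},
]

for segment in carb_ratio:
    clock = timedelta(milliseconds=segment["start"])
    print(f"{clock} -> {segment['amount']} {segment['units']}")
# 0:00:00 -> 12 grams
# 6:00:00 -> 10 grams
```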
|
e809e87fa3e40cf0f147c469759b238135a25e5e | README.md | README.md |
TorrentUploader is a simple utility that allows you to upload **.torrent** files or
**magnet** links to a headless uTorrent server from a remote client with a single click
without logging into the WebUI Management console. It does this by associating itself
.torrent files and magnet links and then using the uTorrent API to start the downloading
of the torrent contents. |
TorrentUploader is a simple utility that allows you to upload **.torrent** files or
**magnet** links to a headless uTorrent server from a remote client with a single click
without logging into the WebUI Management console. It does this by associating itself
with .torrent files and magnet links on the client machine and then using the uTorrent
API to start the download of the torrent contents on the server machine. | Fix up readme a little bit | Fix up readme a little bit
| Markdown | bsd-2-clause | thegrandpoobah/TorrentUploader | markdown | ## Code Before:
TorrentUploader is a simple utility that allows you to upload **.torrent** files or
**magnet** links to a headless uTorrent server from a remote client with a single click
without logging into the WebUI Management console. It does this by associating itself
.torrent files and magnet links and then using the uTorrent API to start the downloading
of the torrent contents.
## Instruction:
Fix up readme a little bit
## Code After:
TorrentUploader is a simple utility that allows you to upload **.torrent** files or
**magnet** links to a headless uTorrent server from a remote client with a single click
without logging into the WebUI Management console. It does this by associating itself
with .torrent files and magnet links on the client machine and then using the uTorrent
API to start the download of the torrent contents on the server machine. |
15189b66b3cac5aea140af8e54468c1652252113 | README.md | README.md | shadowcraft-vm
==============
Vagrant configuration for running Shadowcraft-UI and Shadowcraft-Engine
This configuration is originally based on https://github.com/mulderp/chef-rails-stack.
| shadowcraft-vm
==============
Vagrant configuration for running Shadowcraft-UI and Shadowcraft-Engine
This configuration is originally based on https://github.com/mulderp/chef-rails-stack.
## Installation
1. Install Vagrant from http://vagrantup.com
2. Install Virtualbox from http://virtualbox.org
3. Clone this repo
4. Open a command line (cmd.exe, Terminal.app, xterm, etc) and enter the directory of the repo
5. Run the command `vagrant up`. This will download, install, boot, and provision the VM.
6. Run the command `vagrant ssh`. This will ssh into the VM that is now running.
7. Start the ShadowCraft UI backend running by running the following commands:
cd /var/www/shadowcraft-ui/backend
twistd -ny server-5.4.toc
## Runtime and provisioning edits
The environment can be modified to use other versions of the shadowcraft UI and backend as needed during the provisioning by modifying the shadowcraft-setup.sh file. If this file is changed after the VM has already been provisioned, you may recreate the VM by running `vagrant destroy` followed by `vagrant up`. This will completely rebuild the VM, so you'll need to restart the backend again.
The version of ruby/rails/passenger/nginx/etc can be changed by modifying the node.json file. It currently defaults to the following versions:
ruby: 1.8.7-p374
rails: 3.2.19
nginx: 1.2.5
passenger: 3.0.18
The same destroy/up cycle needs to happen if you change the node.json file as well.
## Running Shadowcraft from the VM
Once the VM is up and configured, you can get to the Shadowcraft UI by opening a web browser on your local machine and going to http://localhost:8080.
| Update installation bits in the readme | Update installation bits in the readme
| Markdown | mit | timwoj/shadowcraft-vm | markdown | ## Code Before:
shadowcraft-vm
==============
Vagrant configuration for running Shadowcraft-UI and Shadowcraft-Engine
This configuration is originally based on https://github.com/mulderp/chef-rails-stack.
## Instruction:
Update installation bits in the readme
## Code After:
shadowcraft-vm
==============
Vagrant configuration for running Shadowcraft-UI and Shadowcraft-Engine
This configuration is originally based on https://github.com/mulderp/chef-rails-stack.
## Installation
1. Install Vagrant from http://vagrantup.com
2. Install Virtualbox from http://virtualbox.org
3. Clone this repo
4. Open a command line (cmd.exe, Terminal.app, xterm, etc) and enter the directory of the repo
5. Run the command `vagrant up`. This will download, install, boot, and provision the VM.
6. Run the command `vagrant ssh`. This will ssh into the VM that is now running.
7. Start the ShadowCraft UI backend running by running the following commands:
cd /var/www/shadowcraft-ui/backend
twistd -ny server-5.4.toc
## Runtime and provisioning edits
The environment can be modified to use other versions of the shadowcraft UI and backend as needed during the provisioning by modifying the shadowcraft-setup.sh file. If this file is changed after the VM has already been provisioned, you may recreate the VM by running `vagrant destroy` followed by `vagrant up`. This will completely rebuild the VM, so you'll need to restart the backend again.
The version of ruby/rails/passenger/nginx/etc can be changed by modifying the node.json file. It currently defaults to the following versions:
ruby: 1.8.7-p374
rails: 3.2.19
nginx: 1.2.5
passenger: 3.0.18
The same destroy/up cycle needs to happen if you change the node.json file as well.
## Running Shadowcraft from the VM
Once the VM is up and configured, you can get to the Shadowcraft UI by opening a web browser on your local machine and going to http://localhost:8080.
|
bf461b7fb3f65ae1683f9a0f5beaf4da453cad3d | grunt/config/watch.coffee | grunt/config/watch.coffee | module.exports = (grunt) ->
return {
express:
files: [
'Gruntfile.coffee'
'server-src/*.coffee'
'config/*.yml'
]
options:
spawn: false
tasks: ['copy:app', 'coffee:server', 'express:dev']
client:
files: [
'<%= src %>/*.coffee'
'<%= src %>/views/*.coffee'
'<%= src %>/util/*.coffee'
]
tasks: ['copy:app', 'publish']
coffee2css:
files: [
'grunt/tasks/color2css.coffee'
'<%= src %>/color.coffee'
]
tasks: ['coffee2css', 'publish']
less:
files: ['<%= styles %>/**/*.less']
tasks: ['less', 'publish']
i18n:
files: ['<%= locales %>/*.yaml']
tasks: ['i18next-yaml', 'publish']
jade:
files: ['<%= views %>/**/*.jade']
tasks: ['jade', 'publish']
livereload:
options:
livereload: true
files: [
'<%= watch.express.files %>'
'<%= watch.client.files %>'
'<%= watch.coffee2css.files %>'
#'<%= watch.less.files %>'
'static/css/*.css'
'<%= watch.i18n.files %>'
'<%= watch.jade.files %>'
]
}
| module.exports = (grunt) ->
return {
express:
files: [
'Gruntfile.coffee'
'server-src/*.coffee'
'config/*.yml'
]
options:
spawn: false
tasks: ['copy:app', 'coffee:server', 'express:dev']
client:
files: [
'<%= src %>/*.coffee'
'<%= src %>/views/*.coffee'
'<%= src %>/util/*.coffee'
]
tasks: ['copy:app', 'publish']
coffee2css:
files: [
'grunt/tasks/color2css.coffee'
'<%= src %>/color.coffee'
]
tasks: ['coffee2css', 'publish']
less:
files: ['<%= styles %>/**/*.less']
tasks: ['less', 'publish']
i18n:
files: ['<%= locales %>/*.yaml']
tasks: ['i18next-yaml', 'publish']
jade:
files: ['<%= views %>/**/*.jade']
tasks: ['jade', 'publish']
livereload:
options:
livereload: true
files: [
'<%= watch.express.files %>'
'<%= watch.client.files %>'
'<%= watch.i18n.files %>'
'<%= watch.jade.files %>'
'<%= static %>/css/*.css'
]
}
| Handle css live reloading correctly. | Handle css live reloading correctly.
| CoffeeScript | agpl-3.0 | City-of-Helsinki/servicemap,vaaralav/servicemap,City-of-Helsinki/servicemap,vaaralav/servicemap,City-of-Helsinki/servicemap,vaaralav/servicemap | coffeescript | ## Code Before:
module.exports = (grunt) ->
return {
express:
files: [
'Gruntfile.coffee'
'server-src/*.coffee'
'config/*.yml'
]
options:
spawn: false
tasks: ['copy:app', 'coffee:server', 'express:dev']
client:
files: [
'<%= src %>/*.coffee'
'<%= src %>/views/*.coffee'
'<%= src %>/util/*.coffee'
]
tasks: ['copy:app', 'publish']
coffee2css:
files: [
'grunt/tasks/color2css.coffee'
'<%= src %>/color.coffee'
]
tasks: ['coffee2css', 'publish']
less:
files: ['<%= styles %>/**/*.less']
tasks: ['less', 'publish']
i18n:
files: ['<%= locales %>/*.yaml']
tasks: ['i18next-yaml', 'publish']
jade:
files: ['<%= views %>/**/*.jade']
tasks: ['jade', 'publish']
livereload:
options:
livereload: true
files: [
'<%= watch.express.files %>'
'<%= watch.client.files %>'
'<%= watch.coffee2css.files %>'
#'<%= watch.less.files %>'
'static/css/*.css'
'<%= watch.i18n.files %>'
'<%= watch.jade.files %>'
]
}
## Instruction:
Handle css live reloading correctly.
## Code After:
module.exports = (grunt) ->
return {
express:
files: [
'Gruntfile.coffee'
'server-src/*.coffee'
'config/*.yml'
]
options:
spawn: false
tasks: ['copy:app', 'coffee:server', 'express:dev']
client:
files: [
'<%= src %>/*.coffee'
'<%= src %>/views/*.coffee'
'<%= src %>/util/*.coffee'
]
tasks: ['copy:app', 'publish']
coffee2css:
files: [
'grunt/tasks/color2css.coffee'
'<%= src %>/color.coffee'
]
tasks: ['coffee2css', 'publish']
less:
files: ['<%= styles %>/**/*.less']
tasks: ['less', 'publish']
i18n:
files: ['<%= locales %>/*.yaml']
tasks: ['i18next-yaml', 'publish']
jade:
files: ['<%= views %>/**/*.jade']
tasks: ['jade', 'publish']
livereload:
options:
livereload: true
files: [
'<%= watch.express.files %>'
'<%= watch.client.files %>'
'<%= watch.i18n.files %>'
'<%= watch.jade.files %>'
'<%= static %>/css/*.css'
]
}
|
4158399f50f5ec8f92e19c1e683c7ccebe32c109 | .github/workflows/sentry_release.yml | .github/workflows/sentry_release.yml | name: sentry release
on:
push:
branches:
- release-*/**
jobs:
build:
name: Build
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/setup-node@v1
- name: Set up Ruby
uses: ruby/setup-ruby@v1
with:
ruby-version: 2.6 # Not needed with a .ruby-version file
- name: Capture sdk name
uses: actions-ecosystem/action-regex-match@v2
id: regex-match
with:
text: ${{ github.ref }}
regex: 'refs\/heads\/release-(sentry-\w+)\/.*'
- name: Set sdk-directory path
run: echo ${{format('sdk-directory={0}', steps.regex-match.outputs.group1)}} >> $GITHUB_ENV
- name: Build gem source
working-directory: ${{env.sdk-directory}}
run: |
bundle install
gem build sentry-ruby-core.gemspec
gem build sentry-ruby.gemspec
- name: Archive Artifacts
uses: actions/upload-artifact@v2
with:
name: ${{ github.sha }}
path: ${{env.sdk-directory}}/*.gem
| name: sentry release
on:
push:
branches:
- release-*/**
jobs:
build:
name: Build
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/setup-node@v1
- name: Set up Ruby
uses: ruby/setup-ruby@v1
with:
ruby-version: 2.6 # Not needed with a .ruby-version file
- name: Capture sdk name
uses: actions-ecosystem/action-regex-match@v2
id: regex-match
with:
text: ${{ github.ref }}
regex: 'refs\/heads\/release-(sentry-\w+)\/.*'
- name: Set sdk-directory path
run: echo ${{format('sdk-directory={0}', steps.regex-match.outputs.group1)}} >> $GITHUB_ENV
- name: Build gem source
working-directory: ${{env.sdk-directory}}
run: make build
- name: Archive Artifacts
uses: actions/upload-artifact@v2
with:
name: ${{ github.sha }}
path: ${{env.sdk-directory}}/*.gem
| Use make build for building SDKs | Use make build for building SDKs
| YAML | apache-2.0 | getsentry/raven-ruby | yaml | ## Code Before:
name: sentry release
on:
push:
branches:
- release-*/**
jobs:
build:
name: Build
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/setup-node@v1
- name: Set up Ruby
uses: ruby/setup-ruby@v1
with:
ruby-version: 2.6 # Not needed with a .ruby-version file
- name: Capture sdk name
uses: actions-ecosystem/action-regex-match@v2
id: regex-match
with:
text: ${{ github.ref }}
regex: 'refs\/heads\/release-(sentry-\w+)\/.*'
- name: Set sdk-directory path
run: echo ${{format('sdk-directory={0}', steps.regex-match.outputs.group1)}} >> $GITHUB_ENV
- name: Build gem source
working-directory: ${{env.sdk-directory}}
run: |
bundle install
gem build sentry-ruby-core.gemspec
gem build sentry-ruby.gemspec
- name: Archive Artifacts
uses: actions/upload-artifact@v2
with:
name: ${{ github.sha }}
path: ${{env.sdk-directory}}/*.gem
## Instruction:
Use make build for building SDKs
## Code After:
name: sentry release
on:
push:
branches:
- release-*/**
jobs:
build:
name: Build
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/setup-node@v1
- name: Set up Ruby
uses: ruby/setup-ruby@v1
with:
ruby-version: 2.6 # Not needed with a .ruby-version file
- name: Capture sdk name
uses: actions-ecosystem/action-regex-match@v2
id: regex-match
with:
text: ${{ github.ref }}
regex: 'refs\/heads\/release-(sentry-\w+)\/.*'
- name: Set sdk-directory path
run: echo ${{format('sdk-directory={0}', steps.regex-match.outputs.group1)}} >> $GITHUB_ENV
- name: Build gem source
working-directory: ${{env.sdk-directory}}
run: make build
- name: Archive Artifacts
uses: actions/upload-artifact@v2
with:
name: ${{ github.sha }}
path: ${{env.sdk-directory}}/*.gem
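Everything after the `Capture sdk name` step depends on what the regex's capture group returns. The short Python check below applies the same pattern to a made-up branch ref so it is clear what ends up in the `sdk-directory` variable; the ref shown is hypothetical.

```python
import re

# Same pattern the workflow passes to actions-ecosystem/action-regex-match
# (the backslash-escaped slashes are not needed in Python).
pattern = r"refs/heads/release-(sentry-\w+)/.*"

ref = "refs/heads/release-sentry-ruby/4.4.0"   # hypothetical branch ref

match = re.match(pattern, ref)
if match:
    print(match.group(1))  # -> sentry-ruby, the value exported as sdk-directory
```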
|
ef8a4696a0b5103f62ac2ae07b559a5f323f29ee | grails-app/services/especies/TaxonService.groovy | grails-app/services/especies/TaxonService.groovy | package especies
import grails.transaction.Transactional
@Transactional
class TaxonService {
List list(params) {
params = params + [max: 50]
return Taxon.list(params)
}
Taxon save(taxon) {
taxon.save()
}
Taxon addGbifDetails(scientificName, sourceId, gbifId, gbifName) {
Taxon taxon
if(sourceId == "#N/A") {
taxon = Taxon.findByScientificNameLike(scientificName)
} else {
taxon = Taxon.findBySourceId(sourceId)
}
if(taxon != null && gbifId != "NULL") {
taxon.gbifId = Integer.parseInt(gbifId)
taxon.gbifName = gbifName
taxon.save()
}
taxon
}
Taxon addSpeciesPlusId(gbifId, speciesPlusId) {
Taxon taxon = Taxon.findByGbifId(gbifId)
if(taxon != null) {
taxon.speciesPlusId = speciesPlusId
taxon.save()
}
taxon
}
}
| package especies
import grails.transaction.Transactional
@Transactional
class TaxonService {
List list(params) {
params = params + [max: 50]
params = params + [sort: "scientificName", order: "asc"]
return Taxon.list(params)
}
Taxon save(taxon) {
taxon.save()
}
Taxon addGbifDetails(scientificName, sourceId, gbifId, gbifName) {
Taxon taxon
if(sourceId == "#N/A") {
taxon = Taxon.findByScientificNameLike(scientificName)
} else {
taxon = Taxon.findBySourceId(sourceId)
}
if(taxon != null && gbifId != "NULL") {
taxon.gbifId = Integer.parseInt(gbifId)
taxon.gbifName = gbifName
taxon.save()
}
taxon
}
Taxon addSpeciesPlusId(gbifId, speciesPlusId) {
Taxon taxon = Taxon.findByGbifId(gbifId)
if(taxon != null) {
taxon.speciesPlusId = speciesPlusId
taxon.save()
}
taxon
}
}
| Sort results by scientific name | Sort results by scientific name
| Groovy | mit | unepwcmc/Taxonify,unepwcmc/Taxonify | groovy | ## Code Before:
package especies
import grails.transaction.Transactional
@Transactional
class TaxonService {
List list(params) {
params = params + [max: 50]
return Taxon.list(params)
}
Taxon save(taxon) {
taxon.save()
}
Taxon addGbifDetails(scientificName, sourceId, gbifId, gbifName) {
Taxon taxon
if(sourceId == "#N/A") {
taxon = Taxon.findByScientificNameLike(scientificName)
} else {
taxon = Taxon.findBySourceId(sourceId)
}
if(taxon != null && gbifId != "NULL") {
taxon.gbifId = Integer.parseInt(gbifId)
taxon.gbifName = gbifName
taxon.save()
}
taxon
}
Taxon addSpeciesPlusId(gbifId, speciesPlusId) {
Taxon taxon = Taxon.findByGbifId(gbifId)
if(taxon != null) {
taxon.speciesPlusId = speciesPlusId
taxon.save()
}
taxon
}
}
## Instruction:
Sort results by scientific name
## Code After:
package especies
import grails.transaction.Transactional
@Transactional
class TaxonService {
List list(params) {
params = params + [max: 50]
params = params + [sort: "scientificName", order: "asc"]
return Taxon.list(params)
}
Taxon save(taxon) {
taxon.save()
}
Taxon addGbifDetails(scientificName, sourceId, gbifId, gbifName) {
Taxon taxon
if(sourceId == "#N/A") {
taxon = Taxon.findByScientificNameLike(scientificName)
} else {
taxon = Taxon.findBySourceId(sourceId)
}
if(taxon != null && gbifId != "NULL") {
taxon.gbifId = Integer.parseInt(gbifId)
taxon.gbifName = gbifName
taxon.save()
}
taxon
}
Taxon addSpeciesPlusId(gbifId, speciesPlusId) {
Taxon taxon = Taxon.findByGbifId(gbifId)
if(taxon != null) {
taxon.speciesPlusId = speciesPlusId
taxon.save()
}
taxon
}
}
|
f262a97eac03be09bc9a2f62e681a1337021f459 | build_registry.sh | build_registry.sh |
SRC="https://github.com/docker/distribution.git"
COMMIT_ID=ece8e132bf6585815fdd00990f6215122c58fb3f
CUR_DIR=`pwd`
INSTALL_DIR="${CUR_DIR}/go.bld"
BIN_DIR="${CUR_DIR}/go.bld/bin"
mkdir -p ${BIN_DIR}
GOPATH_BASE="${INSTALL_DIR}/src/github.com/docker"
mkdir -p ${GOPATH_BASE}
cd ${GOPATH_BASE}
git clone ${SRC}
cd distribution
git checkout -q ${COMMIT_ID}
export GOPATH="${GOPATH_BASE}/distribution/Godeps/_workspace:${INSTALL_DIR}:${GOPATH}"
go build -o ${BIN_DIR}/registry ./cmd/registry
#To use the registry you need to copy the file cmd/registry/config-example.yml as config.yml and run it
#./registry ./config.yml
|
build_type=${1}
BUILD_TYPE=${build_type:-dynamic}
SRC="https://github.com/docker/distribution.git"
COMMIT_ID=ece8e132bf6585815fdd00990f6215122c58fb3f
#Install git
yum install -y git
CUR_DIR=`pwd`
INSTALL_DIR="${CUR_DIR}/go.bld"
BIN_DIR="${CUR_DIR}/go.bld/bin"
mkdir -p ${BIN_DIR}
GOPATH_BASE="${INSTALL_DIR}/src/github.com/docker"
mkdir -p ${GOPATH_BASE}
cd ${GOPATH_BASE}
git clone ${SRC}
cd distribution
git checkout -q ${COMMIT_ID}
export GOPATH="${GOPATH_BASE}/distribution/Godeps/_workspace:${INSTALL_DIR}:${GOPATH}"
if [ "${BUILD_TYPE}" == "static" ]
then
BUILDFLAGS="-static -lnetgo"
else
BUILDFLAGS=""
fi
go build -gccgoflags "${BUILDFLAGS}" -o ${BIN_DIR}/registry ./cmd/registry
#To use the registry you need to copy the file cmd/registry/config-example.yml as config.yml and run it
#./registry ./config.yml
cp ./cmd/registry/config-example.yml ${BIN_DIR}/config.yml
| Add option for both dynamic and static build | Add option for both dynamic and static build
| Shell | apache-2.0 | bpradipt/docker-build | shell | ## Code Before:
SRC="https://github.com/docker/distribution.git"
COMMIT_ID=ece8e132bf6585815fdd00990f6215122c58fb3f
CUR_DIR=`pwd`
INSTALL_DIR="${CUR_DIR}/go.bld"
BIN_DIR="${CUR_DIR}/go.bld/bin"
mkdir -p ${BIN_DIR}
GOPATH_BASE="${INSTALL_DIR}/src/github.com/docker"
mkdir -p ${GOPATH_BASE}
cd ${GOPATH_BASE}
git clone ${SRC}
cd distribution
git checkout -q ${COMMIT_ID}
export GOPATH="${GOPATH_BASE}/distribution/Godeps/_workspace:${INSTALL_DIR}:${GOPATH}"
go build -o ${BIN_DIR}/registry ./cmd/registry
#To use the registry you need to copy the file cmd/registry/config-example.yml as config.yml and run it
#./registry ./config.yml
## Instruction:
Add option for both dynamic and static build
## Code After:
build_type=${1}
BUILD_TYPE=${build_type:-dynamic}
SRC="https://github.com/docker/distribution.git"
COMMIT_ID=ece8e132bf6585815fdd00990f6215122c58fb3f
#Install git
yum install -y git
CUR_DIR=`pwd`
INSTALL_DIR="${CUR_DIR}/go.bld"
BIN_DIR="${CUR_DIR}/go.bld/bin"
mkdir -p ${BIN_DIR}
GOPATH_BASE="${INSTALL_DIR}/src/github.com/docker"
mkdir -p ${GOPATH_BASE}
cd ${GOPATH_BASE}
git clone ${SRC}
cd distribution
git checkout -q ${COMMIT_ID}
export GOPATH="${GOPATH_BASE}/distribution/Godeps/_workspace:${INSTALL_DIR}:${GOPATH}"
if [ "${BUILD_TYPE}" == "static" ]
then
BUILDFLAGS="-static -lnetgo"
else
BUILDFLAGS=""
fi
go build -gccgoflags "${BUILDFLAGS}" -o ${BIN_DIR}/registry ./cmd/registry
#To use the registry you need to copy the file cmd/registry/config-example.yml as config.yml and run it
#./registry ./config.yml
cp ./cmd/registry/config-example.yml ${BIN_DIR}/config.yml
|
8acde49dee699c4055d930eb4bfb9916e884026f | app/events/model.js | app/events/model.js | var mongoose = require('mongoose');
var schema = require('validate');
var Event = mongoose.model('Event', {
name: String,
start: Date,
end: Date,
group: String,
notify: Boolean
});
var validate = function (event) {
var test = schema({
name: {
type: 'string',
required: true,
message: 'You must provide a name for the event.'
},
start: {
type: 'date',
required: true,
message: 'You must provide a start time.'
},
end: {
type: 'date',
required: true,
message: 'You must provide an end time.'
},
group: {
type: 'string',
required: false
},
notify: {
type: 'boolean',
required: false
}
}, {typecast: true});
return test.validate(event);
};
module.exports = Event;
module.exports.validate = validate; | var mongoose = require('mongoose');
var schema = require('validate');
var Event = mongoose.model('Event', {
name: String,
start: Date,
end: Date,
group: {type: String, enum: ['attendee', 'staff', 'admin'], default: 'attendee'},
notify: {type: Boolean, default: true}
});
var validate = function (event) {
var test = schema({
name: {
type: 'string',
required: true,
message: 'You must provide a name for the event.'
},
start: {
type: 'date',
required: true,
message: 'You must provide a start time.'
},
end: {
type: 'date',
required: true,
message: 'You must provide an end time.'
},
group: {
type: 'string'
},
notify: {
type: 'boolean'
}
}, {typecast: true});
return test.validate(event);
};
module.exports = Event;
module.exports.validate = validate; | Fix validation issue on events | Fix validation issue on events
| JavaScript | mit | hacksu/kenthackenough,hacksu/kenthackenough | javascript | ## Code Before:
var mongoose = require('mongoose');
var schema = require('validate');
var Event = mongoose.model('Event', {
name: String,
start: Date,
end: Date,
group: String,
notify: Boolean
});
var validate = function (event) {
var test = schema({
name: {
type: 'string',
required: true,
message: 'You must provide a name for the event.'
},
start: {
type: 'date',
required: true,
message: 'You must provide a start time.'
},
end: {
type: 'date',
required: true,
message: 'You must provide an end time.'
},
group: {
type: 'string',
required: false
},
notify: {
type: 'boolean',
required: false
}
}, {typecast: true});
return test.validate(event);
};
module.exports = Event;
module.exports.validate = validate;
## Instruction:
Fix validation issue on events
## Code After:
var mongoose = require('mongoose');
var schema = require('validate');
var Event = mongoose.model('Event', {
name: String,
start: Date,
end: Date,
group: {type: String, enum: ['attendee', 'staff', 'admin'], default: 'attendee'},
notify: {type: Boolean, default: true}
});
var validate = function (event) {
var test = schema({
name: {
type: 'string',
required: true,
message: 'You must provide a name for the event.'
},
start: {
type: 'date',
required: true,
message: 'You must provide a start time.'
},
end: {
type: 'date',
required: true,
message: 'You must provide an end time.'
},
group: {
type: 'string'
},
notify: {
type: 'boolean'
}
}, {typecast: true});
return test.validate(event);
};
module.exports = Event;
module.exports.validate = validate; |
50c6cd94b52777bbd8ca2214b8547789432fcd28 | pwil/README.md | pwil/README.md | PWIL: Primal Wasserstein Imitation Learning
===
Robert Dadashi, Leonard Hussenot, Matthieu Geist, Olivier Pietquin
---
This directory contains the source code accompanying the paper:
Primal Wasserstein Imitation Learning [https://arxiv.org/abs/2006.04678](https://arxiv.org/abs/2006.04678).
# Dependencies
PWIL is compatible with Python 3.7.7. You can install the dependencies using:
pip install -r requirements.txt
You will also need to install Mujoco and use a valid license. Follow the install
instructions [here](https://github.com/openai/mujoco-py).
# Expert demonstrations
We are working on making expert demonstrations available.
# Run PWIL
python -m pwil.trainer --workdir='/tmp/pwil' --env_name='Hopper-v2' --demo_dir=$DEMO_DIR
| PWIL: Primal Wasserstein Imitation Learning
===
Robert Dadashi, Leonard Hussenot, Matthieu Geist, Olivier Pietquin
---
This directory contains the source code accompanying the paper:
Primal Wasserstein Imitation Learning [https://arxiv.org/abs/2006.04678](https://arxiv.org/abs/2006.04678).
# Dependencies
PWIL is compatible with Python 3.7.7. You can install the dependencies using:
pip install -r requirements.txt
You will also need to install Mujoco and use a valid license. Follow the install
instructions [here](https://github.com/openai/mujoco-py).
# Expert demonstrations
PWIL demonstrations are available in a GCS bucket.
DEMO_DIR=/tmp/demonstrations
mkdir $DEMO_DIR
gsutil cp -r gs://gresearch/pwil/* $DEMO_DIR
# Run PWIL
python -m pwil.trainer --workdir='/tmp/pwil' --env_name='Hopper-v2' --demo_dir=$DEMO_DIR
| Add instructions for downloading demonstrations. | Add instructions for downloading demonstrations.
PiperOrigin-RevId: 361779790
| Markdown | apache-2.0 | google-research/google-research,google-research/google-research,google-research/google-research,google-research/google-research,google-research/google-research,google-research/google-research,google-research/google-research,google-research/google-research,google-research/google-research,google-research/google-research,google-research/google-research | markdown | ## Code Before:
PWIL: Primal Wasserstein Imitation Learning
===
Robert Dadashi, Leonard Hussenot, Matthieu Geist, Olivier Pietquin
---
This directory contains the source code accompanying the paper:
Primal Wasserstein Imitation Learning [https://arxiv.org/abs/2006.04678](https://arxiv.org/abs/2006.04678).
# Dependencies
PWIL is compatible with Python 3.7.7. You can install the dependencies using:
pip install -r requirements.txt
You will also need to install Mujoco and use a valid license. Follow the install
instructions [here](https://github.com/openai/mujoco-py).
# Expert demonstrations
We are working on making expert demonstrations available.
# Run PWIL
python -m pwil.trainer --workdir='/tmp/pwil' --env_name='Hopper-v2' --demo_dir=$DEMO_DIR
## Instruction:
Add instructions for downloading demonstrations.
PiperOrigin-RevId: 361779790
## Code After:
PWIL: Primal Wasserstein Imitation Learning
===
Robert Dadashi, Leonard Hussenot, Matthieu Geist, Olivier Pietquin
---
This directory contains the source code accompanying the paper:
Primal Wasserstein Imitation Learning [https://arxiv.org/abs/2006.04678](https://arxiv.org/abs/2006.04678).
# Dependencies
PWIL is compatible with Python 3.7.7. You can install the dependencies using:
pip install -r requirements.txt
You will also need to install Mujoco and use a valid license. Follow the install
instructions [here](https://github.com/openai/mujoco-py).
# Expert demonstrations
PWIL demonstrations are available in a GCS bucket.
DEMO_DIR=/tmp/demonstrations
mkdir $DEMO_DIR
gsutil cp -r gs://gresearch/pwil/* $DEMO_DIR
# Run PWIL
python -m pwil.trainer --workdir='/tmp/pwil' --env_name='Hopper-v2' --demo_dir=$DEMO_DIR
|
39cfba9ee0d896ad920238e739de7f6c1e2aa724 | sql/tables/Block.sql | sql/tables/Block.sql | create table Block (
id serial,
createdate timestamp not null,
displayorder integer not null default 0,
attachment integer null references Attachment(id),
media integer null references Media(id),
image integer null references Image(id),
bodytext text null,
primary key (id)
);
| create table Block (
id serial,
createdate timestamp not null,
displayorder integer not null default 0,
attachment integer null references Attachment(id) on delete set null,
media integer null references Media(id) on delete set null,
image integer null references Image(id) on delete set null,
bodytext text null,
primary key (id)
);
| Add delete constraints for sub-objects for blocks. | Add delete constraints for sub-objects for blocks.
| SQL | mit | silverorange/Building,gauthierm/Building | sql | ## Code Before:
create table Block (
id serial,
createdate timestamp not null,
displayorder integer not null default 0,
attachment integer null references Attachment(id),
media integer null references Media(id),
image integer null references Image(id),
bodytext text null,
primary key (id)
);
## Instruction:
Add delete constraints for sub-objects for blocks.
## Code After:
create table Block (
id serial,
createdate timestamp not null,
displayorder integer not null default 0,
attachment integer null references Attachment(id) on delete set null,
media integer null references Media(id) on delete set null,
image integer null references Image(id) on delete set null,
bodytext text null,
primary key (id)
);
|
9453e62399b9afd4b666f79dca312260e4208b38 | app/views/admin/config/import_export.html.erb | app/views/admin/config/import_export.html.erb | <h1>Import/Export Configuration</h1>
<fieldset>
<legend>Export Configuration</legend>
<div class="text-center">
<p class="muted">Download an export of your <em>currently published</em> configuration settings on this server.</p>
<strong>Last Publish:</strong> <%= ConfigVersion.last_version %><br>
<strong>Total APIs:</strong> <%= ConfigVersion.last_config["apis"].count %><br>
<%= link_to("Download", admin_config_export_path(:format => "yaml", :download => "true"), :class => "btn btn-primary") %>
</div>
</fieldset>
<fieldset>
<legend>Import Configuration</legend>
<%= form_tag(admin_config_import_preview_path, :class => "text-center", :multipart => true) do %>
<p class="muted">Import configuration settings from a YAML file to this server.</p>
<%= file_field_tag(:file) %><br />
<button type="submit" id="upload_button" class="btn btn-primary" data-loading-text="<i class='icon-refresh icon-spin'></i> Uploading...">Next...</button>
<%= javascript_tag do %>
var button = $('#upload_button');
$('form').submit(function(e) {
button.button('loading');
});
<% end %>
<% end %>
</fieldset>
| <h1>Import/Export Configuration</h1>
<fieldset>
<legend>Export Configuration</legend>
<div class="text-center">
<p class="muted">Download an export of your <em>currently published</em> configuration settings on this server.</p>
<strong>Last Publish:</strong> <%= ConfigVersion.last_version %><br>
<strong>Total APIs:</strong> <%= if(ConfigVersion.last_config) then ConfigVersion.last_config["apis"].count else 0 end %><br>
<%= link_to("Download", admin_config_export_path(:format => "yaml", :download => "true"), :class => "btn btn-primary") %>
</div>
</fieldset>
<fieldset>
<legend>Import Configuration</legend>
<%= form_tag(admin_config_import_preview_path, :class => "text-center", :multipart => true) do %>
<p class="muted">Import configuration settings from a YAML file to this server.</p>
<%= file_field_tag(:file) %><br />
<button type="submit" id="upload_button" class="btn btn-primary" data-loading-text="<i class='icon-refresh icon-spin'></i> Uploading...">Next...</button>
<%= javascript_tag do %>
var button = $('#upload_button');
$('form').submit(function(e) {
button.button('loading');
});
<% end %>
<% end %>
</fieldset>
| Fix import/export page when no apis currently exist. | Fix import/export page when no apis currently exist.
| HTML+ERB | mit | apinf/api-umbrella,NREL/api-umbrella,apinf/api-umbrella,cmc333333/api-umbrella-web,apinf/api-umbrella,NREL/api-umbrella,cmc333333/api-umbrella-web,johan--/api-umbrella-web,NREL/api-umbrella-web,johan--/api-umbrella-web,NREL/api-umbrella-web,NREL/api-umbrella,cmc333333/api-umbrella-web,johan--/api-umbrella-web,cmc333333/api-umbrella-web,NREL/api-umbrella-web,NREL/api-umbrella,apinf/api-umbrella,NREL/api-umbrella-web,apinf/api-umbrella,johan--/api-umbrella-web | html+erb | ## Code Before:
<h1>Import/Export Configuration</h1>
<fieldset>
<legend>Export Configuration</legend>
<div class="text-center">
<p class="muted">Download an export of your <em>currently published</em> configuration settings on this server.</p>
<strong>Last Publish:</strong> <%= ConfigVersion.last_version %><br>
<strong>Total APIs:</strong> <%= ConfigVersion.last_config["apis"].count %><br>
<%= link_to("Download", admin_config_export_path(:format => "yaml", :download => "true"), :class => "btn btn-primary") %>
</div>
</fieldset>
<fieldset>
<legend>Import Configuration</legend>
<%= form_tag(admin_config_import_preview_path, :class => "text-center", :multipart => true) do %>
<p class="muted">Import configuration settings from a YAML file to this server.</p>
<%= file_field_tag(:file) %><br />
<button type="submit" id="upload_button" class="btn btn-primary" data-loading-text="<i class='icon-refresh icon-spin'></i> Uploading...">Next...</button>
<%= javascript_tag do %>
var button = $('#upload_button');
$('form').submit(function(e) {
button.button('loading');
});
<% end %>
<% end %>
</fieldset>
## Instruction:
Fix import/export page when no apis currently exist.
## Code After:
<h1>Import/Export Configuration</h1>
<fieldset>
<legend>Export Configuration</legend>
<div class="text-center">
<p class="muted">Download an export of your <em>currently published</em> configuration settings on this server.</p>
<strong>Last Publish:</strong> <%= ConfigVersion.last_version %><br>
<strong>Total APIs:</strong> <%= if(ConfigVersion.last_config) then ConfigVersion.last_config["apis"].count else 0 end %><br>
<%= link_to("Download", admin_config_export_path(:format => "yaml", :download => "true"), :class => "btn btn-primary") %>
</div>
</fieldset>
<fieldset>
<legend>Import Configuration</legend>
<%= form_tag(admin_config_import_preview_path, :class => "text-center", :multipart => true) do %>
<p class="muted">Import configuration settings from a YAML file to this server.</p>
<%= file_field_tag(:file) %><br />
<button type="submit" id="upload_button" class="btn btn-primary" data-loading-text="<i class='icon-refresh icon-spin'></i> Uploading...">Next...</button>
<%= javascript_tag do %>
var button = $('#upload_button');
$('form').submit(function(e) {
button.button('loading');
});
<% end %>
<% end %>
</fieldset>
|
ee49d26e40a0681fb72125d79190cc2ec939f55c | src/modules/map/saga.js | src/modules/map/saga.js | import {takeLatest} from 'redux-saga';
import {call, fork, put} from 'redux-saga/effects';
import {arrayOf} from 'normalizr';
import {receiveAddress} from './actions';
import {mapActions} from './constants';
import {receiveUnits, setFetchError} from '../unit/actions';
import {getFetchUnitsRequest} from '../unit/helpers';
import {unitSchema} from '../unit/constants';
import {createUrl, createRequest, callApi, normalizeEntityResults} from '../api/helpers';
function* onSetLocation({payload: position}: FetchAction) {
const addressParams = {
lat: position[0],
lon: position[1],
page_size: 1
};
const addressRequest = createRequest(createUrl('address/', addressParams));
const {bodyAsJson: addressJson} = yield call(callApi, addressRequest);
const addressData = addressJson.results ? addressJson.results[0] : null;
yield put(receiveAddress(addressData));
const unitParams = {
};
const unitRequest = getFetchUnitsRequest(unitParams);
const {response, bodyAsJson: unitJson} = yield call(callApi, unitRequest);
if(response.status === 200) {
const data = normalizeEntityResults(unitJson.results, arrayOf(unitSchema));
yield put(receiveUnits(data));
} else {
yield put(setFetchError(unitJson.results));
}
}
function* watchSetLocation() {
yield takeLatest(mapActions.SET_LOCATION, onSetLocation);
}
export default function* saga() {
return [
yield fork(watchSetLocation)
];
}
| import {takeLatest} from 'redux-saga';
import {call, fork, put} from 'redux-saga/effects';
import {arrayOf} from 'normalizr';
import {receiveAddress} from './actions';
import {mapActions} from './constants';
import {receiveUnits, setFetchError} from '../unit/actions';
import {getFetchUnitsRequest} from '../unit/helpers';
import {unitSchema} from '../unit/constants';
import {createUrl, createRequest, callApi, normalizeEntityResults} from '../api/helpers';
function* onSetLocation({payload: position}: FetchAction) {
const addressParams = {
lat: position[0],
lon: position[1],
page_size: 1
};
const addressRequest = createRequest(createUrl('address/', addressParams));
const {bodyAsJson: addressJson} = yield call(callApi, addressRequest);
const addressData = addressJson.results ? addressJson.results[0] : null;
yield put(receiveAddress(addressData));
}
function* watchSetLocation() {
yield takeLatest(mapActions.SET_LOCATION, onSetLocation);
}
export default function* saga() {
return [
yield fork(watchSetLocation)
];
}
| Remove unnecessary API call on location set. | Remove unnecessary API call on location set.
| JavaScript | mit | nordsoftware/outdoors-sports-map,nordsoftware/outdoors-sports-map,nordsoftware/outdoors-sports-map | javascript | ## Code Before:
import {takeLatest} from 'redux-saga';
import {call, fork, put} from 'redux-saga/effects';
import {arrayOf} from 'normalizr';
import {receiveAddress} from './actions';
import {mapActions} from './constants';
import {receiveUnits, setFetchError} from '../unit/actions';
import {getFetchUnitsRequest} from '../unit/helpers';
import {unitSchema} from '../unit/constants';
import {createUrl, createRequest, callApi, normalizeEntityResults} from '../api/helpers';
function* onSetLocation({payload: position}: FetchAction) {
const addressParams = {
lat: position[0],
lon: position[1],
page_size: 1
};
const addressRequest = createRequest(createUrl('address/', addressParams));
const {bodyAsJson: addressJson} = yield call(callApi, addressRequest);
const addressData = addressJson.results ? addressJson.results[0] : null;
yield put(receiveAddress(addressData));
const unitParams = {
};
const unitRequest = getFetchUnitsRequest(unitParams);
const {response, bodyAsJson: unitJson} = yield call(callApi, unitRequest);
if(response.status === 200) {
const data = normalizeEntityResults(unitJson.results, arrayOf(unitSchema));
yield put(receiveUnits(data));
} else {
yield put(setFetchError(unitJson.results));
}
}
function* watchSetLocation() {
yield takeLatest(mapActions.SET_LOCATION, onSetLocation);
}
export default function* saga() {
return [
yield fork(watchSetLocation)
];
}
## Instruction:
Remove unnecessary API call on location set.
## Code After:
import {takeLatest} from 'redux-saga';
import {call, fork, put} from 'redux-saga/effects';
import {arrayOf} from 'normalizr';
import {receiveAddress} from './actions';
import {mapActions} from './constants';
import {receiveUnits, setFetchError} from '../unit/actions';
import {getFetchUnitsRequest} from '../unit/helpers';
import {unitSchema} from '../unit/constants';
import {createUrl, createRequest, callApi, normalizeEntityResults} from '../api/helpers';
function* onSetLocation({payload: position}: FetchAction) {
const addressParams = {
lat: position[0],
lon: position[1],
page_size: 1
};
const addressRequest = createRequest(createUrl('address/', addressParams));
const {bodyAsJson: addressJson} = yield call(callApi, addressRequest);
const addressData = addressJson.results ? addressJson.results[0] : null;
yield put(receiveAddress(addressData));
}
function* watchSetLocation() {
yield takeLatest(mapActions.SET_LOCATION, onSetLocation);
}
export default function* saga() {
return [
yield fork(watchSetLocation)
];
}
|
6da980e2b56f245ff3c4be277ede0f93c5f174c0 | util/clean-depend.pl | util/clean-depend.pl |
use strict;
while(<STDIN>) {
print;
last if /^# DO NOT DELETE THIS LINE/;
}
my %files;
my $thisfile="";
while(<STDIN>) {
my ($dummy, $file,$deps)=/^((.*):)? (.*)$/;
$thisfile=$file if defined $file;
next if !defined $deps;
my @deps=split ' ',$deps;
@deps=grep(!/^\//,@deps);
@deps=grep(!/^\\$/,@deps);
push @{$files{$thisfile}},@deps;
}
my $file;
foreach $file (sort keys %files) {
my $len=0;
my $dep;
foreach $dep (sort @{$files{$file}}) {
$len=0 if $len+length($dep)+1 >= 80;
if($len == 0) {
print "\n$file:";
$len=length($file)+1;
}
print " $dep";
$len+=length($dep)+1;
}
}
print "\n";
|
use strict;
while(<STDIN>) {
print;
last if /^# DO NOT DELETE THIS LINE/;
}
my %files;
my $thisfile="";
while(<STDIN>) {
my ($dummy, $file,$deps)=/^((.*):)? (.*)$/;
my $origfile="";
$thisfile=$file if defined $file;
next if !defined $deps;
$origfile=$thisfile;
$origfile=~s/\.o$/.c/;
my @deps=split ' ',$deps;
@deps=grep(!/^\//,@deps);
@deps=grep(!/^\\$/,@deps);
@deps=grep(!/^$origfile$/,@deps);
push @{$files{$thisfile}},@deps;
}
my $file;
foreach $file (sort keys %files) {
my $len=0;
my $dep;
my $origfile=$file;
$origfile=~s/\.o$/.c/;
push @{$files{$file}},$origfile;
foreach $dep (sort @{$files{$file}}) {
$len=0 if $len+length($dep)+1 >= 80;
if($len == 0) {
print "\n$file:";
$len=length($file)+1;
}
print " $dep";
$len+=length($dep)+1;
}
}
print "\n";
| Make sure the source file is included among the dependencies. This is the norm for 'gcc -M' but not for 'makedepend', and is merely introduced here to avoid commit wars. | Make sure the source file is included among the dependencies. This is
the norm for 'gcc -M' but not for 'makedepend', and is merely
introduced here to avoid commit wars.
| Perl | apache-2.0 | openssl/openssl,openssl/openssl,openssl/openssl,openssl/openssl,openssl/openssl,openssl/openssl | perl | ## Code Before:
use strict;
while(<STDIN>) {
print;
last if /^# DO NOT DELETE THIS LINE/;
}
my %files;
my $thisfile="";
while(<STDIN>) {
my ($dummy, $file,$deps)=/^((.*):)? (.*)$/;
$thisfile=$file if defined $file;
next if !defined $deps;
my @deps=split ' ',$deps;
@deps=grep(!/^\//,@deps);
@deps=grep(!/^\\$/,@deps);
push @{$files{$thisfile}},@deps;
}
my $file;
foreach $file (sort keys %files) {
my $len=0;
my $dep;
foreach $dep (sort @{$files{$file}}) {
$len=0 if $len+length($dep)+1 >= 80;
if($len == 0) {
print "\n$file:";
$len=length($file)+1;
}
print " $dep";
$len+=length($dep)+1;
}
}
print "\n";
## Instruction:
Make sure the source file is included among the dependencies. This is
the norm for 'gcc -M' but not for 'makedepend', and is merely
introduced here to avoid commit wars.
## Code After:
use strict;
while(<STDIN>) {
print;
last if /^# DO NOT DELETE THIS LINE/;
}
my %files;
my $thisfile="";
while(<STDIN>) {
my ($dummy, $file,$deps)=/^((.*):)? (.*)$/;
my $origfile="";
$thisfile=$file if defined $file;
next if !defined $deps;
$origfile=$thisfile;
$origfile=~s/\.o$/.c/;
my @deps=split ' ',$deps;
@deps=grep(!/^\//,@deps);
@deps=grep(!/^\\$/,@deps);
@deps=grep(!/^$origfile$/,@deps);
push @{$files{$thisfile}},@deps;
}
my $file;
foreach $file (sort keys %files) {
my $len=0;
my $dep;
my $origfile=$file;
$origfile=~s/\.o$/.c/;
push @{$files{$file}},$origfile;
foreach $dep (sort @{$files{$file}}) {
$len=0 if $len+length($dep)+1 >= 80;
if($len == 0) {
print "\n$file:";
$len=length($file)+1;
}
print " $dep";
$len+=length($dep)+1;
}
}
print "\n";
|
b8d330e27150a97e193f65bfe8a422d3420082ba | src/examples/22-vertex_shading/scene.cpp | src/examples/22-vertex_shading/scene.cpp |
Scene::Scene()
{
}
Scene::~Scene()
{
for(auto it = m_meshes.begin(); it != m_meshes.end(); ++it)
{
delete *it;
}
}
bool Scene::load(const char* fileName)
{
Assimp::Importer importer;
const aiScene* scene = importer.ReadFile(fileName,
aiProcess_Triangulate |
aiProcess_JoinIdenticalVertices |
aiProcess_SortByPType);
if(!scene)
{
std::cerr << "Error loading mesh " << fileName << ": " << importer.GetErrorString() << std::endl;
return false;
}
for (int m = 0; m < scene->mNumMeshes; ++m) {
const aiMesh* aiM = scene->mMeshes[m];
Mesh* mesh = new Mesh();;
if (!mesh->load(aiM))
{
return false;
}
m_meshes.push_back(mesh);
}
return true;
}
void Scene::render(Material* mat)
{
for (auto it = m_meshes.begin(); it != m_meshes.end(); ++it)
{
mat->setUniform("model", (*it)->getModelMatrix());
(*it)->render();
}
}
|
Scene::Scene()
{
}
Scene::~Scene()
{
for(auto it = m_meshes.begin(); it != m_meshes.end(); ++it)
{
delete *it;
}
}
bool Scene::load(const char* fileName)
{
Assimp::Importer importer;
const aiScene* scene = importer.ReadFile(fileName,
aiProcess_Triangulate |
aiProcess_JoinIdenticalVertices |
aiProcess_SortByPType);
if(!scene)
{
std::cerr << "Error loading scene " << fileName << ": " << importer.GetErrorString() << std::endl;
return false;
}
for (int m = 0; m < scene->mNumMeshes; ++m) {
const aiMesh* aiM = scene->mMeshes[m];
Mesh* mesh = new Mesh();
if (!mesh->load(aiM))
{
delete mesh;
return false;
}
m_meshes.push_back(mesh);
}
return true;
}
void Scene::render(Material* mat)
{
for (auto it = m_meshes.begin(); it != m_meshes.end(); ++it)
{
mat->setUniform("model", (*it)->getModelMatrix());
(*it)->render();
}
}
| Fix potential memory leak in vertex shading example | Fix potential memory leak in vertex shading example
| C++ | mit | m-decoster/glExamples,m-decoster/glExamples | c++ | ## Code Before:
Scene::Scene()
{
}
Scene::~Scene()
{
for(auto it = m_meshes.begin(); it != m_meshes.end(); ++it)
{
delete *it;
}
}
bool Scene::load(const char* fileName)
{
Assimp::Importer importer;
const aiScene* scene = importer.ReadFile(fileName,
aiProcess_Triangulate |
aiProcess_JoinIdenticalVertices |
aiProcess_SortByPType);
if(!scene)
{
std::cerr << "Error loading mesh " << fileName << ": " << importer.GetErrorString() << std::endl;
return false;
}
for (int m = 0; m < scene->mNumMeshes; ++m) {
const aiMesh* aiM = scene->mMeshes[m];
Mesh* mesh = new Mesh();;
if (!mesh->load(aiM))
{
return false;
}
m_meshes.push_back(mesh);
}
return true;
}
void Scene::render(Material* mat)
{
for (auto it = m_meshes.begin(); it != m_meshes.end(); ++it)
{
mat->setUniform("model", (*it)->getModelMatrix());
(*it)->render();
}
}
## Instruction:
Fix potential memory leak in vertex shading example
## Code After:
Scene::Scene()
{
}
Scene::~Scene()
{
for(auto it = m_meshes.begin(); it != m_meshes.end(); ++it)
{
delete *it;
}
}
bool Scene::load(const char* fileName)
{
Assimp::Importer importer;
const aiScene* scene = importer.ReadFile(fileName,
aiProcess_Triangulate |
aiProcess_JoinIdenticalVertices |
aiProcess_SortByPType);
if(!scene)
{
std::cerr << "Error loading scene " << fileName << ": " << importer.GetErrorString() << std::endl;
return false;
}
for (int m = 0; m < scene->mNumMeshes; ++m) {
const aiMesh* aiM = scene->mMeshes[m];
Mesh* mesh = new Mesh();
if (!mesh->load(aiM))
{
delete mesh;
return false;
}
m_meshes.push_back(mesh);
}
return true;
}
void Scene::render(Material* mat)
{
for (auto it = m_meshes.begin(); it != m_meshes.end(); ++it)
{
mat->setUniform("model", (*it)->getModelMatrix());
(*it)->render();
}
}
|
493794554779eb0b0f0eb7c5c14c3b8aa0df8a10 | circle.yml | circle.yml | machine:
environment:
PROJECT_NAME: gopher-slack-bot
CLUSTER_NAME: gopher-slack-bot
CLOUDSDK_COMPUTE_ZONE: eu-west-1
DEBIAN_FRONTEND: noninteractive
services:
- docker
dependencies:
pre:
- sudo pip install pyopenssl
- sudo /opt/google-cloud-sdk/bin/gcloud --quiet components update
- sudo /opt/google-cloud-sdk/bin/gcloud --quiet components update kubectl
- echo $ACCT_AUTH | base64 --decode -i > ${HOME}/account-auth.json
- sudo /opt/google-cloud-sdk/bin/gcloud auth activate-service-account ${GCLOUD_ACCOUNT_ID} --key-file ${HOME}/account-auth.json
- sudo /opt/google-cloud-sdk/bin/gcloud --quiet config set container/cluster $CLUSTER_NAME
- sudo /opt/google-cloud-sdk/bin/gcloud --quiet config set compute/zone ${CLOUDSDK_COMPUTE_ZONE}
- sudo /opt/google-cloud-sdk/bin/gcloud --quiet container clusters get-credentials $CLUSTER_NAME
- ./container-make.sh
test:
override:
- ./container-test.sh
deployment:
prod:
branch: master
commands:
- ./container-deploy.sh
| machine:
environment:
PROJECT_NAME: gopher-slack-bot
CLUSTER_NAME: gopher-slack-bot
CLOUDSDK_COMPUTE_ZONE: eu-west-1
DEBIAN_FRONTEND: noninteractive
services:
- docker
dependencies:
pre:
- sudo pip install pyopenssl
- sudo apt-get install python-openssl
- sudo /opt/google-cloud-sdk/bin/gcloud --quiet components update
- sudo /opt/google-cloud-sdk/bin/gcloud --quiet components update kubectl
- echo $ACCT_AUTH | base64 --decode -i > ${HOME}/account-auth.json
- sudo /opt/google-cloud-sdk/bin/gcloud auth activate-service-account ${GCLOUD_ACCOUNT_ID} --key-file ${HOME}/account-auth.json
- sudo /opt/google-cloud-sdk/bin/gcloud --quiet config set container/cluster $CLUSTER_NAME
- sudo /opt/google-cloud-sdk/bin/gcloud --quiet config set compute/zone ${CLOUDSDK_COMPUTE_ZONE}
- sudo /opt/google-cloud-sdk/bin/gcloud --quiet container clusters get-credentials $CLUSTER_NAME
- ./container-make.sh
test:
override:
- ./container-test.sh
deployment:
prod:
branch: master
commands:
- ./container-deploy.sh
| Make sure we really install it | Make sure we really install it
| YAML | apache-2.0 | gopheracademy/gopher,gopheracademy/gopher | yaml | ## Code Before:
machine:
environment:
PROJECT_NAME: gopher-slack-bot
CLUSTER_NAME: gopher-slack-bot
CLOUDSDK_COMPUTE_ZONE: eu-west-1
DEBIAN_FRONTEND: noninteractive
services:
- docker
dependencies:
pre:
- sudo pip install pyopenssl
- sudo /opt/google-cloud-sdk/bin/gcloud --quiet components update
- sudo /opt/google-cloud-sdk/bin/gcloud --quiet components update kubectl
- echo $ACCT_AUTH | base64 --decode -i > ${HOME}/account-auth.json
- sudo /opt/google-cloud-sdk/bin/gcloud auth activate-service-account ${GCLOUD_ACCOUNT_ID} --key-file ${HOME}/account-auth.json
- sudo /opt/google-cloud-sdk/bin/gcloud --quiet config set container/cluster $CLUSTER_NAME
- sudo /opt/google-cloud-sdk/bin/gcloud --quiet config set compute/zone ${CLOUDSDK_COMPUTE_ZONE}
- sudo /opt/google-cloud-sdk/bin/gcloud --quiet container clusters get-credentials $CLUSTER_NAME
- ./container-make.sh
test:
override:
- ./container-test.sh
deployment:
prod:
branch: master
commands:
- ./container-deploy.sh
## Instruction:
Make sure we really install it
## Code After:
machine:
environment:
PROJECT_NAME: gopher-slack-bot
CLUSTER_NAME: gopher-slack-bot
CLOUDSDK_COMPUTE_ZONE: eu-west-1
DEBIAN_FRONTEND: noninteractive
services:
- docker
dependencies:
pre:
- sudo pip install pyopenssl
- sudo apt-get install python-openssl
- sudo /opt/google-cloud-sdk/bin/gcloud --quiet components update
- sudo /opt/google-cloud-sdk/bin/gcloud --quiet components update kubectl
- echo $ACCT_AUTH | base64 --decode -i > ${HOME}/account-auth.json
- sudo /opt/google-cloud-sdk/bin/gcloud auth activate-service-account ${GCLOUD_ACCOUNT_ID} --key-file ${HOME}/account-auth.json
- sudo /opt/google-cloud-sdk/bin/gcloud --quiet config set container/cluster $CLUSTER_NAME
- sudo /opt/google-cloud-sdk/bin/gcloud --quiet config set compute/zone ${CLOUDSDK_COMPUTE_ZONE}
- sudo /opt/google-cloud-sdk/bin/gcloud --quiet container clusters get-credentials $CLUSTER_NAME
- ./container-make.sh
test:
override:
- ./container-test.sh
deployment:
prod:
branch: master
commands:
- ./container-deploy.sh
|
9a5c17781178e8c97a4749e49374c3b4449c7387 | tests/test_models.py | tests/test_models.py | from unittest import TestCase
import unittest.mock
from molecupy.structures import Model, AtomicStructure, SmallMolecule
class ModelTest(TestCase):
def setUp(self):
self.small_molecule1 = unittest.mock.Mock(spec=SmallMolecule)
self.small_molecule2 = unittest.mock.Mock(spec=SmallMolecule)
class ModelCreationTest(ModelTest):
def test_can_create_chain(self):
model = Model()
self.assertIsInstance(model, AtomicStructure)
self.assertEqual(model._atoms, set())
def test_model_repr(self):
model = Model()
self.assertEqual(str(model), "<Model (0 atoms)>")
class ModelSmallMoleculeTests(ModelTest):
def test_can_add_small_molecules(self):
model = Model()
self.assertEqual(model.small_molecules(), set())
model.add_small_molecule(self.small_molecule1)
self.assertEqual(model.small_molecules(), set([self.small_molecule1]))
model.add_small_molecule(self.small_molecule2)
self.assertEqual(
model.small_molecules(),
set([self.small_molecule1, self.small_molecule2])
)
| from unittest import TestCase
import unittest.mock
from molecupy.structures import Model, AtomicStructure, SmallMolecule
class ModelTest(TestCase):
def setUp(self):
self.small_molecule1 = unittest.mock.Mock(spec=SmallMolecule)
self.small_molecule2 = unittest.mock.Mock(spec=SmallMolecule)
class ModelCreationTest(ModelTest):
def test_can_create_chain(self):
model = Model()
self.assertIsInstance(model, AtomicStructure)
self.assertEqual(model._atoms, set())
def test_model_repr(self):
model = Model()
self.assertEqual(str(model), "<Model (0 atoms)>")
class ModelSmallMoleculeTests(ModelTest):
def test_can_add_small_molecules(self):
model = Model()
self.assertEqual(model.small_molecules(), set())
model.add_small_molecule(self.small_molecule1)
self.assertEqual(model.small_molecules(), set([self.small_molecule1]))
model.add_small_molecule(self.small_molecule2)
self.assertEqual(
model.small_molecules(),
set([self.small_molecule1, self.small_molecule2])
)
def test_must_use_method_to_add_small_molecule(self):
model = Model()
self.assertEqual(model.small_molecules(), set())
model.small_molecules().add(self.small_molecule1)
self.assertEqual(model.small_molecules(), set())
| Make small molecules read only | Make small molecules read only
| Python | mit | samirelanduk/atomium,samirelanduk/atomium,samirelanduk/molecupy | python | ## Code Before:
from unittest import TestCase
import unittest.mock
from molecupy.structures import Model, AtomicStructure, SmallMolecule
class ModelTest(TestCase):
def setUp(self):
self.small_molecule1 = unittest.mock.Mock(spec=SmallMolecule)
self.small_molecule2 = unittest.mock.Mock(spec=SmallMolecule)
class ModelCreationTest(ModelTest):
def test_can_create_chain(self):
model = Model()
self.assertIsInstance(model, AtomicStructure)
self.assertEqual(model._atoms, set())
def test_model_repr(self):
model = Model()
self.assertEqual(str(model), "<Model (0 atoms)>")
class ModelSmallMoleculeTests(ModelTest):
def test_can_add_small_molecules(self):
model = Model()
self.assertEqual(model.small_molecules(), set())
model.add_small_molecule(self.small_molecule1)
self.assertEqual(model.small_molecules(), set([self.small_molecule1]))
model.add_small_molecule(self.small_molecule2)
self.assertEqual(
model.small_molecules(),
set([self.small_molecule1, self.small_molecule2])
)
## Instruction:
Make small molecules read only
## Code After:
from unittest import TestCase
import unittest.mock
from molecupy.structures import Model, AtomicStructure, SmallMolecule
class ModelTest(TestCase):
def setUp(self):
self.small_molecule1 = unittest.mock.Mock(spec=SmallMolecule)
self.small_molecule2 = unittest.mock.Mock(spec=SmallMolecule)
class ModelCreationTest(ModelTest):
def test_can_create_chain(self):
model = Model()
self.assertIsInstance(model, AtomicStructure)
self.assertEqual(model._atoms, set())
def test_model_repr(self):
model = Model()
self.assertEqual(str(model), "<Model (0 atoms)>")
class ModelSmallMoleculeTests(ModelTest):
def test_can_add_small_molecules(self):
model = Model()
self.assertEqual(model.small_molecules(), set())
model.add_small_molecule(self.small_molecule1)
self.assertEqual(model.small_molecules(), set([self.small_molecule1]))
model.add_small_molecule(self.small_molecule2)
self.assertEqual(
model.small_molecules(),
set([self.small_molecule1, self.small_molecule2])
)
def test_must_use_method_to_add_small_molecule(self):
model = Model()
self.assertEqual(model.small_molecules(), set())
model.small_molecules().add(self.small_molecule1)
self.assertEqual(model.small_molecules(), set())
|
6284d330c64d3b8323032bc7a002349cc35f3a29 | src/config.js | src/config.js | 'use strict';
var load = require('../src/loader.js');
function config(params) {
var cfg = false;
if ('object' == typeof params) cfg = params;
if ('string' == typeof params && (params.endsWith('.yaml') || params.endsWith('.yml'))) cfg = load.yaml(params);
if ('string' == typeof params && params.endsWith('.json')) cfg = load.json(params);
if ('function' == typeof params) cfg = params();
if (!cfg) throw new Error('Invalid arguments');
return cfg;
}
module.exports = config; | 'use strict';
var load = require('../src/loader.js');
function config(params) {
var cfg = false;
if ('object' == typeof params) cfg = params;
if ('string' == typeof params && /\.ya?ml$/.test(params)) cfg = load.yaml(params);
if ('string' == typeof params && /\.json$/.test(params)) cfg = load.json(params);
if ('function' == typeof params) cfg = params();
if (!cfg) throw new Error('Invalid arguments');
return cfg;
}
module.exports = config; | Change String.endsWith() calls with regex tests to make compatible with node 0.10+ | Change String.endsWith() calls with regex tests to make compatible with node 0.10+
| JavaScript | mit | mrajo/metalsmith-grep | javascript | ## Code Before:
'use strict';
var load = require('../src/loader.js');
function config(params) {
var cfg = false;
if ('object' == typeof params) cfg = params;
if ('string' == typeof params && (params.endsWith('.yaml') || params.endsWith('.yml'))) cfg = load.yaml(params);
if ('string' == typeof params && params.endsWith('.json')) cfg = load.json(params);
if ('function' == typeof params) cfg = params();
if (!cfg) throw new Error('Invalid arguments');
return cfg;
}
module.exports = config;
## Instruction:
Change String.endsWith() calls with regex tests to make compatible with node 0.10+
## Code After:
'use strict';
var load = require('../src/loader.js');
function config(params) {
var cfg = false;
if ('object' == typeof params) cfg = params;
if ('string' == typeof params && /\.ya?ml$/.test(params)) cfg = load.yaml(params);
if ('string' == typeof params && /\.json$/.test(params)) cfg = load.json(params);
if ('function' == typeof params) cfg = params();
if (!cfg) throw new Error('Invalid arguments');
return cfg;
}
module.exports = config; |
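
For context on the compatibility point: `String.prototype.endsWith` is an ES2015 addition, so it is simply missing from the V8 shipped with node 0.10, whereas a regular-expression test works on every supported runtime. A small stand-alone check (the file names are invented):

```js
// On node 0.10, 'config.yaml'.endsWith('.yaml') throws because the method does not exist.
// The regex form expresses the same ".yaml/.yml" and ".json" checks portably.
var isYaml = /\.ya?ml$/.test('config.yaml');   // true (also matches "config.yml")
var isJson = /\.json$/.test('config.yaml');    // false
console.log(isYaml, isJson);
```
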
21604376d2ca5b41c853faa78f96fd19b6f40f80 | app/controllers/oauth_controller.rb | app/controllers/oauth_controller.rb | class OauthController < ApplicationController
def create
auth = request.env["omniauth.auth"]
if auth.nil? || auth.blank?
redirect_to root_path, :notice => "We can't access Twitter at this time. Please try again."
end
user = User.find_by_provider_and_uid(auth["provider"], auth["uid"])
if user.nil?
Rails.logger.debug "User is not nil"
user = User.create_with_omniauth(auth)
Profile.create_with_omniauth(user.id, auth)
login_from_oauth(user.id)
Rails.logger.debug "logged in from auth"
return
elsif user.present? && user.profile.present?
Rails.logger.debug "User is present with a profile"
user.update_from_omniauth(auth)
session[:user_id] = user.id
redirect_to problems_path, :notice => "Welcome back!"
elsif user.present? && user.profile.nil?
redirect_to root_path, :notice => "WHAT"
end
end
private
def login_from_oauth(user_id)
session[:user_id] = user_id
redirect_to welcome_path
end
end | class OauthController < ApplicationController
def create
auth = request.env["omniauth.auth"]
if auth.nil? || auth.blank?
redirect_to root_path, :notice => "We can't access Twitter at this time. Please try again."
end
user = User.find_by_provider_and_uid(auth["provider"], auth["uid"])
if user.nil?
user = User.create_with_omniauth(auth)
Profile.create_with_omniauth(user.id, auth)
login_from_oauth(user.id)
return
elsif user.present? && user.profile.present?
user.update_from_omniauth(auth)
session[:user_id] = user.id
redirect_to problems_path, :notice => "Welcome back!"
elsif user.present? && user.profile.nil?
redirect_to root_path, :notice => "Something went wrong. We're working on a fix."
end
end
private
def login_from_oauth(user_id)
session[:user_id] = user_id
redirect_to welcome_path
end
end | Remove loggers and add a notice to empty profile | Remove loggers and add a notice to empty profile
| Ruby | mit | travisvalentine/POP,travisvalentine/POP,travisvalentine/POP | ruby | ## Code Before:
class OauthController < ApplicationController
def create
auth = request.env["omniauth.auth"]
if auth.nil? || auth.blank?
redirect_to root_path, :notice => "We can't access Twitter at this time. Please try again."
end
user = User.find_by_provider_and_uid(auth["provider"], auth["uid"])
if user.nil?
Rails.logger.debug "User is not nil"
user = User.create_with_omniauth(auth)
Profile.create_with_omniauth(user.id, auth)
login_from_oauth(user.id)
Rails.logger.debug "logged in from auth"
return
elsif user.present? && user.profile.present?
Rails.logger.debug "User is present with a profile"
user.update_from_omniauth(auth)
session[:user_id] = user.id
redirect_to problems_path, :notice => "Welcome back!"
elsif user.present? && user.profile.nil?
redirect_to root_path, :notice => "WHAT"
end
end
private
def login_from_oauth(user_id)
session[:user_id] = user_id
redirect_to welcome_path
end
end
## Instruction:
Remove loggers and add a notice to empty profile
## Code After:
class OauthController < ApplicationController
def create
auth = request.env["omniauth.auth"]
if auth.nil? || auth.blank?
redirect_to root_path, :notice => "We can't access Twitter at this time. Please try again."
end
user = User.find_by_provider_and_uid(auth["provider"], auth["uid"])
if user.nil?
user = User.create_with_omniauth(auth)
Profile.create_with_omniauth(user.id, auth)
login_from_oauth(user.id)
return
elsif user.present? && user.profile.present?
user.update_from_omniauth(auth)
session[:user_id] = user.id
redirect_to problems_path, :notice => "Welcome back!"
elsif user.present? && user.profile.nil?
redirect_to root_path, :notice => "Something went wrong. We're working on a fix."
end
end
private
def login_from_oauth(user_id)
session[:user_id] = user_id
redirect_to welcome_path
end
end |
910d9dba82fae24b7fd2b3167eddf69bb4e4ef71 | config/database.yml | config/database.yml | default: &default
host: <%= Rails.application.secrets[:database][:host] %>
adapter: postgresql
encoding: UTF8
database: <%= Rails.application.secrets[:database][:name] %>
pool: 10
reaping_frequency: 30
username: <%= Rails.application.secrets[:database][:username] %>
password: <%= Rails.application.secrets[:database][:password] %>
development: *default
test: *default
production: *default
database: <%= ENV['DATABASE_URL'] %>
| default: &default
host: <%= Rails.application.secrets[:database][:host] %>
adapter: postgresql
encoding: UTF8
database: <%= Rails.application.secrets[:database][:name] %>
pool: 10
reaping_frequency: 30
development: *default
username: <%= Rails.application.secrets[:database][:username] %>
password: <%= Rails.application.secrets[:database][:password] %>
test: *default
username: <%= Rails.application.secrets[:database][:username] %>
password: <%= Rails.application.secrets[:database][:password] %>
production: *default
database: <%= ENV['DATABASE_URL'] %>
| Move username and password to avoid production conflict | Move username and password to avoid production conflict
| YAML | mit | ajsharma/monologue,ajsharma/monologue,ajsharma/monologue,ajsharma/monologue | yaml | ## Code Before:
default: &default
host: <%= Rails.application.secrets[:database][:host] %>
adapter: postgresql
encoding: UTF8
database: <%= Rails.application.secrets[:database][:name] %>
pool: 10
reaping_frequency: 30
username: <%= Rails.application.secrets[:database][:username] %>
password: <%= Rails.application.secrets[:database][:password] %>
development: *default
test: *default
production: *default
database: <%= ENV['DATABASE_URL'] %>
## Instruction:
Move username and password to avoid production conflict
## Code After:
default: &default
host: <%= Rails.application.secrets[:database][:host] %>
adapter: postgresql
encoding: UTF8
database: <%= Rails.application.secrets[:database][:name] %>
pool: 10
reaping_frequency: 30
development: *default
username: <%= Rails.application.secrets[:database][:username] %>
password: <%= Rails.application.secrets[:database][:password] %>
test: *default
username: <%= Rails.application.secrets[:database][:username] %>
password: <%= Rails.application.secrets[:database][:password] %>
production: *default
database: <%= ENV['DATABASE_URL'] %>
|
169be4228e077edac91ab61000e424b5e1d45a9b | demos/glyph_paint/side_pane.cpp | demos/glyph_paint/side_pane.cpp |
using namespace cppurses;
namespace demos {
namespace glyph_paint {
Side_pane::Side_pane() {
this->width_policy.fixed(16);
space1.wallpaper = L'─';
space2.wallpaper = L'─';
glyph_select.height_policy.preferred(6);
color_select_stack.height_policy.fixed(3);
for (auto& child : color_select_stack.top_row.children.get()) {
child->brush.set_background(Color::Light_gray);
child->brush.set_foreground(Color::Black);
}
show_glyph.height_policy.fixed(1);
show_glyph.set_alignment(Alignment::Center);
}
} // namespace glyph_paint
} // namespace demos
|
using namespace cppurses;
namespace demos {
namespace glyph_paint {
Side_pane::Side_pane() {
this->width_policy.fixed(16);
space1.wallpaper = L'─';
space2.wallpaper = L'─';
glyph_select.height_policy.preferred(6);
color_select_stack.height_policy.fixed(3);
show_glyph.height_policy.fixed(1);
show_glyph.set_alignment(Alignment::Center);
}
} // namespace glyph_paint
} // namespace demos
| Remove duplicate brush color set in glyph paint cycle box. | Remove duplicate brush color set in glyph paint cycle box.
| C++ | mit | a-n-t-h-o-n-y/CPPurses | c++ | ## Code Before:
using namespace cppurses;
namespace demos {
namespace glyph_paint {
Side_pane::Side_pane() {
this->width_policy.fixed(16);
space1.wallpaper = L'─';
space2.wallpaper = L'─';
glyph_select.height_policy.preferred(6);
color_select_stack.height_policy.fixed(3);
for (auto& child : color_select_stack.top_row.children.get()) {
child->brush.set_background(Color::Light_gray);
child->brush.set_foreground(Color::Black);
}
show_glyph.height_policy.fixed(1);
show_glyph.set_alignment(Alignment::Center);
}
} // namespace glyph_paint
} // namespace demos
## Instruction:
Remove duplicate brush color set in glyph paint cycle box.
## Code After:
using namespace cppurses;
namespace demos {
namespace glyph_paint {
Side_pane::Side_pane() {
this->width_policy.fixed(16);
space1.wallpaper = L'─';
space2.wallpaper = L'─';
glyph_select.height_policy.preferred(6);
color_select_stack.height_policy.fixed(3);
show_glyph.height_policy.fixed(1);
show_glyph.set_alignment(Alignment::Center);
}
} // namespace glyph_paint
} // namespace demos
|
8ca3a0ca80293baa3af205db69a8eeee8dd736be | test/Driver/cuda-no-sanitizers.cu | test/Driver/cuda-no-sanitizers.cu | // Check that -fsanitize=foo doesn't get passed down to device-side
// compilation.
//
// REQUIRES: clang-driver
//
// RUN: %clang -### -target x86_64-linux-gnu -c --cuda-gpu-arch=sm_20 -fsanitize=address %s 2>&1 | \
// RUN: FileCheck %s
// CHECK-NOT: error:
// CHECK-DAG: "-fcuda-is-device"
// CHECK-NOT: "-fsanitize=address"
// CHECK-DAG: "-triple" "x86_64--linux-gnu"
// CHECK: "-fsanitize=address"
| // Check that -fsanitize=foo doesn't get passed down to device-side
// compilation.
//
// REQUIRES: clang-driver
//
// RUN: %clang -### -target x86_64-linux-gnu -c --cuda-gpu-arch=sm_20 -fsanitize=address %s 2>&1 | \
// RUN: FileCheck %s
// CHECK-NOT: error: unsupported option '-fsanitize=address'
// CHECK-DAG: "-fcuda-is-device"
// CHECK-NOT: "-fsanitize=address"
// CHECK-DAG: "-triple" "x86_64--linux-gnu"
// CHECK: "-fsanitize=address"
| Fix faulty test from rL288448 | [CUDA] Fix faulty test from rL288448
Summary:
The test introduced by rL288448 is currently failing because
unimportant but unexpected errors appear as output from a test compile
line. This patch looks for a more specific error message, in order to
avoid false positives.
Reviewers: jlebar
Subscribers: cfe-commits
Differential Revision: https://reviews.llvm.org/D27328
Switch to more specific error
git-svn-id: ffe668792ed300d6c2daa1f6eba2e0aa28d7ec6c@288453 91177308-0d34-0410-b5e6-96231b3b80d8
| Cuda | apache-2.0 | apple/swift-clang,llvm-mirror/clang,llvm-mirror/clang,apple/swift-clang,apple/swift-clang,llvm-mirror/clang,llvm-mirror/clang,apple/swift-clang,llvm-mirror/clang,apple/swift-clang,apple/swift-clang,apple/swift-clang,llvm-mirror/clang,llvm-mirror/clang,apple/swift-clang,llvm-mirror/clang,llvm-mirror/clang,llvm-mirror/clang,apple/swift-clang,apple/swift-clang | cuda | ## Code Before:
// Check that -fsanitize=foo doesn't get passed down to device-side
// compilation.
//
// REQUIRES: clang-driver
//
// RUN: %clang -### -target x86_64-linux-gnu -c --cuda-gpu-arch=sm_20 -fsanitize=address %s 2>&1 | \
// RUN: FileCheck %s
// CHECK-NOT: error:
// CHECK-DAG: "-fcuda-is-device"
// CHECK-NOT: "-fsanitize=address"
// CHECK-DAG: "-triple" "x86_64--linux-gnu"
// CHECK: "-fsanitize=address"
## Instruction:
[CUDA] Fix faulty test from rL288448
Summary:
The test introduced by rL288448 is currently failing because
unimportant but unexpected errors appear as output from a test compile
line. This patch looks for a more specific error message, in order to
avoid false positives.
Reviewers: jlebar
Subscribers: cfe-commits
Differential Revision: https://reviews.llvm.org/D27328
Switch to more specific error
git-svn-id: ffe668792ed300d6c2daa1f6eba2e0aa28d7ec6c@288453 91177308-0d34-0410-b5e6-96231b3b80d8
## Code After:
// Check that -fsanitize=foo doesn't get passed down to device-side
// compilation.
//
// REQUIRES: clang-driver
//
// RUN: %clang -### -target x86_64-linux-gnu -c --cuda-gpu-arch=sm_20 -fsanitize=address %s 2>&1 | \
// RUN: FileCheck %s
// CHECK-NOT: error: unsupported option '-fsanitize=address'
// CHECK-DAG: "-fcuda-is-device"
// CHECK-NOT: "-fsanitize=address"
// CHECK-DAG: "-triple" "x86_64--linux-gnu"
// CHECK: "-fsanitize=address"
|
879802a394c17774c97d0c5583cc823e4e19a723 | app/js/arethusa.core/language_settings.js | app/js/arethusa.core/language_settings.js | "use strict";
angular.module('arethusa.core').service('languageSettings', [
'documentStore',
function(documentStore) {
var self = this;
this.languageSpecifics = {
'ara' : {
name: 'Arabic',
lang: 'ar',
leftToRight: false,
font: 'Amiri'
},
'grc' : {
name: 'Greek',
lang: 'gr',
leftToRight: true
}
};
this.langNames = arethusaUtil.inject({}, self.languageSpecifics, function(memo, code, obj) {
memo[obj.lang] = obj.name;
});
this.getFor = function(documentName) {
var document = documentStore.store[documentName];
if (document === undefined) {
return undefined;
}
var lang = document.json.treebank["_xml:lang"];
if (lang in this.languageSpecifics) {
return this.languageSpecifics[lang];
}
return undefined;
};
}
]);
| "use strict";
angular.module('arethusa.core').service('languageSettings', [
'documentStore',
function(documentStore) {
var self = this;
this.languageSpecifics = {
'ara' : {
name: 'Arabic',
lang: 'ar',
leftToRight: false,
font: 'Amiri'
},
'grc' : {
name: 'Greek',
lang: 'gr',
leftToRight: true
},
'heb' : {
name: 'Hebrew',
lang: 'he',
leftToRight: false
}
};
this.langNames = arethusaUtil.inject({}, self.languageSpecifics, function(memo, code, obj) {
memo[obj.lang] = obj.name;
});
var settings = {};
this.setFor = function(documentName, lang) {
settings[documentName] = self.languageSpecifics[lang];
};
this.getFor = function(documentName) {
var cached = settings[documentName];
if (cached) {
return cached;
} else {
var document = documentStore.store[documentName];
if (document === undefined) {
return undefined;
}
var lang = document.json.treebank["_xml:lang"];
if (lang in self.languageSpecifics) {
var langObj = self.languageSpecifics[lang];
self.setFor('treebank', langObj);
return langObj;
}
return undefined;
}
};
}
]);
| Refactor languageSettings and add Hebrew | Refactor languageSettings and add Hebrew
Allow to set a language setting specifically.
| JavaScript | mit | fbaumgardt/arethusa,Masoumeh/arethusa,fbaumgardt/arethusa,alpheios-project/arethusa,Masoumeh/arethusa,latin-language-toolkit/arethusa,PonteIneptique/arethusa,fbaumgardt/arethusa,alpheios-project/arethusa,latin-language-toolkit/arethusa,alpheios-project/arethusa,PonteIneptique/arethusa | javascript | ## Code Before:
"use strict";
angular.module('arethusa.core').service('languageSettings', [
'documentStore',
function(documentStore) {
var self = this;
this.languageSpecifics = {
'ara' : {
name: 'Arabic',
lang: 'ar',
leftToRight: false,
font: 'Amiri'
},
'grc' : {
name: 'Greek',
lang: 'gr',
leftToRight: true
}
};
this.langNames = arethusaUtil.inject({}, self.languageSpecifics, function(memo, code, obj) {
memo[obj.lang] = obj.name;
});
this.getFor = function(documentName) {
var document = documentStore.store[documentName];
if (document === undefined) {
return undefined;
}
var lang = document.json.treebank["_xml:lang"];
if (lang in this.languageSpecifics) {
return this.languageSpecifics[lang];
}
return undefined;
};
}
]);
## Instruction:
Refactor languageSettings and add Hebrew
Allow to set a language setting specifically.
## Code After:
"use strict";
angular.module('arethusa.core').service('languageSettings', [
'documentStore',
function(documentStore) {
var self = this;
this.languageSpecifics = {
'ara' : {
name: 'Arabic',
lang: 'ar',
leftToRight: false,
font: 'Amiri'
},
'grc' : {
name: 'Greek',
lang: 'gr',
leftToRight: true
},
'heb' : {
name: 'Hebrew',
lang: 'he',
leftToRight: false
}
};
this.langNames = arethusaUtil.inject({}, self.languageSpecifics, function(memo, code, obj) {
memo[obj.lang] = obj.name;
});
var settings = {};
this.setFor = function(documentName, lang) {
settings[documentName] = self.languageSpecifics[lang];
};
this.getFor = function(documentName) {
var cached = settings[documentName];
if (cached) {
return cached;
} else {
var document = documentStore.store[documentName];
if (document === undefined) {
return undefined;
}
var lang = document.json.treebank["_xml:lang"];
if (lang in self.languageSpecifics) {
var langObj = self.languageSpecifics[lang];
self.setFor('treebank', langObj);
return langObj;
}
return undefined;
}
};
}
]);
|
6ac80051abae652d3f07fbf2738e5920d24de3a7 | metadata/com.simplemobiletools.applauncher.txt | metadata/com.simplemobiletools.applauncher.txt | Categories:System
License:Apache2
Web Site:
Source Code:https://github.com/SimpleMobileTools/Simple-App-Launcher
Issue Tracker:https://github.com/SimpleMobileTools/Simple-App-Launcher/issues
Changelog:https://github.com/SimpleMobileTools/Simple-App-Launcher/blob/HEAD/CHANGELOG.md
Auto Name:Simple App Launcher
Summary:A simple holder for your favourite app launchers
Description:
You can easily rename or delete any app launcher after long pressing an item.
Contains no unnecessary permissions and provides a dark theme, too.
.
Repo Type:git
Repo:https://github.com/SimpleMobileTools/Simple-App-Launcher
Build:1.0,1
commit=1.0
subdir=app
gradle=yes
Build:1.3,3
commit=1.3
subdir=app
gradle=yes
Auto Update Mode:Version %v
Update Check Mode:Tags
Current Version:1.3
Current Version Code:3
| Categories:System
License:Apache2
Web Site:
Source Code:https://github.com/SimpleMobileTools/Simple-App-Launcher
Issue Tracker:https://github.com/SimpleMobileTools/Simple-App-Launcher/issues
Changelog:https://github.com/SimpleMobileTools/Simple-App-Launcher/blob/HEAD/CHANGELOG.md
Auto Name:Simple App Launcher
Summary:A simple holder for your favourite app launchers
Description:
You can easily rename or delete any app launcher after long pressing an item.
Contains no unnecessary permissions and provides a dark theme, too.
.
Repo Type:git
Repo:https://github.com/SimpleMobileTools/Simple-App-Launcher
Build:1.0,1
commit=1.0
subdir=app
gradle=yes
Build:1.3,3
commit=1.3
subdir=app
gradle=yes
Build:1.4,4
commit=1.4
subdir=app
gradle=yes
Auto Update Mode:Version %v
Update Check Mode:Tags
Current Version:1.4
Current Version Code:4
| Update Simple App Launcher to 1.4 (4) | Update Simple App Launcher to 1.4 (4)
| Text | agpl-3.0 | f-droid/fdroiddata,f-droid/fdroiddata,f-droid/fdroid-data | text | ## Code Before:
Categories:System
License:Apache2
Web Site:
Source Code:https://github.com/SimpleMobileTools/Simple-App-Launcher
Issue Tracker:https://github.com/SimpleMobileTools/Simple-App-Launcher/issues
Changelog:https://github.com/SimpleMobileTools/Simple-App-Launcher/blob/HEAD/CHANGELOG.md
Auto Name:Simple App Launcher
Summary:A simple holder for your favourite app launchers
Description:
You can easily rename or delete any app launcher after long pressing an item.
Contains no unnecessary permissions and provides a dark theme, too.
.
Repo Type:git
Repo:https://github.com/SimpleMobileTools/Simple-App-Launcher
Build:1.0,1
commit=1.0
subdir=app
gradle=yes
Build:1.3,3
commit=1.3
subdir=app
gradle=yes
Auto Update Mode:Version %v
Update Check Mode:Tags
Current Version:1.3
Current Version Code:3
## Instruction:
Update Simple App Launcher to 1.4 (4)
## Code After:
Categories:System
License:Apache2
Web Site:
Source Code:https://github.com/SimpleMobileTools/Simple-App-Launcher
Issue Tracker:https://github.com/SimpleMobileTools/Simple-App-Launcher/issues
Changelog:https://github.com/SimpleMobileTools/Simple-App-Launcher/blob/HEAD/CHANGELOG.md
Auto Name:Simple App Launcher
Summary:A simple holder for your favourite app launchers
Description:
You can easily rename or delete any app launcher after long pressing an item.
Contains no unnecessary permissions and provides a dark theme, too.
.
Repo Type:git
Repo:https://github.com/SimpleMobileTools/Simple-App-Launcher
Build:1.0,1
commit=1.0
subdir=app
gradle=yes
Build:1.3,3
commit=1.3
subdir=app
gradle=yes
Build:1.4,4
commit=1.4
subdir=app
gradle=yes
Auto Update Mode:Version %v
Update Check Mode:Tags
Current Version:1.4
Current Version Code:4
|
40688413e59aaabd4a92dba4d2f402fb42fee143 | 1-multiples-of-3-and-5.py | 1-multiples-of-3-and-5.py | from itertools import chain
def threes_and_fives_gen(num=1000):
for i in range(num):
if i % 3 == 0 or i % 5 == 0:
yield i
def threes_and_fives_fun(n):
return set(chain(range(3, n+1, 3), range(5, n+1, 5)))
def solve(n):
return sum(
filter(lambda x: x%3==0 or x%5==0,
range(1, n)
)
)
if __name__ == '__main__':
print(sum(three_and_fives_gen(10000000)))
| from itertools import chain
def threes_and_fives_gen(num=1000):
for i in range(num):
if i % 3 == 0 or i % 5 == 0:
yield i
def threes_and_fives_fun(n):
return set(chain(range(3, n+1, 3), range(5, n+1, 5)))
def solve(n):
return sum(
filter(lambda x: x%3==0 or x%5==0,
range(1, n)
)
)
def solve_2(n):
return sum(
x
for x in range(1, n)
if x%3==0 or x%5==0
)
if __name__ == '__main__':
print(solve_2(1000000))
| Add gen exp solution to 1 | Add gen exp solution to 1
| Python | mit | dawran6/project-euler | python | ## Code Before:
from itertools import chain
def threes_and_fives_gen(num=1000):
for i in range(num):
if i % 3 == 0 or i % 5 == 0:
yield i
def threes_and_fives_fun(n):
return set(chain(range(3, n+1, 3), range(5, n+1, 5)))
def solve(n):
return sum(
filter(lambda x: x%3==0 or x%5==0,
range(1, n)
)
)
if __name__ == '__main__':
print(sum(three_and_fives_gen(10000000)))
## Instruction:
Add gen exp solution to 1
## Code After:
from itertools import chain
def threes_and_fives_gen(num=1000):
for i in range(num):
if i % 3 == 0 or i % 5 == 0:
yield i
def threes_and_fives_fun(n):
return set(chain(range(3, n+1, 3), range(5, n+1, 5)))
def solve(n):
return sum(
filter(lambda x: x%3==0 or x%5==0,
range(1, n)
)
)
def solve_2(n):
return sum(
x
for x in range(1, n)
if x%3==0 or x%5==0
)
if __name__ == '__main__':
print(solve_2(1000000))
|
dc01d50974784f9a2ece241dc7ef7c743db10e65 | src/javascript/binary/pages/chartapp.js | src/javascript/binary/pages/chartapp.js | (function () {
'use strict';
var isMac = /Mac/i.test(navigator.platform),
isIOS = /iPhone|iPad|iPod/i.test(navigator.userAgent),
isAndroid = /Android/i.test(navigator.userAgent),
isWindowsPhone = /Windows Phone/i.test(navigator.userAgent),
isJavaInstalled = (deployJava.getJREs().length > 0) && deployJava.versionCheck("1.5+"),
isMobile = isIOS || isAndroid || isWindowsPhone,
canBeInstalled = isJavaInstalled && !isMobile;
$('#install-java').toggle(!isJavaInstalled);
$('#download-app').toggle(canBeInstalled);
$('#install-java').on('click', function () {
deployJava.installLatestJava();
});
$('#download-app').on('click', function () {
if (isMac) {
alert('You need to change your security preferences!');
return;
}
if (isMobile) {
alert('The charting app is not available on mobile devices!');
}
});
})(); | (function () {
'use strict';
var isMac = /Mac/i.test(navigator.platform),
isIOS = /iPhone|iPad|iPod/i.test(navigator.userAgent),
isAndroid = /Android/i.test(navigator.userAgent),
isWindowsPhone = /Windows Phone/i.test(navigator.userAgent),
isJavaInstalled = (deployJava.getJREs().length > 0) && deployJava.versionCheck("1.5+"),
isMobile = isIOS || isAndroid || isWindowsPhone,
shouldBeInstalled = !isJavaInstalled && !isMobile;
$('#install-java').toggle(shouldBeInstalled);
$('#download-app').toggle(isJavaInstalled);
$('#install-java').on('click', function () {
deployJava.installLatestJava();
});
$('#download-app').on('click', function () {
if (isMac) {
alert('You need to change your security preferences!');
return;
}
if (isMobile) {
alert('The charting app is not available on mobile devices!');
}
});
})();
| Fix Java chart app detection logic | Fix Java chart app detection logic
| JavaScript | apache-2.0 | massihx/binary-static,einhverfr/binary-static,borisyankov/binary-static,animeshsaxena/binary-static,massihx/binary-static,tfoertsch/binary-static,einhverfr/binary-static,junbon/binary-static-www2,tfoertsch/binary-static,borisyankov/binary-static,massihx/binary-static,animeshsaxena/binary-static,animeshsaxena/binary-static,massihx/binary-static,junbon/binary-static-www2,borisyankov/binary-static,brodiecapel16/binary-static,brodiecapel16/binary-static,animeshsaxena/binary-static,einhverfr/binary-static,brodiecapel16/binary-static,borisyankov/binary-static,brodiecapel16/binary-static,einhverfr/binary-static | javascript | ## Code Before:
(function () {
'use strict';
var isMac = /Mac/i.test(navigator.platform),
isIOS = /iPhone|iPad|iPod/i.test(navigator.userAgent),
isAndroid = /Android/i.test(navigator.userAgent),
isWindowsPhone = /Windows Phone/i.test(navigator.userAgent),
isJavaInstalled = (deployJava.getJREs().length > 0) && deployJava.versionCheck("1.5+"),
isMobile = isIOS || isAndroid || isWindowsPhone,
canBeInstalled = isJavaInstalled && !isMobile;
$('#install-java').toggle(!isJavaInstalled);
$('#download-app').toggle(canBeInstalled);
$('#install-java').on('click', function () {
deployJava.installLatestJava();
});
$('#download-app').on('click', function () {
if (isMac) {
alert('You need to change your security preferences!');
return;
}
if (isMobile) {
alert('The charting app is not available on mobile devices!');
}
});
})();
## Instruction:
Fix Java chart app detection logic
## Code After:
(function () {
'use strict';
var isMac = /Mac/i.test(navigator.platform),
isIOS = /iPhone|iPad|iPod/i.test(navigator.userAgent),
isAndroid = /Android/i.test(navigator.userAgent),
isWindowsPhone = /Windows Phone/i.test(navigator.userAgent),
isJavaInstalled = (deployJava.getJREs().length > 0) && deployJava.versionCheck("1.5+"),
isMobile = isIOS || isAndroid || isWindowsPhone,
shouldBeInstalled = !isJavaInstalled && !isMobile;
$('#install-java').toggle(shouldBeInstalled);
$('#download-app').toggle(isJavaInstalled);
$('#install-java').on('click', function () {
deployJava.installLatestJava();
});
$('#download-app').on('click', function () {
if (isMac) {
alert('You need to change your security preferences!');
return;
}
if (isMobile) {
alert('The charting app is not available on mobile devices!');
}
});
})();
|
d12e99cc2e5e7d4240bb37ffcc658253fb166596 | app/Http/Controllers/Admin/TournamentPoolController.php | app/Http/Controllers/Admin/TournamentPoolController.php | <?php
namespace App\Http\Controllers\Admin;
use App\Pool;
use Illuminate\Http\Request;
use App\Http\Controllers\Controller;
class TournamentPoolController extends Controller
{
/**
* Update the specified resource in storage.
*
* @param \Illuminate\Http\Request $request
* @param int $id
* @return \Illuminate\Http\Response
*
* @author Doran Kayoumi
*/
public function update(Request $request, $id) {
echo "coucou";
}
}
| <?php
namespace App\Http\Controllers\Admin;
use App\Pool;
use App\Contender;
use Illuminate\Http\Request;
use App\Http\Controllers\Controller;
class TournamentPoolController extends Controller
{
/**
* Update the specified resource in storage.
*
* @param \Illuminate\Http\Request $request
* @param int $tournament_id
* @param int $pool_id
* @return \Illuminate\Http\Response
*
* @author Doran Kayoumi
*/
public function update(Request $request, $tournament_id, $pool_id) {
// get pool, set it to finished and save changes
$pool = Pool::find($pool_id);
$pool->isFinished = 1;
$pool->save();
// find contender for the next pool with the current rank and current pool and set it with the team id
$contender = Contender::where('pool_from_id', $pool_id)->where('rank_in_pool', $request->rank_in_pool)->firstOrFail();
$contender->team_id = $request->team_id;
$contender->save();
}
}
| Set pool to finished and set contenders for next pools with team ids | Set pool to finished and set contenders for next pools with team ids
| PHP | mit | CPNV-ES/Joutes,CPNV-ES/Joutes,CPNV-ES/Joutes,CPNV-ES/Joutes | php | ## Code Before:
<?php
namespace App\Http\Controllers\Admin;
use App\Pool;
use Illuminate\Http\Request;
use App\Http\Controllers\Controller;
class TournamentPoolController extends Controller
{
/**
* Update the specified resource in storage.
*
* @param \Illuminate\Http\Request $request
* @param int $id
* @return \Illuminate\Http\Response
*
* @author Doran Kayoumi
*/
public function update(Request $request, $id) {
echo "coucou";
}
}
## Instruction:
Set pool to finished and set contenders for next pools with team ids
## Code After:
<?php
namespace App\Http\Controllers\Admin;
use App\Pool;
use App\Contender;
use Illuminate\Http\Request;
use App\Http\Controllers\Controller;
class TournamentPoolController extends Controller
{
/**
* Update the specified resource in storage.
*
* @param \Illuminate\Http\Request $request
* @param int $tournament_id
* @param int $pool_id
* @return \Illuminate\Http\Response
*
* @author Doran Kayoumi
*/
public function update(Request $request, $tournament_id, $pool_id) {
// get pool, set it to finished and save changes
$pool = Pool::find($pool_id);
$pool->isFinished = 1;
$pool->save();
// find contender for the next pool with the current rank and current pool and set it with the team id
$contender = Contender::where('pool_from_id', $pool_id)->where('rank_in_pool', $request->rank_in_pool)->firstOrFail();
$contender->team_id = $request->team_id;
$contender->save();
}
}
|
4e26388f18bacff792ad5a97b4fb815ee72f8cec | .travis.yml | .travis.yml | language: ruby
before_install:
- gem install bundler -v '= 1.5.1' # http://changelog.travis-ci.com/post/71633370723/mri-ruby-2-1-0-and-jruby-1-7-9-available
before_script:
- ./spec/setup/arangodb.sh
rvm:
- 1.9.3
- 2.0.0
- 2.1.0
- jruby-19mode
- jruby-head
- rbx-2.2.3
- ruby-head
env:
- VERSION=1.4.4 ARANGODB_DISABLE_AUTHENTIFICATION=true
- VERSION=1.4.4 ARANGODB_DISABLE_AUTHENTIFICATION=false
matrix:
allow_failures:
- rvm: ruby-head
- rvm: jruby-head
script: "bundle exec rake ci"
| language: ruby
before_install:
- gem install bundler -v '= 1.5.1' # http://changelog.travis-ci.com/post/71633370723/mri-ruby-2-1-0-and-jruby-1-7-9-available
before_script:
- ./spec/setup/arangodb.sh
rvm:
- 1.9.3
- 2.0.0
- 2.1.0
- jruby-19mode
- jruby-head
- rbx-2.2.3
- ruby-head
env:
- VERSION=1.4.4 ARANGODB_DISABLE_AUTHENTIFICATION=true
- VERSION=1.4.4 ARANGODB_DISABLE_AUTHENTIFICATION=false
matrix:
allow_failures:
- rvm: ruby-head
- rvm: jruby-head
- rvm: 2.1.0
script: "bundle exec rake ci"
| Allow failure on 2.1.0 for now | Allow failure on 2.1.0 for now
It segfaults since I upgraded mutant and unparser to the newest version.
A ticket has been created.
https://github.com/mbj/mutant/issues/151
| YAML | apache-2.0 | triAGENS/ashikawa-core,XescuGC/ashikawa-core,triAGENS/ashikawa-core,XescuGC/ashikawa-core | yaml | ## Code Before:
language: ruby
before_install:
- gem install bundler -v '= 1.5.1' # http://changelog.travis-ci.com/post/71633370723/mri-ruby-2-1-0-and-jruby-1-7-9-available
before_script:
- ./spec/setup/arangodb.sh
rvm:
- 1.9.3
- 2.0.0
- 2.1.0
- jruby-19mode
- jruby-head
- rbx-2.2.3
- ruby-head
env:
- VERSION=1.4.4 ARANGODB_DISABLE_AUTHENTIFICATION=true
- VERSION=1.4.4 ARANGODB_DISABLE_AUTHENTIFICATION=false
matrix:
allow_failures:
- rvm: ruby-head
- rvm: jruby-head
script: "bundle exec rake ci"
## Instruction:
Allow failure on 2.1.0 for now
It segfaults since I upgraded mutant and unparser to the newest version.
A ticket has been created.
https://github.com/mbj/mutant/issues/151
## Code After:
language: ruby
before_install:
- gem install bundler -v '= 1.5.1' # http://changelog.travis-ci.com/post/71633370723/mri-ruby-2-1-0-and-jruby-1-7-9-available
before_script:
- ./spec/setup/arangodb.sh
rvm:
- 1.9.3
- 2.0.0
- 2.1.0
- jruby-19mode
- jruby-head
- rbx-2.2.3
- ruby-head
env:
- VERSION=1.4.4 ARANGODB_DISABLE_AUTHENTIFICATION=true
- VERSION=1.4.4 ARANGODB_DISABLE_AUTHENTIFICATION=false
matrix:
allow_failures:
- rvm: ruby-head
- rvm: jruby-head
- rvm: 2.1.0
script: "bundle exec rake ci"
|
2302f2eb9f231cfa7cf2eb8554d64e26e8f40c75 | README.md | README.md | Reddit Bot for converting Rick & Morty Schmeckles to USD
| Reddit Bot for converting Rick & Morty Schmeckles to USD
See comment history for [/u/SchmeckleBot](https://www.reddit.com/user/SchmeckleBot/)
| Add link to reddit user | Add link to reddit user | Markdown | mit | Elucidation/schmeckle_bot,Elucidation/schmeckle_bot | markdown | ## Code Before:
Reddit Bot for converting Rick & Morty Schmeckles to USD
## Instruction:
Add link to reddit user
## Code After:
Reddit Bot for converting Rick & Morty Schmeckles to USD
See comment history for [/u/SchmeckleBot](https://www.reddit.com/user/SchmeckleBot/)
|
5440fd3816a13b51cfdaf2019eee796ad801d25b | .travis.yml | .travis.yml | language: python
python:
- 2.6
- 2.7
- 3.2
- 3.3
before_install:
- easy_install -q pyzmq
install:
- python setup.py install -q
script:
- if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then iptest -w /tmp; fi
- if [[ $TRAVIS_PYTHON_VERSION == '3.'* ]]; then iptest3 -w /tmp; fi
| language: python
python:
- 2.6
- 2.7
- 3.2
- 3.3
before_install:
- easy_install -q pyzmq
- easy_install pygments
- easy_install jinja2
- easy_install markdown
- easy_install sphinx
install:
- python setup.py install -q
script:
- if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then iptest -w /tmp; fi
- if [[ $TRAVIS_PYTHON_VERSION == '3.'* ]]; then iptest3 -w /tmp; fi
| Add nbconvert dependencies to TravisCI config | Add nbconvert dependencies to TravisCI config
| YAML | bsd-3-clause | ipython/ipython,ipython/ipython | yaml | ## Code Before:
language: python
python:
- 2.6
- 2.7
- 3.2
- 3.3
before_install:
- easy_install -q pyzmq
install:
- python setup.py install -q
script:
- if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then iptest -w /tmp; fi
- if [[ $TRAVIS_PYTHON_VERSION == '3.'* ]]; then iptest3 -w /tmp; fi
## Instruction:
Add nbconvert dependencies to TravisCI config
## Code After:
language: python
python:
- 2.6
- 2.7
- 3.2
- 3.3
before_install:
- easy_install -q pyzmq
- easy_install pygments
- easy_install jinja2
- easy_install markdown
- easy_install sphinx
install:
- python setup.py install -q
script:
- if [[ $TRAVIS_PYTHON_VERSION == '2.'* ]]; then iptest -w /tmp; fi
- if [[ $TRAVIS_PYTHON_VERSION == '3.'* ]]; then iptest3 -w /tmp; fi
|
7fe252be130d270d7421da2c7595a09159515cf8 | core/WebItem.qml | core/WebItem.qml | Rectangle {
signal clicked;
property bool clickable: true;
property bool hoverable: true;
color: "transparent";
property bool hover;
property string cursor;
onCursorChanged: {
this.element.css('cursor', value);
}
onClickableChanged: {
if (value){
this.element.click(this.clicked.bind(this))
}
else {
this.element.unbind('click')
}
}
onHoverableChanged: {
var self = this
if (value){
this.element.hover(function() { self.hover = true }, function() { self.hover = false })
}
else {
this.element.unbind('mouseenter mouseleave')
}
}
onCompleted: {
var self = this
if (this.clickable)
this.element.click(this.clicked.bind(this))
if (this.hoverable)
this.element.hover(function() { self.hover = true }, function() { self.hover = false })
}
} | Rectangle {
signal clicked;
property bool hover;
property bool clickable: true;
property bool hoverable: true;
property string cursor;
color: "transparent";
onCursorChanged: { this.element.css('cursor', value) }
onClickableChanged: {
if (value)
this.element.click(this.clicked.bind(this))
else
this.element.unbind('click')
}
onHoverableChanged: {
var self = this
if (value)
this.element.hover(function() { self.hover = true }, function() { self.hover = false })
else
this.element.unbind('mouseenter mouseleave')
}
onCompleted: {
var self = this
if (this.clickable)
this.element.click(this.clicked.bind(this))
if (this.hoverable)
this.element.hover(function() { self.hover = true }, function() { self.hover = false })
this.cursor = "pointer"
}
}
| Set 'pointer' cursor as default for webItem + fix format. | Set 'pointer' cursor as default for webItem + fix format.
| QML | mit | pureqml/qmlcore,pureqml/qmlcore,pureqml/qmlcore | qml | ## Code Before:
Rectangle {
signal clicked;
property bool clickable: true;
property bool hoverable: true;
color: "transparent";
property bool hover;
property string cursor;
onCursorChanged: {
this.element.css('cursor', value);
}
onClickableChanged: {
if (value){
this.element.click(this.clicked.bind(this))
}
else {
this.element.unbind('click')
}
}
onHoverableChanged: {
var self = this
if (value){
this.element.hover(function() { self.hover = true }, function() { self.hover = false })
}
else {
this.element.unbind('mouseenter mouseleave')
}
}
onCompleted: {
var self = this
if (this.clickable)
this.element.click(this.clicked.bind(this))
if (this.hoverable)
this.element.hover(function() { self.hover = true }, function() { self.hover = false })
}
}
## Instruction:
Set 'pointer' cursor as default for webItem + fix format.
## Code After:
Rectangle {
signal clicked;
property bool hover;
property bool clickable: true;
property bool hoverable: true;
property string cursor;
color: "transparent";
onCursorChanged: { this.element.css('cursor', value) }
onClickableChanged: {
if (value)
this.element.click(this.clicked.bind(this))
else
this.element.unbind('click')
}
onHoverableChanged: {
var self = this
if (value)
this.element.hover(function() { self.hover = true }, function() { self.hover = false })
else
this.element.unbind('mouseenter mouseleave')
}
onCompleted: {
var self = this
if (this.clickable)
this.element.click(this.clicked.bind(this))
if (this.hoverable)
this.element.hover(function() { self.hover = true }, function() { self.hover = false })
this.cursor = "pointer"
}
}
|
2598de5b89974f5345c4025dee425787cee61eb9 | runs/templates/runs/runs_table.html | runs/templates/runs/runs_table.html | {% load static %}
<table class="table table-bordered table-hover table-striped" id="runs_table">
<thead>
<tr>
<th>Id</th>
<th>Test Name</th>
<th>User</th>
<th>Start</th>
<th>End</th>
<th>Environment</th>
<th>Status</th>
<th>Output</th>
</tr>
</thead>
<tbody>
{% for run in runs_list %}
<tr class="{% if run.status.id == 1 %}active{% elif run.status.id == 2 %}danger{% elif run.status.id == 3 %}success{% else %}warning{% endif %}">
<td>{{ run.id }}</td>
<td>{{ run.test.name }}</td>
<td>{{ run.user.name }}</td>
<td>{% if run.start %}{{ run.start }}{% else %}{% endif %}</td>
<td>{% if run.end %}{{ run.end }}{% else %}{% endif %}</td>
<td>{{ run.environment.description }}</td>
<td>{{ run.status.value|upper }}</td>
<td><a target="_blank" href="{% static 'runs/autotests/runs/' %}{{ run.id }}.txt">View</a></td>
</tr>
{% endfor %}
</tbody>
</table>
| {% load static %}
<table class="table table-bordered table-hover table-striped" id="runs_table">
<thead>
<tr>
<th>Id</th>
<th>Test Name</th>
<th>User</th>
<th>Start</th>
<th>End</th>
<th>Environment</th>
<th>Status</th>
<th>Output</th>
</tr>
</thead>
<tbody>
{% for run in runs_list %}
<tr class="{% if run.status.id == 1 %}active{% elif run.status.id == 2 %}danger{% elif run.status.id == 3 %}success{% else %}warning{% endif %}">
<td>{{ run.id }}</td>
<td>{{ run.test.name }}</td>
<td>{{ run.user.name }}</td>
<td>{% if run.start %}{{ run.start }}{% else %}{% endif %}</td>
<td>{% if run.end %}{{ run.end }}{% else %}{% endif %}</td>
<td>{{ run.environment.description }}</td>
<td>{{ run.status.value|upper }}</td>
<td><a target="_blank" href="{% static 'runs/logs' %}{{ run.id }}.txt">View</a></td>
</tr>
{% endfor %}
</tbody>
</table>
| Fix invalid path for log in the table | Fix invalid path for log in the table
| HTML | mit | jfelipefilho/test-manager,jfelipefilho/test-manager,jfelipefilho/test-manager | html | ## Code Before:
{% load static %}
<table class="table table-bordered table-hover table-striped" id="runs_table">
<thead>
<tr>
<th>Id</th>
<th>Test Name</th>
<th>User</th>
<th>Start</th>
<th>End</th>
<th>Environment</th>
<th>Status</th>
<th>Output</th>
</tr>
</thead>
<tbody>
{% for run in runs_list %}
<tr class="{% if run.status.id == 1 %}active{% elif run.status.id == 2 %}danger{% elif run.status.id == 3 %}success{% else %}warning{% endif %}">
<td>{{ run.id }}</td>
<td>{{ run.test.name }}</td>
<td>{{ run.user.name }}</td>
<td>{% if run.start %}{{ run.start }}{% else %}{% endif %}</td>
<td>{% if run.end %}{{ run.end }}{% else %}{% endif %}</td>
<td>{{ run.environment.description }}</td>
<td>{{ run.status.value|upper }}</td>
<td><a target="_blank" href="{% static 'runs/autotests/runs/' %}{{ run.id }}.txt">View</a></td>
</tr>
{% endfor %}
</tbody>
</table>
## Instruction:
Fix invalid path for log in the table
## Code After:
{% load static %}
<table class="table table-bordered table-hover table-striped" id="runs_table">
<thead>
<tr>
<th>Id</th>
<th>Test Name</th>
<th>User</th>
<th>Start</th>
<th>End</th>
<th>Environment</th>
<th>Status</th>
<th>Output</th>
</tr>
</thead>
<tbody>
{% for run in runs_list %}
<tr class="{% if run.status.id == 1 %}active{% elif run.status.id == 2 %}danger{% elif run.status.id == 3 %}success{% else %}warning{% endif %}">
<td>{{ run.id }}</td>
<td>{{ run.test.name }}</td>
<td>{{ run.user.name }}</td>
<td>{% if run.start %}{{ run.start }}{% else %}{% endif %}</td>
<td>{% if run.end %}{{ run.end }}{% else %}{% endif %}</td>
<td>{{ run.environment.description }}</td>
<td>{{ run.status.value|upper }}</td>
<td><a target="_blank" href="{% static 'runs/logs' %}{{ run.id }}.txt">View</a></td>
</tr>
{% endfor %}
</tbody>
</table>
|
d091c4da6c6eed0e5b31998a9b6507c9070aaaf0 | usr/share/applications/dropbox-audit.desktop | usr/share/applications/dropbox-audit.desktop | [Desktop Entry]
X-AppInstall-Package=gnome-terminal
X-AppInstall-Section=main
Type=Application
Exec=/usr/bin/gnome-terminal --hide-menubar --working-directory=/home/rd --full-screen --title "Rivendell Dropbox Audit" --command /usr/local/bin/run-btd-dropbox-audit --interactive --verbose
Hidden=false
X-MATE-Autostart-enabled=true
Name=Rivendell Dropbox Audit
Comment=Audit Rivendell dropboxes
Categories=Admin;Rivendell;
Icon=computer
Terminal=false
StartupNotify=false
Path=/home/rd/Desktop
GenericName=Audit RIvendell dropboxes
| [Desktop Entry]
X-AppInstall-Package=gnome-terminal
X-AppInstall-Section=main
Type=Application
Exec=/usr/bin/gnome-terminal --hide-menubar --working-directory=/home/rd --full-screen --title "Rivendell Dropbox Audit" --command /usr/local/bin/run-btd-dropbox-audit
Hidden=false
X-MATE-Autostart-enabled=false
Name=Rivendell Dropbox Audit
Comment=Audit Rivendell dropboxes
Categories=Admin;Rivendell;
Icon=computer
Terminal=false
StartupNotify=false
Path=/home/rd/Desktop
GenericName=Audit RIvendell dropboxes
| Drop the options, don't autostart. | Drop the options, don't autostart.
| desktop | bsd-2-clause | opensourceradio/ram,opensourceradio/ram,opensourceradio/ram | desktop | ## Code Before:
[Desktop Entry]
X-AppInstall-Package=gnome-terminal
X-AppInstall-Section=main
Type=Application
Exec=/usr/bin/gnome-terminal --hide-menubar --working-directory=/home/rd --full-screen --title "Rivendell Dropbox Audit" --command /usr/local/bin/run-btd-dropbox-audit --interactive --verbose
Hidden=false
X-MATE-Autostart-enabled=true
Name=Rivendell Dropbox Audit
Comment=Audit Rivendell dropboxes
Categories=Admin;Rivendell;
Icon=computer
Terminal=false
StartupNotify=false
Path=/home/rd/Desktop
GenericName=Audit RIvendell dropboxes
## Instruction:
Drop the options, don't autostart.
## Code After:
[Desktop Entry]
X-AppInstall-Package=gnome-terminal
X-AppInstall-Section=main
Type=Application
Exec=/usr/bin/gnome-terminal --hide-menubar --working-directory=/home/rd --full-screen --title "Rivendell Dropbox Audit" --command /usr/local/bin/run-btd-dropbox-audit
Hidden=false
X-MATE-Autostart-enabled=false
Name=Rivendell Dropbox Audit
Comment=Audit Rivendell dropboxes
Categories=Admin;Rivendell;
Icon=computer
Terminal=false
StartupNotify=false
Path=/home/rd/Desktop
GenericName=Audit RIvendell dropboxes
|
4a53a1de96180091517046a1d7121fd73187aa3b | locales/id/faq.properties | locales/id/faq.properties | faq_donate_link=donasi sekarang
# Obsolete string, do not remove
# Obsolete string, do not remove
# Obsolete string, do not remove
# Obsolete string, do not remove
# Obsolete string, do not remove
# Obsolete string, do not remove
# Obsolete string, do not remove
# Obsolete string, do not remove
# Obsolete string, do not remove
# Obsolete string, do not remove
# Obsolete string, do not remove
# Obsolete string, do not remove
# Obsolete string, do not remove
# Obsolete string, do not remove
# Feel free to pick the term that makes the most sense in your language in “direct debit / bank transfer”. Both are the same thing.
| faq_donate_link=donasi sekarang
# Obsolete string, do not remove
# Obsolete string, do not remove
faq_item_1_header=Bagaimana cara saya menyumbang?
# Obsolete string, do not remove
# Obsolete string, do not remove
# Obsolete string, do not remove
# Obsolete string, do not remove
# Obsolete string, do not remove
# Obsolete string, do not remove
faq_item_5_paragraph_thunderbird=Saat ini kami tidak menerima bitcoin.
# Obsolete string, do not remove
# Obsolete string, do not remove
faq_item_7_header=Bagaimana donasi saya akan digunakan?
# Obsolete string, do not remove
# Obsolete string, do not remove
# Obsolete string, do not remove
# Obsolete string, do not remove
# Feel free to pick the term that makes the most sense in your language in “direct debit / bank transfer”. Both are the same thing.
| Update Indonesian (id) localization of Fundraising | Pontoon: Update Indonesian (id) localization of Fundraising
Localization authors:
- Benny Chandra <[email protected]>
- eljuno <[email protected]>
- Ahmad Risqi N <[email protected]>
| INI | mpl-2.0 | mozilla/donate.mozilla.org | ini | ## Code Before:
faq_donate_link=donasi sekarang
# Obsolete string, do not remove
# Obsolete string, do not remove
# Obsolete string, do not remove
# Obsolete string, do not remove
# Obsolete string, do not remove
# Obsolete string, do not remove
# Obsolete string, do not remove
# Obsolete string, do not remove
# Obsolete string, do not remove
# Obsolete string, do not remove
# Obsolete string, do not remove
# Obsolete string, do not remove
# Obsolete string, do not remove
# Obsolete string, do not remove
# Feel free to pick the term that makes the most sense in your language in “direct debit / bank transfer”. Both are the same thing.
## Instruction:
Pontoon: Update Indonesian (id) localization of Fundraising
Localization authors:
- Benny Chandra <[email protected]>
- eljuno <[email protected]>
- Ahmad Risqi N <[email protected]>
## Code After:
faq_donate_link=donasi sekarang
# Obsolete string, do not remove
# Obsolete string, do not remove
faq_item_1_header=Bagaimana cara saya menyumbang?
# Obsolete string, do not remove
# Obsolete string, do not remove
# Obsolete string, do not remove
# Obsolete string, do not remove
# Obsolete string, do not remove
# Obsolete string, do not remove
faq_item_5_paragraph_thunderbird=Saat ini kami tidak menerima bitcoin.
# Obsolete string, do not remove
# Obsolete string, do not remove
faq_item_7_header=Bagaimana donasi saya akan digunakan?
# Obsolete string, do not remove
# Obsolete string, do not remove
# Obsolete string, do not remove
# Obsolete string, do not remove
# Feel free to pick the term that makes the most sense in your language in “direct debit / bank transfer”. Both are the same thing.
|
06415fc171525e0449da9739cc5ba7eba8ffa285 | spec/fake_response/DoCapture/success.txt | spec/fake_response/DoCapture/success.txt | ACK=Success&AMT=440.00&AUTHORIZATIONID=2RG78938NK8989844&BUILD=8334781&CORRELATIONID=311611c111b48&CURRENCYCODE=BRL&EXCHANGERATE=0.426354&FEEAMT=13.16&ORDERTIME=2013-11-05T17%3A34%3A56Z&PARENTTRANSACTIONID=2RG78938NK8989844&PAYMENTSTATUS=Completed&PAYMENTTYPE=instant&PENDINGREASON=None&PROTECTIONELIGIBILITY=Ineligible&PROTECTIONELIGIBILITYTYPE=None&REASONCODE=None&SETTLEAMT=181.98&TAXAMT=0.00&TIMESTAMP=2013-11-05T17%3A34%3A56Z&TRANSACTIONID=9VW4495267708531S&TRANSACTIONTYPE=expresscheckout&VERSION=88.0
| ACK=Success&AMT=440.00&AUTHORIZATIONID=2RG78938NK8989844&BUILD=8334781&CORRELATIONID=311611c111b48&CURRENCYCODE=BRL&EXCHANGERATE=0.426354&FEEAMT=13.16&ORDERTIME=2013-11-05T17%3A34%3A56Z&PARENTTRANSACTIONID=2RG78938NK8989844&PAYMENTSTATUS=Completed&PAYMENTTYPE=instant&PENDINGREASON=None&PROTECTIONELIGIBILITY=Ineligible&PROTECTIONELIGIBILITYTYPE=None&REASONCODE=None&SETTLEAMT=181.98&TIMESTAMP=2013-11-05T17%3A34%3A56Z&TRANSACTIONID=9VW4495267708531S&VERSION=88.0
| Remove TAXAMT and TRANSACTIONTYPE from faked DoCapture response | Remove TAXAMT and TRANSACTIONTYPE from faked DoCapture response
From PayPal notification:
PayPal is retiring three parameters in the Payment Information fields
of the DoCapture API (NVP/SOAP) response:
• TAXAMT
• TRANSACTIONTYPE
• RECEIPTID
The parameters will be retired in May 2019.
After this time, PayPal will not return these attributes in the
DoCApture API response.
NVP documentation:
https://developer.paypal.com/docs/classic/api/merchant/DoCapture_API_Operation_NVP/
| Text | mit | ianfleeton/paypal-express | text | ## Code Before:
ACK=Success&AMT=440.00&AUTHORIZATIONID=2RG78938NK8989844&BUILD=8334781&CORRELATIONID=311611c111b48&CURRENCYCODE=BRL&EXCHANGERATE=0.426354&FEEAMT=13.16&ORDERTIME=2013-11-05T17%3A34%3A56Z&PARENTTRANSACTIONID=2RG78938NK8989844&PAYMENTSTATUS=Completed&PAYMENTTYPE=instant&PENDINGREASON=None&PROTECTIONELIGIBILITY=Ineligible&PROTECTIONELIGIBILITYTYPE=None&REASONCODE=None&SETTLEAMT=181.98&TAXAMT=0.00&TIMESTAMP=2013-11-05T17%3A34%3A56Z&TRANSACTIONID=9VW4495267708531S&TRANSACTIONTYPE=expresscheckout&VERSION=88.0
## Instruction:
Remove TAXAMT and TRANSACTIONTYPE from faked DoCapture response
From PayPal notification:
PayPal is retiring three parameters in the Payment Information fields
of the DoCapture API (NVP/SOAP) response:
• TAXAMT
• TRANSACTIONTYPE
• RECEIPTID
The parameters will be retired in May 2019.
After this time, PayPal will not return these attributes in the
DoCApture API response.
NVP documentation:
https://developer.paypal.com/docs/classic/api/merchant/DoCapture_API_Operation_NVP/
## Code After:
ACK=Success&AMT=440.00&AUTHORIZATIONID=2RG78938NK8989844&BUILD=8334781&CORRELATIONID=311611c111b48&CURRENCYCODE=BRL&EXCHANGERATE=0.426354&FEEAMT=13.16&ORDERTIME=2013-11-05T17%3A34%3A56Z&PARENTTRANSACTIONID=2RG78938NK8989844&PAYMENTSTATUS=Completed&PAYMENTTYPE=instant&PENDINGREASON=None&PROTECTIONELIGIBILITY=Ineligible&PROTECTIONELIGIBILITYTYPE=None&REASONCODE=None&SETTLEAMT=181.98&TIMESTAMP=2013-11-05T17%3A34%3A56Z&TRANSACTIONID=9VW4495267708531S&VERSION=88.0
|
dad7aebc788123f05efb1cb8c09e265a17462474 | lib/tech404logs/user.rb | lib/tech404logs/user.rb | require 'thread'
module Tech404logs
class User
include DataMapper::Resource
storage_names[:default] = 'users'
property :id, String, key: true
property :name, String
property :real_name, String
property :image, String, length: 255
def self.create_or_update(user)
first_or_new(id: user.fetch('id')).tap do |record|
record.name = user.fetch('name')
record.real_name = user.fetch('profile').fetch('real_name')
record.image = user.fetch('profile').fetch('image_48')
end.save
end
def self.store(user_or_id)
case user_or_id
when Hash
create_or_update(user_or_id)
when String
first_or_create(id: user_or_id)
end
end
def pretty_name
real_name || name
end
end
end
| require 'thread'
module Tech404logs
class User
include DataMapper::Resource
storage_names[:default] = 'users'
property :id, String, key: true
property :name, String
property :real_name, String
property :image, String, length: 255
property :opted_out, Boolean
def self.create_or_update(user)
first_or_new(id: user.fetch('id')).tap do |record|
record.name = user.fetch('name')
record.real_name = user.fetch('profile').fetch('real_name')
record.image = user.fetch('profile').fetch('image_48')
end.save
end
def self.store(user_or_id)
case user_or_id
when Hash
create_or_update(user_or_id)
when String
first_or_create(id: user_or_id)
end
end
def pretty_name
real_name || name
end
end
end
| Add opted_out field to DataMapper model | Add opted_out field to DataMapper model
This shouldn't be necessary because I'm working to replace
DataMapper with sequal, but for the sake of consistency.
| Ruby | mit | zacstewart/tech404logs,zacstewart/tech404-search,zacstewart/tech404logs,zacstewart/tech404-search,zacstewart/tech404-search,zacstewart/tech404logs | ruby | ## Code Before:
require 'thread'
module Tech404logs
class User
include DataMapper::Resource
storage_names[:default] = 'users'
property :id, String, key: true
property :name, String
property :real_name, String
property :image, String, length: 255
def self.create_or_update(user)
first_or_new(id: user.fetch('id')).tap do |record|
record.name = user.fetch('name')
record.real_name = user.fetch('profile').fetch('real_name')
record.image = user.fetch('profile').fetch('image_48')
end.save
end
def self.store(user_or_id)
case user_or_id
when Hash
create_or_update(user_or_id)
when String
first_or_create(id: user_or_id)
end
end
def pretty_name
real_name || name
end
end
end
## Instruction:
Add opted_out field to DataMapper model
This shouldn't be necessary because I'm working to replace
DataMapper with sequal, but for the sake of consistency.
## Code After:
require 'thread'
module Tech404logs
class User
include DataMapper::Resource
storage_names[:default] = 'users'
property :id, String, key: true
property :name, String
property :real_name, String
property :image, String, length: 255
property :opted_out, Boolean
def self.create_or_update(user)
first_or_new(id: user.fetch('id')).tap do |record|
record.name = user.fetch('name')
record.real_name = user.fetch('profile').fetch('real_name')
record.image = user.fetch('profile').fetch('image_48')
end.save
end
def self.store(user_or_id)
case user_or_id
when Hash
create_or_update(user_or_id)
when String
first_or_create(id: user_or_id)
end
end
def pretty_name
real_name || name
end
end
end
|
fb10d9fe8783b511a3e89d81b28d134858cafdab | lib/extensions/active_record/base.rb | lib/extensions/active_record/base.rb | module Extensions::ActiveRecord::Base
def self.included(class_)
class_.class_eval do
def self.currently_valid
where do
(valid_from.nil? || valid_from <= DateTime.now) &&
(valid_to.nil? || valid_to >= DateTime.now)
end
end
end
end
def currently_valid?
(valid_from.nil? || valid_from <= DateTime.now) &&
(valid_to.nil? || valid_to >= DateTime.now)
end
end
| module Extensions::ActiveRecord::Base
def self.included(class_)
class_.class_eval do
def self.currently_valid
where do
(valid_from.nil? || valid_from <= DateTime.now) &&
(valid_to.nil? || valid_to >= DateTime.now)
end
end
end
end
# @return [Bool] True if valid_from is a future time
def not_yet_valid?
!valid_from.nil? && valid_from > DateTime.now
end
# @return [Bool] True if current time is between valid_from and valid_to
def currently_valid?
(valid_from.nil? || valid_from <= DateTime.now) &&
(valid_to.nil? || valid_to >= DateTime.now)
end
# @return [Bool] True if valid_to is a past time
def expired?
!valid_to.nil? && DateTime.now > valid_to
end
end
| Add more helper functions for bounded objects | Add more helper functions for bounded objects
| Ruby | mit | BenMQ/coursemology2,cysjonathan/coursemology2,Coursemology/coursemology2,BenMQ/coursemology2,harryggg/coursemology2,Coursemology/coursemology2,cysjonathan/coursemology2,harryggg/coursemology2,xzhflying/coursemology2,Coursemology/coursemology2,Coursemology/coursemology2,harryggg/coursemology2,cysjonathan/coursemology2,BenMQ/coursemology2,Coursemology/coursemology2,Coursemology/coursemology2,Coursemology/coursemology2,xzhflying/coursemology2,xzhflying/coursemology2 | ruby | ## Code Before:
module Extensions::ActiveRecord::Base
def self.included(class_)
class_.class_eval do
def self.currently_valid
where do
(valid_from.nil? || valid_from <= DateTime.now) &&
(valid_to.nil? || valid_to >= DateTime.now)
end
end
end
end
def currently_valid?
(valid_from.nil? || valid_from <= DateTime.now) &&
(valid_to.nil? || valid_to >= DateTime.now)
end
end
## Instruction:
Add more helper functions for bounded objects
## Code After:
module Extensions::ActiveRecord::Base
def self.included(class_)
class_.class_eval do
def self.currently_valid
where do
(valid_from.nil? || valid_from <= DateTime.now) &&
(valid_to.nil? || valid_to >= DateTime.now)
end
end
end
end
# @return [Bool] True if valid_from is a future time
def not_yet_valid?
!valid_from.nil? && valid_from > DateTime.now
end
# @return [Bool] True if current time is between valid_from and valid_to
def currently_valid?
(valid_from.nil? || valid_from <= DateTime.now) &&
(valid_to.nil? || valid_to >= DateTime.now)
end
# @return [Bool] True if valid_to is a past time
def expired?
!valid_to.nil? && DateTime.now > valid_to
end
end
|
90664d4ecb090906cc5fa095f9ba0da64bca2b6f | examples/example-1.html | examples/example-1.html | <html>
<head>
<script src="build/js/bundle.js"></script>
<link rel="stylesheet" href="less/main.css">
</head>
<body>
<h1>Example One</h1>
<section id="stage">
</section>
<script>
var stage = document.querySelector('#stage');
var options = {
el: stage,
src: 'https://s3.amazonaws.com/stompdrop-output-hd-h264/jarrod/73178793-6af0-4a78-b461-6494961fa8f8',
autoplay: false,
width: 720
};
var stompScreen = StompScreen.init(options);
</script>
</body>
</html> | <html>
<head>
<script src="build/js/bundle.js"></script>
<link rel="stylesheet" href="less/main.css">
</head>
<body>
<h1>Example One</h1>
<section id="stage">
</section>
<script>
var stage = document.querySelector('#stage');
var options = {
el: stage,
src: 'https://s3.amazonaws.com/stompdrop-output-hd-h264/jarrod/1670d546-ab44-46a5-a4cd-120681d18629',
autoplay: false,
width: 720
};
var stompScreen = StompScreen.init(options);
</script>
</body>
</html> | Use a longer live video | Use a longer live video
| HTML | mit | kahnjw/stompscreen | html | ## Code Before:
<html>
<head>
<script src="build/js/bundle.js"></script>
<link rel="stylesheet" href="less/main.css">
</head>
<body>
<h1>Example One</h1>
<section id="stage">
</section>
<script>
var stage = document.querySelector('#stage');
var options = {
el: stage,
src: 'https://s3.amazonaws.com/stompdrop-output-hd-h264/jarrod/73178793-6af0-4a78-b461-6494961fa8f8',
autoplay: false,
width: 720
};
var stompScreen = StompScreen.init(options);
</script>
</body>
</html>
## Instruction:
Use a longer live video
## Code After:
<html>
<head>
<script src="build/js/bundle.js"></script>
<link rel="stylesheet" href="less/main.css">
</head>
<body>
<h1>Example One</h1>
<section id="stage">
</section>
<script>
var stage = document.querySelector('#stage');
var options = {
el: stage,
src: 'https://s3.amazonaws.com/stompdrop-output-hd-h264/jarrod/1670d546-ab44-46a5-a4cd-120681d18629',
autoplay: false,
width: 720
};
var stompScreen = StompScreen.init(options);
</script>
</body>
</html> |