commit
stringlengths
40
40
old_file
stringlengths
4
237
new_file
stringlengths
4
237
old_contents
stringlengths
1
4.24k
new_contents
stringlengths
5
4.84k
subject
stringlengths
15
778
message
stringlengths
16
6.86k
lang
stringlengths
1
30
license
stringclasses
13 values
repos
stringlengths
5
116k
config
stringlengths
1
30
content
stringlengths
105
8.72k
072a6ebd408a1c701d40ff3ac7408d39ee7ea42d
lib/sunlight.rb
lib/sunlight.rb
require 'rubygems' require 'json' require 'cgi' require 'ym4r/google_maps/geocoding' require 'net/http' include Ym4r::GoogleMaps require "#{File.dirname(__FILE__)}/sunlight/base.rb" Dir["#{File.dirname(__FILE__)}/sunlight/*.rb"].each { |source_file| require source_file }
require 'json' require 'cgi' require 'ym4r/google_maps/geocoding' require 'net/http' include Ym4r::GoogleMaps require "#{File.dirname(__FILE__)}/sunlight/base.rb" Dir["#{File.dirname(__FILE__)}/sunlight/*.rb"].each { |source_file| require source_file }
Remove require 'rubygems' to be a good Ruby citizen.
Remove require 'rubygems' to be a good Ruby citizen.
Ruby
bsd-3-clause
sunlightlabs/ruby-sunlight,loandy/ruby-sunlight,NIFA/ruby-sunlight
ruby
## Code Before: require 'rubygems' require 'json' require 'cgi' require 'ym4r/google_maps/geocoding' require 'net/http' include Ym4r::GoogleMaps require "#{File.dirname(__FILE__)}/sunlight/base.rb" Dir["#{File.dirname(__FILE__)}/sunlight/*.rb"].each { |source_file| require source_file } ## Instruction: Remove require 'rubygems' to be a good Ruby citizen. ## Code After: require 'json' require 'cgi' require 'ym4r/google_maps/geocoding' require 'net/http' include Ym4r::GoogleMaps require "#{File.dirname(__FILE__)}/sunlight/base.rb" Dir["#{File.dirname(__FILE__)}/sunlight/*.rb"].each { |source_file| require source_file }
721703801654af88e8b5064d1bc65569ce1555cf
thumbnails/engines/__init__.py
thumbnails/engines/__init__.py
def get_current_engine(): return None
from thumbnails.engines.pillow import PillowEngine def get_current_engine(): return PillowEngine()
Set pillow engine as default
Set pillow engine as default
Python
mit
python-thumbnails/python-thumbnails,relekang/python-thumbnails
python
## Code Before: def get_current_engine(): return None ## Instruction: Set pillow engine as default ## Code After: from thumbnails.engines.pillow import PillowEngine def get_current_engine(): return PillowEngine()
cdfd622f4e7017ab1860e1f7420d6f26424a69f1
dashboard_app/extension.py
dashboard_app/extension.py
from lava_server.extension import LavaServerExtension class DashboardExtension(LavaServerExtension): @property def app_name(self): return "dashboard_app" @property def name(self): return "Dashboard" @property def main_view_name(self): return "dashboard_app.views.bundle_stream_list" @property def description(self): return "Validation Dashboard" @property def version(self): import versiontools import dashboard_app return versiontools.format_version(dashboard_app.__version__) def contribute_to_settings(self, settings): super(DashboardExtension, self).contribute_to_settings(settings) settings['INSTALLED_APPS'].extend([ "linaro_django_pagination", "south", ]) settings['MIDDLEWARE_CLASSES'].append( 'linaro_django_pagination.middleware.PaginationMiddleware') settings['RESTRUCTUREDTEXT_FILTER_SETTINGS'] = { "initial_header_level": 4} def contribute_to_settings_ex(self, settings_module, settings_object): settings_module['DATAVIEW_DIRS'] = settings_object._settings.get( "DATAVIEW_DIRS", []) settings_module['DATAREPORT_DIRS'] = settings_object._settings.get( "DATAREPORT_DIRS", [])
from lava_server.extension import LavaServerExtension class DashboardExtension(LavaServerExtension): @property def app_name(self): return "dashboard_app" @property def name(self): return "Dashboard" @property def main_view_name(self): return "dashboard_app.views.bundle_stream_list" @property def description(self): return "Validation Dashboard" @property def version(self): import versiontools import dashboard_app return versiontools.format_version(dashboard_app.__version__) def contribute_to_settings(self, settings_module): super(DashboardExtension, self).contribute_to_settings(settings_module) settings_module['INSTALLED_APPS'].extend([ "linaro_django_pagination", "south", ]) settings_module['MIDDLEWARE_CLASSES'].append( 'linaro_django_pagination.middleware.PaginationMiddleware') def contribute_to_settings_ex(self, settings_module, settings_object): settings_module['DATAVIEW_DIRS'] = settings_object._settings.get( "DATAVIEW_DIRS", []) settings_module['DATAREPORT_DIRS'] = settings_object._settings.get( "DATAREPORT_DIRS", []) # Enable constrained dataview database if requested if settings_object._settings.get("use_dataview_database"): # Copy everything from the default database and append _dataview to user # name. The rest is out of scope (making sure it's actually setup # properly, having permissions to login, permissions to view proper data) settings_module['DATABASES']['dataview'] = dict(settings_module['DATABASES']['default']) settings_module['DATABASES']['dataview']['USER'] += "_dataview"
Move support for dataview-specific database from lava-server
Move support for dataview-specific database from lava-server
Python
agpl-3.0
Linaro/lava-server,Linaro/lava-server,Linaro/lava-server,OSSystems/lava-server,OSSystems/lava-server,OSSystems/lava-server,Linaro/lava-server
python
## Code Before: from lava_server.extension import LavaServerExtension class DashboardExtension(LavaServerExtension): @property def app_name(self): return "dashboard_app" @property def name(self): return "Dashboard" @property def main_view_name(self): return "dashboard_app.views.bundle_stream_list" @property def description(self): return "Validation Dashboard" @property def version(self): import versiontools import dashboard_app return versiontools.format_version(dashboard_app.__version__) def contribute_to_settings(self, settings): super(DashboardExtension, self).contribute_to_settings(settings) settings['INSTALLED_APPS'].extend([ "linaro_django_pagination", "south", ]) settings['MIDDLEWARE_CLASSES'].append( 'linaro_django_pagination.middleware.PaginationMiddleware') settings['RESTRUCTUREDTEXT_FILTER_SETTINGS'] = { "initial_header_level": 4} def contribute_to_settings_ex(self, settings_module, settings_object): settings_module['DATAVIEW_DIRS'] = settings_object._settings.get( "DATAVIEW_DIRS", []) settings_module['DATAREPORT_DIRS'] = settings_object._settings.get( "DATAREPORT_DIRS", []) ## Instruction: Move support for dataview-specific database from lava-server ## Code After: from lava_server.extension import LavaServerExtension class DashboardExtension(LavaServerExtension): @property def app_name(self): return "dashboard_app" @property def name(self): return "Dashboard" @property def main_view_name(self): return "dashboard_app.views.bundle_stream_list" @property def description(self): return "Validation Dashboard" @property def version(self): import versiontools import dashboard_app return versiontools.format_version(dashboard_app.__version__) def contribute_to_settings(self, settings_module): super(DashboardExtension, self).contribute_to_settings(settings_module) settings_module['INSTALLED_APPS'].extend([ "linaro_django_pagination", "south", ]) settings_module['MIDDLEWARE_CLASSES'].append( 'linaro_django_pagination.middleware.PaginationMiddleware') def contribute_to_settings_ex(self, settings_module, settings_object): settings_module['DATAVIEW_DIRS'] = settings_object._settings.get( "DATAVIEW_DIRS", []) settings_module['DATAREPORT_DIRS'] = settings_object._settings.get( "DATAREPORT_DIRS", []) # Enable constrained dataview database if requested if settings_object._settings.get("use_dataview_database"): # Copy everything from the default database and append _dataview to user # name. The rest is out of scope (making sure it's actually setup # properly, having permissions to login, permissions to view proper data) settings_module['DATABASES']['dataview'] = dict(settings_module['DATABASES']['default']) settings_module['DATABASES']['dataview']['USER'] += "_dataview"
3922c4b0123db8d8b57ec4797514f3f131b5ca2a
app/views/books/show.html.erb
app/views/books/show.html.erb
<section class="single_book wrap"> <div class="cover"><%= book_cover_tag @book, :size => "S" %></div> <div class="title"> <h1><%= @book.title %></h1> <h2>by <%= @book.author %></h2> </div> <p><%= pluralize @book.copies.count, "copy" %></p> <ul class="copies"> <%= render :partial => "copy", :collection => @book.copies.ordered_by_availability %> </ul> <nav class="actions"> <ul> <li><%= link_to "Edit this book's details", edit_book_path(@book) %></li> <li><%= link_to "Add another copy", new_book_copy_path(@book) %></li> </ul> </nav> </section>
<section class="single_book wrap"> <div class="cover"><%= book_cover_tag @book, :size => "S" %></div> <div class="title"> <h1><%= @book.title %></h1> <h2>by <%= @book.author %></h2> </div> <p><%= pluralize @book.copies.count, "copy" %></p> <ul class="copies"> <%= render :partial => "copy", :collection => @book.copies.ordered_by_availability %> </ul> <nav class="actions"> <ul> <li><%= link_to "Edit this book's details", edit_book_path(@book) %></li> <li><%= link_to "Add another copy", new_book_copy_path(@book) %></li> <li><%= link_to "See revision history", history_book_path(@book) %></li> </ul> </nav> </section>
Add link to revision history in book actions list
Add link to revision history in book actions list
HTML+ERB
mit
jabley/anthology,jabley/anthology,JordanHatch/anthology,JordanHatch/anthology,jabley/anthology
html+erb
## Code Before: <section class="single_book wrap"> <div class="cover"><%= book_cover_tag @book, :size => "S" %></div> <div class="title"> <h1><%= @book.title %></h1> <h2>by <%= @book.author %></h2> </div> <p><%= pluralize @book.copies.count, "copy" %></p> <ul class="copies"> <%= render :partial => "copy", :collection => @book.copies.ordered_by_availability %> </ul> <nav class="actions"> <ul> <li><%= link_to "Edit this book's details", edit_book_path(@book) %></li> <li><%= link_to "Add another copy", new_book_copy_path(@book) %></li> </ul> </nav> </section> ## Instruction: Add link to revision history in book actions list ## Code After: <section class="single_book wrap"> <div class="cover"><%= book_cover_tag @book, :size => "S" %></div> <div class="title"> <h1><%= @book.title %></h1> <h2>by <%= @book.author %></h2> </div> <p><%= pluralize @book.copies.count, "copy" %></p> <ul class="copies"> <%= render :partial => "copy", :collection => @book.copies.ordered_by_availability %> </ul> <nav class="actions"> <ul> <li><%= link_to "Edit this book's details", edit_book_path(@book) %></li> <li><%= link_to "Add another copy", new_book_copy_path(@book) %></li> <li><%= link_to "See revision history", history_book_path(@book) %></li> </ul> </nav> </section>
3c460877f9ad60d48f32259606b4523c4d56c066
src/js/main.js
src/js/main.js
/* * This is the main PebbleJS file. You do not need to modify this file unless * you want to change the way PebbleJS starts, the script it runs or the libraries * it loads. * * By default, this will run app.js */ require('safe'); Pebble.addEventListener('ready', function(e) { // Initialize the Pebble protocol require('ui/simply-pebble.js').init(); // Backwards compatibility: place moment.js in global scope // This will be removed in a future update window.moment = require('vendor/moment'); // Load local file require('app.js'); });
/* * This is the main PebbleJS file. You do not need to modify this file unless * you want to change the way PebbleJS starts, the script it runs or the libraries * it loads. * * By default, this will run app.js */ var safe = require('safe'); var util2 = require('util2'); Pebble.addEventListener('ready', function(e) { // Initialize the Pebble protocol require('ui/simply-pebble.js').init(); // Backwards compatibility: place moment.js in global scope // This will be removed in a future update var moment = require('vendor/moment'); var momentPasser = function(methodName) { return function() { if (safe.warnGlobalMoment !== false) { safe.warn("You've accessed moment globally. Pleae use `var moment = require('moment')` instead.\n\t" + 'moment will not be automatically loaded as a global in future versions.', 5); safe.warnGlobalMoment = false; } return (methodName ? moment[methodName] : moment).apply(this, arguments); }; }; var globalMoment = momentPasser(); util2.copy(moment.prototype, globalMoment.prototype); for (var k in moment) { var v = moment[k]; globalMoment[k] = typeof v === 'function' ? momentPasser(k) : v; } window.moment = globalMoment; // Load local file require('app.js'); });
Add moment global usage warning
Add moment global usage warning
JavaScript
mit
effata/pebblejs,ento/pebblejs,jsfi/pebblejs,ishepard/TransmissionTorrent,fletchto99/pebblejs,demophoon/Trimet-Tracker,effata/pebblejs,gwijsman/OpenRemotePebble,fletchto99/pebblejs,effata/pebblejs,daduke/LMSController,pebble/pebblejs,ento/pebblejs,jiangege/pebblejs-project,ishepard/TransmissionTorrent,daduke/LMSController,pebble/pebblejs,jsfi/pebblejs,jiangege/pebblejs-project,youtux/PebbleShows,carlo-colombo/dublin-bus-pebble,sunshineyyy/CatchOneBus,frizzr/CatchOneBus,jiangege/pebblejs-project,demophoon/Trimet-Tracker,youtux/PebbleShows,demophoon/Trimet-Tracker,jsfi/pebblejs,carlo-colombo/dublin-bus-pebble,youtux/pebblejs,bkbilly/Tvheadend-EPG,youtux/pebblejs,sunshineyyy/CatchOneBus,jsfi/pebblejs,jiangege/pebblejs-project,ishepard/TransmissionTorrent,youtux/pebblejs,pebble/pebblejs,jsfi/pebblejs,jiangege/pebblejs-project,daduke/LMSController,sunshineyyy/CatchOneBus,effata/pebblejs,youtux/pebblejs,frizzr/CatchOneBus,frizzr/CatchOneBus,bkbilly/Tvheadend-EPG,demophoon/Trimet-Tracker,demophoon/Trimet-Tracker,effata/pebblejs,bkbilly/Tvheadend-EPG,frizzr/CatchOneBus,pebble/pebblejs,youtux/PebbleShows,daduke/LMSController,carlo-colombo/dublin-bus-pebble,fletchto99/pebblejs,gwijsman/OpenRemotePebble,sunshineyyy/CatchOneBus,pebble/pebblejs,ishepard/TransmissionTorrent,daduke/LMSController,ishepard/TransmissionTorrent,youtux/pebblejs,fletchto99/pebblejs,carlo-colombo/dublin-bus-pebble,ento/pebblejs,gwijsman/OpenRemotePebble,youtux/PebbleShows,fletchto99/pebblejs,bkbilly/Tvheadend-EPG,ento/pebblejs,carlo-colombo/dublin-bus-pebble,ento/pebblejs,bkbilly/Tvheadend-EPG,gwijsman/OpenRemotePebble,sunshineyyy/CatchOneBus,gwijsman/OpenRemotePebble
javascript
## Code Before: /* * This is the main PebbleJS file. You do not need to modify this file unless * you want to change the way PebbleJS starts, the script it runs or the libraries * it loads. * * By default, this will run app.js */ require('safe'); Pebble.addEventListener('ready', function(e) { // Initialize the Pebble protocol require('ui/simply-pebble.js').init(); // Backwards compatibility: place moment.js in global scope // This will be removed in a future update window.moment = require('vendor/moment'); // Load local file require('app.js'); }); ## Instruction: Add moment global usage warning ## Code After: /* * This is the main PebbleJS file. You do not need to modify this file unless * you want to change the way PebbleJS starts, the script it runs or the libraries * it loads. * * By default, this will run app.js */ var safe = require('safe'); var util2 = require('util2'); Pebble.addEventListener('ready', function(e) { // Initialize the Pebble protocol require('ui/simply-pebble.js').init(); // Backwards compatibility: place moment.js in global scope // This will be removed in a future update var moment = require('vendor/moment'); var momentPasser = function(methodName) { return function() { if (safe.warnGlobalMoment !== false) { safe.warn("You've accessed moment globally. Pleae use `var moment = require('moment')` instead.\n\t" + 'moment will not be automatically loaded as a global in future versions.', 5); safe.warnGlobalMoment = false; } return (methodName ? moment[methodName] : moment).apply(this, arguments); }; }; var globalMoment = momentPasser(); util2.copy(moment.prototype, globalMoment.prototype); for (var k in moment) { var v = moment[k]; globalMoment[k] = typeof v === 'function' ? momentPasser(k) : v; } window.moment = globalMoment; // Load local file require('app.js'); });
ab3b0d54f6f660719649237cedcc9c77789efad9
scripts/sonarqube-scan.cmd
scripts/sonarqube-scan.cmd
mvn clean jacoco:prepare-agent verify jacoco:report -Dmaven.javadoc.failOnError=false -s dev\settings-sonarqube.xml %*
mvn clean jacoco:prepare-agent verify jacoco:report sonar:sonar -Dmaven.javadoc.failOnError=false -s dev\settings-sonarqube.xml %*
Add missing `sonar:sonar` for windows script
Add missing `sonar:sonar` for windows script
Batchfile
apache-2.0
excella-core/excella-core
batchfile
## Code Before: mvn clean jacoco:prepare-agent verify jacoco:report -Dmaven.javadoc.failOnError=false -s dev\settings-sonarqube.xml %* ## Instruction: Add missing `sonar:sonar` for windows script ## Code After: mvn clean jacoco:prepare-agent verify jacoco:report sonar:sonar -Dmaven.javadoc.failOnError=false -s dev\settings-sonarqube.xml %*
b96a625dc55f45057586dd6576675bf315955f97
README.md
README.md
by [email protected] ### What to talk about with the Gang * **domain name** move asap to jobs.makesense.org * set a 'from:' email * **BM** yes/no? ideas? 'reward based' pay as you want * **MKS app redirect** UX/UI feedback * **branding** * graphic design, improve logo, make something recognizable * **features** * **search** * **categorization** biz, tech, design * **share to a friend** fb, twitter, email * **follow** rss, dedicated twitter account, newsletter? * **$€gm€ntation** paid, unpaid? freelance tag? * **highlight** it works * **tracking/analytics** what to measure? what helps the recruiter/applicant to get the job done? what's usefull for the gangsters? * * **random** * **modjo** keep it simple, lightweight, efficient * **tone** auto-moderation * **code license** open source? * profile jobs within a framework of work skills, ex. [Future work skills 2020](http://www.iftf.org/our-work/global-landscape/work/future-work-skills-2020/)
Prepare meeting with the gang
Prepare meeting with the gang
Markdown
agpl-3.0
Em-AK/makesense_jobs,Em-AK/makesense_jobs,Em-AK/makesense_jobs
markdown
## Code Before: _ by [email protected] _ ## Instruction: Prepare meeting with the gang ## Code After: by [email protected] ### What to talk about with the Gang * **domain name** move asap to jobs.makesense.org * set a 'from:' email * **BM** yes/no? ideas? 'reward based' pay as you want * **MKS app redirect** UX/UI feedback * **branding** * graphic design, improve logo, make something recognizable * **features** * **search** * **categorization** biz, tech, design * **share to a friend** fb, twitter, email * **follow** rss, dedicated twitter account, newsletter? * **$€gm€ntation** paid, unpaid? freelance tag? * **highlight** it works * **tracking/analytics** what to measure? what helps the recruiter/applicant to get the job done? what's usefull for the gangsters? * * **random** * **modjo** keep it simple, lightweight, efficient * **tone** auto-moderation * **code license** open source? * profile jobs within a framework of work skills, ex. [Future work skills 2020](http://www.iftf.org/our-work/global-landscape/work/future-work-skills-2020/)
3960eb9165af37042539ac16c08e2e45beeb4de7
support/mkdist-osx.sh
support/mkdist-osx.sh
test -f tundra.lua || exit 1 find examples -name tundra-output -exec rm -rf {} \; find examples -name .tundra-\* -exec rm -f {} \; rm -rf build dist mkdir build cd build cmake .. make cd .. TUNDRA_HOME=$PWD build/tundra standalone release macosx-clang mkdir dist mkdir dist/doc cp -r README.md COPYING examples dist cp doc/manual.asciidoc dist/doc cp tundra-output/macosx-clang-release-standalone/tundra dist git log -1 >> dist/SNAPSHOT_REVISION
test -f tundra.lua || exit 1 find examples -name tundra-output -exec rm -rf {} \; find examples -name .tundra-\* -exec rm -f {} \; rm -rf build dist mkdir build cd build cmake .. make cd .. TUNDRA_HOME=$PWD build/tundra standalone release macosx-clang mkdir dist mkdir dist/doc cp -r README.md COPYING examples dist cp doc/manual.asciidoc dist/doc cp tundra-output/macosx-clang-release-standalone/tundra dist git log -1 >> dist/SNAPSHOT_REVISION find dist -name \*.swp -exec rm {} \; find dist -name .DS_Store -exec rm {} \;
Remove vim swap files before packing.
Remove vim swap files before packing.
Shell
mit
bmharper/tundra,deplinenoise/tundra,bmharper/tundra,bmharper/tundra,deplinenoise/tundra,deplinenoise/tundra,bmharper/tundra
shell
## Code Before: test -f tundra.lua || exit 1 find examples -name tundra-output -exec rm -rf {} \; find examples -name .tundra-\* -exec rm -f {} \; rm -rf build dist mkdir build cd build cmake .. make cd .. TUNDRA_HOME=$PWD build/tundra standalone release macosx-clang mkdir dist mkdir dist/doc cp -r README.md COPYING examples dist cp doc/manual.asciidoc dist/doc cp tundra-output/macosx-clang-release-standalone/tundra dist git log -1 >> dist/SNAPSHOT_REVISION ## Instruction: Remove vim swap files before packing. ## Code After: test -f tundra.lua || exit 1 find examples -name tundra-output -exec rm -rf {} \; find examples -name .tundra-\* -exec rm -f {} \; rm -rf build dist mkdir build cd build cmake .. make cd .. TUNDRA_HOME=$PWD build/tundra standalone release macosx-clang mkdir dist mkdir dist/doc cp -r README.md COPYING examples dist cp doc/manual.asciidoc dist/doc cp tundra-output/macosx-clang-release-standalone/tundra dist git log -1 >> dist/SNAPSHOT_REVISION find dist -name \*.swp -exec rm {} \; find dist -name .DS_Store -exec rm {} \;
9ce1af4c9c4858b61e084a0268ba07e002e2ab4c
test/fixtures/reverse/mysql/build/sql/schema.sql
test/fixtures/reverse/mysql/build/sql/schema.sql
DROP TABLE book; DROP VIEW view_book_titles; CREATE TABLE book ( id INTEGER NOT NULL AUTO_INCREMENT COMMENT 'Book Id', title VARCHAR(255) NOT NULL COMMENT 'Book Title', isbn VARCHAR(24) NOT NULL COMMENT 'ISBN Number', price FLOAT COMMENT 'Price of the book.', PRIMARY KEY (id) ) ENGINE=InnoDB COMMENT='Book Table'; CREATE VIEW view_book_titles AS SELECT title FROM book;
DROP TABLE IF EXISTS book; DROP VIEW IF EXISTS view_book_titles; CREATE TABLE book ( id INTEGER NOT NULL AUTO_INCREMENT COMMENT 'Book Id', title VARCHAR(255) NOT NULL COMMENT 'Book Title', isbn VARCHAR(24) NOT NULL COMMENT 'ISBN Number', price FLOAT COMMENT 'Price of the book.', PRIMARY KEY (id) ) ENGINE=InnoDB COMMENT='Book Table'; CREATE VIEW view_book_titles AS SELECT title FROM book;
Fix fixtures to avoid error (mysql safe mode)
Fix fixtures to avoid error (mysql safe mode)
SQL
mit
PatidarWeb/Propel,Egston/Propel,mhitza/Propel,rozwell/Propel,propelorm/Propel,halfer/Propel,zachmay/Propel,adrianrusnarczyk/Propel,halfer/Propel,adrianrusnarczyk/Propel,gencer/Propel,gencer/Propel,AriaSystems/Propel,sh0dow/Propel,tbl0605/Propel-pervasive,tbl0605/Propel-pervasive,bjdelange/Propel,propelorm/Propel,bjdelange/Propel,Egston/Propel,sh0dow/Propel,PatidarWeb/Propel,rozwell/Propel,mhitza/Propel,AriaSystems/Propel,zachmay/Propel
sql
## Code Before: DROP TABLE book; DROP VIEW view_book_titles; CREATE TABLE book ( id INTEGER NOT NULL AUTO_INCREMENT COMMENT 'Book Id', title VARCHAR(255) NOT NULL COMMENT 'Book Title', isbn VARCHAR(24) NOT NULL COMMENT 'ISBN Number', price FLOAT COMMENT 'Price of the book.', PRIMARY KEY (id) ) ENGINE=InnoDB COMMENT='Book Table'; CREATE VIEW view_book_titles AS SELECT title FROM book; ## Instruction: Fix fixtures to avoid error (mysql safe mode) ## Code After: DROP TABLE IF EXISTS book; DROP VIEW IF EXISTS view_book_titles; CREATE TABLE book ( id INTEGER NOT NULL AUTO_INCREMENT COMMENT 'Book Id', title VARCHAR(255) NOT NULL COMMENT 'Book Title', isbn VARCHAR(24) NOT NULL COMMENT 'ISBN Number', price FLOAT COMMENT 'Price of the book.', PRIMARY KEY (id) ) ENGINE=InnoDB COMMENT='Book Table'; CREATE VIEW view_book_titles AS SELECT title FROM book;
9c5553c1672ae2978f5e71c99dc4ac5ef2d9cb22
syntax_checkers/javascript.vim
syntax_checkers/javascript.vim
"============================================================================ "File: javascript.vim "Description: Syntax checking plugin for syntastic.vim "Maintainer: Martin Grenfell <martin_grenfell at msn dot com> "License: This program is free software. It comes without any warranty, " to the extent permitted by applicable law. You can redistribute " it and/or modify it under the terms of the Do What The Fuck You " Want To Public License, Version 2, as published by Sam Hocevar. " See http://sam.zoy.org/wtfpl/COPYING for more details. " "============================================================================ if exists("loaded_javascript_syntax_checker") finish endif let loaded_javascript_syntax_checker = 1 "bail if the user doesnt have jsl installed if !executable("jsl") finish endif function! SyntaxCheckers_javascript_GetLocList() let makeprg = "jsl -nologo -nofilelisting -nosummary -nocontext -process %" let errorformat='%W%f(%l): lint warning: %m,%-Z%p^,%E%f(%l): SyntaxError: %m,%-Z%p^,%-G' return SyntasticMake({ 'makeprg': makeprg, 'errorformat': errorformat }) endfunction
"============================================================================ "File: javascript.vim "Description: Syntax checking plugin for syntastic.vim "Maintainer: Martin Grenfell <martin_grenfell at msn dot com> "License: This program is free software. It comes without any warranty, " to the extent permitted by applicable law. You can redistribute " it and/or modify it under the terms of the Do What The Fuck You " Want To Public License, Version 2, as published by Sam Hocevar. " See http://sam.zoy.org/wtfpl/COPYING for more details. " "============================================================================ if exists("loaded_javascript_syntax_checker") finish endif let loaded_javascript_syntax_checker = 1 "bail if the user doesnt have jsl installed if !executable("jsl") finish endif function! SyntaxCheckers_javascript_GetLocList() let makeprg = "jsl -nologo -nofilelisting -nosummary -nocontext -process %" let errorformat='%W%f(%l): lint warning: %m,%-Z%p^,%W%f(%l): warning: %m,%-Z%p^,%E%f(%l): SyntaxError: %m,%-Z%p^,%-G' return SyntasticMake({ 'makeprg': makeprg, 'errorformat': errorformat }) endfunction
Add another jsl warning format
Add another jsl warning format Some warnings don't include the word lint, like the warning from function(x) { var x; }
VimL
apache-2.0
wezhang/vim-setup,wezhang/vim-setup,nikmartin/dotfiles,wezhang/vim-setup,wezhang/vim-setup,wezhang/vim-setup,wezhang/vim-setup
viml
## Code Before: "============================================================================ "File: javascript.vim "Description: Syntax checking plugin for syntastic.vim "Maintainer: Martin Grenfell <martin_grenfell at msn dot com> "License: This program is free software. It comes without any warranty, " to the extent permitted by applicable law. You can redistribute " it and/or modify it under the terms of the Do What The Fuck You " Want To Public License, Version 2, as published by Sam Hocevar. " See http://sam.zoy.org/wtfpl/COPYING for more details. " "============================================================================ if exists("loaded_javascript_syntax_checker") finish endif let loaded_javascript_syntax_checker = 1 "bail if the user doesnt have jsl installed if !executable("jsl") finish endif function! SyntaxCheckers_javascript_GetLocList() let makeprg = "jsl -nologo -nofilelisting -nosummary -nocontext -process %" let errorformat='%W%f(%l): lint warning: %m,%-Z%p^,%E%f(%l): SyntaxError: %m,%-Z%p^,%-G' return SyntasticMake({ 'makeprg': makeprg, 'errorformat': errorformat }) endfunction ## Instruction: Add another jsl warning format Some warnings don't include the word lint, like the warning from function(x) { var x; } ## Code After: "============================================================================ "File: javascript.vim "Description: Syntax checking plugin for syntastic.vim "Maintainer: Martin Grenfell <martin_grenfell at msn dot com> "License: This program is free software. It comes without any warranty, " to the extent permitted by applicable law. You can redistribute " it and/or modify it under the terms of the Do What The Fuck You " Want To Public License, Version 2, as published by Sam Hocevar. " See http://sam.zoy.org/wtfpl/COPYING for more details. " "============================================================================ if exists("loaded_javascript_syntax_checker") finish endif let loaded_javascript_syntax_checker = 1 "bail if the user doesnt have jsl installed if !executable("jsl") finish endif function! SyntaxCheckers_javascript_GetLocList() let makeprg = "jsl -nologo -nofilelisting -nosummary -nocontext -process %" let errorformat='%W%f(%l): lint warning: %m,%-Z%p^,%W%f(%l): warning: %m,%-Z%p^,%E%f(%l): SyntaxError: %m,%-Z%p^,%-G' return SyntasticMake({ 'makeprg': makeprg, 'errorformat': errorformat }) endfunction
1ad02b6afee6f3d2096b1e5c4c91ea1d8a1cc3e0
ci/prepare/00_static_analisys.sh
ci/prepare/00_static_analisys.sh
[ "$COVERITY_BUILD" -ne 0 ] || exit 0 ARCHIVE="coverity_tool.tgz" WithMsg "Testing token" test "${COVERITY_SCAN_TOKEN}" && WithMsg "Downloading coverity scan tool" wget https://scan.coverity.com/download/linux64 --post-data "token=${COVERITY_SCAN_TOKEN}&project=koplyarov%2Fjoint" -O "$ARCHIVE" && Verbose tar -xvzf "$ARCHIVE" || exit 1
[ "$COVERITY_BUILD" -ne 0 ] || exit 0 ARCHIVE="coverity_tool.tgz" WithMsg "Testing token" test "${COVERITY_SCAN_TOKEN}" && WithMsg "Downloading coverity scan tool" wget --no-verbose https://scan.coverity.com/download/linux64 --post-data "token=${COVERITY_SCAN_TOKEN}&project=koplyarov%2Fjoint" -O "$ARCHIVE" && Verbose tar -xvzf "$ARCHIVE" || exit 1
Reduce verbosity of wget in CI scripts
Reduce verbosity of wget in CI scripts
Shell
isc
koplyarov/joint,koplyarov/joint,koplyarov/joint,koplyarov/joint,koplyarov/joint,koplyarov/joint
shell
## Code Before: [ "$COVERITY_BUILD" -ne 0 ] || exit 0 ARCHIVE="coverity_tool.tgz" WithMsg "Testing token" test "${COVERITY_SCAN_TOKEN}" && WithMsg "Downloading coverity scan tool" wget https://scan.coverity.com/download/linux64 --post-data "token=${COVERITY_SCAN_TOKEN}&project=koplyarov%2Fjoint" -O "$ARCHIVE" && Verbose tar -xvzf "$ARCHIVE" || exit 1 ## Instruction: Reduce verbosity of wget in CI scripts ## Code After: [ "$COVERITY_BUILD" -ne 0 ] || exit 0 ARCHIVE="coverity_tool.tgz" WithMsg "Testing token" test "${COVERITY_SCAN_TOKEN}" && WithMsg "Downloading coverity scan tool" wget --no-verbose https://scan.coverity.com/download/linux64 --post-data "token=${COVERITY_SCAN_TOKEN}&project=koplyarov%2Fjoint" -O "$ARCHIVE" && Verbose tar -xvzf "$ARCHIVE" || exit 1
8b4b5705907e1ec5f9dd3148560dc1bf4cd5b9b7
bin/detail/get_nmake_environment.py
bin/detail/get_nmake_environment.py
import detail.util import os import sys def get(arch, vs_version): vs_path_env = 'VS{}0COMNTOOLS'.format(vs_version) vs_path = os.getenv(vs_path_env) if not vs_path: sys.exit( 'Environment variable {} is empty, ' 'looks like Visual Studio {} is not installed'.format( vs_path_env, vs_version ) ) vcvarsall_dir = os.path.join(vs_path, '..', '..', 'VC') if not os.path.isdir(vcvarsall_dir): sys.exit( 'Directory `{}` not exists ' '({} environment variable)'.format(vcvarsall_dir, vs_path_env) ) vcvarsall_path = os.path.join(vcvarsall_dir, 'vcvarsall.bat') if not os.path.isfile(vcvarsall_path): sys.exit( 'File vcvarsall.bat not found in directory ' '`{}` ({} environment variable)'.format(vcvarsall_dir, vs_path_env) ) return detail.util.get_environment_from_batch_command([vcvarsall_path, arch])
import detail.util import os import sys def get(arch, vs_version): vs_path_env = 'VS{}0COMNTOOLS'.format(vs_version) vs_path = os.getenv(vs_path_env) if not vs_path: sys.exit( 'Environment variable {} is empty, ' 'looks like Visual Studio {} is not installed'.format( vs_path_env, vs_version ) ) if vs_version == '15': vcvarsall_dir = os.path.join(vs_path, '..', '..', 'VC', 'Auxiliary', 'Build') else: vcvarsall_dir = os.path.join(vs_path, '..', '..', 'VC') if not os.path.isdir(vcvarsall_dir): sys.exit( 'Directory `{}` not exists ' '({} environment variable)'.format(vcvarsall_dir, vs_path_env) ) vcvarsall_path = os.path.join(vcvarsall_dir, 'vcvarsall.bat') if not os.path.isfile(vcvarsall_path): sys.exit( 'File vcvarsall.bat not found in directory ' '`{}` ({} environment variable)'.format(vcvarsall_dir, vs_path_env) ) return detail.util.get_environment_from_batch_command([vcvarsall_path, arch])
Fix vcvarsall_dir for Visual Studio 2017
polly.py: Fix vcvarsall_dir for Visual Studio 2017 [skip ci]
Python
bsd-2-clause
idscan/polly,idscan/polly,ruslo/polly,ruslo/polly
python
## Code Before: import detail.util import os import sys def get(arch, vs_version): vs_path_env = 'VS{}0COMNTOOLS'.format(vs_version) vs_path = os.getenv(vs_path_env) if not vs_path: sys.exit( 'Environment variable {} is empty, ' 'looks like Visual Studio {} is not installed'.format( vs_path_env, vs_version ) ) vcvarsall_dir = os.path.join(vs_path, '..', '..', 'VC') if not os.path.isdir(vcvarsall_dir): sys.exit( 'Directory `{}` not exists ' '({} environment variable)'.format(vcvarsall_dir, vs_path_env) ) vcvarsall_path = os.path.join(vcvarsall_dir, 'vcvarsall.bat') if not os.path.isfile(vcvarsall_path): sys.exit( 'File vcvarsall.bat not found in directory ' '`{}` ({} environment variable)'.format(vcvarsall_dir, vs_path_env) ) return detail.util.get_environment_from_batch_command([vcvarsall_path, arch]) ## Instruction: polly.py: Fix vcvarsall_dir for Visual Studio 2017 [skip ci] ## Code After: import detail.util import os import sys def get(arch, vs_version): vs_path_env = 'VS{}0COMNTOOLS'.format(vs_version) vs_path = os.getenv(vs_path_env) if not vs_path: sys.exit( 'Environment variable {} is empty, ' 'looks like Visual Studio {} is not installed'.format( vs_path_env, vs_version ) ) if vs_version == '15': vcvarsall_dir = os.path.join(vs_path, '..', '..', 'VC', 'Auxiliary', 'Build') else: vcvarsall_dir = os.path.join(vs_path, '..', '..', 'VC') if not os.path.isdir(vcvarsall_dir): sys.exit( 'Directory `{}` not exists ' '({} environment variable)'.format(vcvarsall_dir, vs_path_env) ) vcvarsall_path = os.path.join(vcvarsall_dir, 'vcvarsall.bat') if not os.path.isfile(vcvarsall_path): sys.exit( 'File vcvarsall.bat not found in directory ' '`{}` ({} environment variable)'.format(vcvarsall_dir, vs_path_env) ) return detail.util.get_environment_from_batch_command([vcvarsall_path, arch])
3787db709c109776585a802f90d01d73c773b43b
README.markdown
README.markdown
[![Build Status](https://secure.travis-ci.org/doctrine/mongodb-odm.png)](http://travis-ci.org/doctrine/mongodb-odm) The Doctrine MongoDB ODM project is a library that provides a PHP object mapping functionality for MongoDB. ## More resources: * [Website](http://www.doctrine-project.org/projects/mongodb_odm) * [Documentation](http://www.doctrine-project.org/projects/mongodb_odm/current/docs/en) * [Issue Tracker](http://www.doctrine-project.org/jira/browse/MODM) * [Downloads](http://github.com/doctrine/mongodb-odm/downloads)
[![Build Status](https://secure.travis-ci.org/doctrine/mongodb-odm.png)](http://travis-ci.org/doctrine/mongodb-odm) The Doctrine MongoDB ODM project is a library that provides a PHP object mapping functionality for MongoDB. ## More resources: * [Website](http://www.doctrine-project.org/projects/mongodb_odm) * [Documentation](http://docs.doctrine-project.org/projects/doctrine-mongodb-odm/en/latest/index.html) * [API](http://www.doctrine-project.org/api/mongodb_odm/1.0/index.html) * [Issue Tracker](http://www.doctrine-project.org/jira/browse/MODM) * [Downloads](http://github.com/doctrine/mongodb-odm/downloads)
Fix documentation link and add link to API
[Readme] Fix documentation link and add link to API
Markdown
mit
doctrine/mongodb-odm,notrix/mongodb-odm,Briareos/mongodb-odm,stylehub/mongodb-odm,alcaeus/mongodb-odm,lyft/mongodb-odm,marcoleong/mongodb-odm,malarzm/mongodb-odm,Xosofox/mongodb-odm,Soullivaneuh/mongodb-odm,stof/mongodb-odm,Bilge/mongodb-odm,castarco/mongodb-odm,kevinyien/mongodb-odm,stylehub/mongodb-odm,waldihuber/mongodb-odm,jmikola/mongodb-odm,stylehub/mongodb-odm,alicomo/mongodb-odm,hyperunknown/mongodb-odm,ifunny/mongodb-odm,malarzm/mongodb-odm,aheinz-sg/mongodb-odm,slawus/mongodb-odm,Ocramius/mongodb-odm,solocommand/mongodb-odm,Tony133/mongodb-odm,oswaldderiemaecker/mongodb-odm,doctrine/mongodb-odm,Bilge/mongodb-odm,Bilge/mongodb-odm,coudenysj/mongodb-odm,slawus/mongodb-odm,rubenrua/mongodb-odm,danizord/mongodb-odm,slawus/mongodb-odm,alicomo/mongodb-odm,alcaeus/mongodb-odm
markdown
## Code Before: [![Build Status](https://secure.travis-ci.org/doctrine/mongodb-odm.png)](http://travis-ci.org/doctrine/mongodb-odm) The Doctrine MongoDB ODM project is a library that provides a PHP object mapping functionality for MongoDB. ## More resources: * [Website](http://www.doctrine-project.org/projects/mongodb_odm) * [Documentation](http://www.doctrine-project.org/projects/mongodb_odm/current/docs/en) * [Issue Tracker](http://www.doctrine-project.org/jira/browse/MODM) * [Downloads](http://github.com/doctrine/mongodb-odm/downloads) ## Instruction: [Readme] Fix documentation link and add link to API ## Code After: [![Build Status](https://secure.travis-ci.org/doctrine/mongodb-odm.png)](http://travis-ci.org/doctrine/mongodb-odm) The Doctrine MongoDB ODM project is a library that provides a PHP object mapping functionality for MongoDB. ## More resources: * [Website](http://www.doctrine-project.org/projects/mongodb_odm) * [Documentation](http://docs.doctrine-project.org/projects/doctrine-mongodb-odm/en/latest/index.html) * [API](http://www.doctrine-project.org/api/mongodb_odm/1.0/index.html) * [Issue Tracker](http://www.doctrine-project.org/jira/browse/MODM) * [Downloads](http://github.com/doctrine/mongodb-odm/downloads)
331935a5a0b571e1ee51099a2bd3849146dbc438
app/Modules/CommissionModule/AuditTrail/commissionUpdated.latte
app/Modules/CommissionModule/AuditTrail/commissionUpdated.latte
{layout $layout} {var $entityClass = 'PAF\Modules\CommissionModule\Model\Commission'} <div class="card-header" n:block="#card-header"> {include #stamp, 'instant' => $logEvent->instant} <span>{_$logEvent->type, ['actor' => $actor]}</span> </div> <div class="card-body" n:block="#card-body"> {foreach $parameters['changes'] ?? [] as $property => $change} {control $control:propertyDiff, $entityClass, $property, $change['newValue']??null, $change['oldValue']??null} {/foreach} </div>
{layout $layout} {var $entityClass = 'pafCommissions.commission'} <div class="card-header" n:block="#card-header"> {include #stamp, 'instant' => $logEvent->instant} <span>{_$logEvent->type, ['actor' => $actor]}</span> </div> <div class="card-body" n:block="#card-body"> {foreach $parameters['changes'] ?? [] as $property => $change} {control $control:propertyDiff, $entityClass, $property, $change['newValue']??null, $change['oldValue']??null} {/foreach} </div>
Fix audit cards for commission entity
Fix audit cards for commission entity
Latte
mit
Thoronir42/paf,Thoronir42/paf,Thoronir42/paf,Thoronir42/paf
latte
## Code Before: {layout $layout} {var $entityClass = 'PAF\Modules\CommissionModule\Model\Commission'} <div class="card-header" n:block="#card-header"> {include #stamp, 'instant' => $logEvent->instant} <span>{_$logEvent->type, ['actor' => $actor]}</span> </div> <div class="card-body" n:block="#card-body"> {foreach $parameters['changes'] ?? [] as $property => $change} {control $control:propertyDiff, $entityClass, $property, $change['newValue']??null, $change['oldValue']??null} {/foreach} </div> ## Instruction: Fix audit cards for commission entity ## Code After: {layout $layout} {var $entityClass = 'pafCommissions.commission'} <div class="card-header" n:block="#card-header"> {include #stamp, 'instant' => $logEvent->instant} <span>{_$logEvent->type, ['actor' => $actor]}</span> </div> <div class="card-body" n:block="#card-body"> {foreach $parameters['changes'] ?? [] as $property => $change} {control $control:propertyDiff, $entityClass, $property, $change['newValue']??null, $change['oldValue']??null} {/foreach} </div>
73c0073e5961a5661fc88d217fafb9805d9466e0
kanban/component/board/directives/boardsDirective.js
kanban/component/board/directives/boardsDirective.js
/** * Created by xubt on 5/26/16. */ kanbanApp.directive('boardBanner', function () { return { restrict: 'E', templateUrl: 'component/board/partials/board-banner.html', replace: true, controller: ['$scope', '$location', 'boardsService', 'localStorageService', function ($scope, $location, boardsService, localStorageService) { var boardLink = localStorageService.get("boardLink"); var boardPromise = boardsService.loadBoardByLink(boardLink); boardPromise.then(function (_board) { $scope.board = _board; }); $scope.toBoards = function () { $location.path('/boards'); }; }] }; });
/** * Created by xubt on 5/26/16. */ kanbanApp.directive('boardBanner', function () { return { restrict: 'E', templateUrl: 'component/board/partials/board-banner.html', replace: true, controller: ['$scope', '$location', 'boardsService', 'localStorageService', function ($scope, $location, boardsService, localStorageService) { var boardLink = localStorageService.get("boardLink"); var boardsLink = localStorageService.get("identity.userName") + '/boards'; var boardPromise = boardsService.loadBoardByLink(boardLink); boardPromise.then(function (_board) { $scope.board = _board; }); $scope.toBoards = function () { $location.path(boardsLink); }; }] }; });
Load boardsLink from local storage.
Load boardsLink from local storage.
JavaScript
mit
thiki-org/thiki-kanban-web,thiki-org/thiki-kanban-web,thiki-org/thiki-kanban-web
javascript
## Code Before: /** * Created by xubt on 5/26/16. */ kanbanApp.directive('boardBanner', function () { return { restrict: 'E', templateUrl: 'component/board/partials/board-banner.html', replace: true, controller: ['$scope', '$location', 'boardsService', 'localStorageService', function ($scope, $location, boardsService, localStorageService) { var boardLink = localStorageService.get("boardLink"); var boardPromise = boardsService.loadBoardByLink(boardLink); boardPromise.then(function (_board) { $scope.board = _board; }); $scope.toBoards = function () { $location.path('/boards'); }; }] }; }); ## Instruction: Load boardsLink from local storage. ## Code After: /** * Created by xubt on 5/26/16. */ kanbanApp.directive('boardBanner', function () { return { restrict: 'E', templateUrl: 'component/board/partials/board-banner.html', replace: true, controller: ['$scope', '$location', 'boardsService', 'localStorageService', function ($scope, $location, boardsService, localStorageService) { var boardLink = localStorageService.get("boardLink"); var boardsLink = localStorageService.get("identity.userName") + '/boards'; var boardPromise = boardsService.loadBoardByLink(boardLink); boardPromise.then(function (_board) { $scope.board = _board; }); $scope.toBoards = function () { $location.path(boardsLink); }; }] }; });
a41c009c5ff7e52416861cf87a908186ee93d2b4
Casks/blender.rb
Casks/blender.rb
cask :v1 => 'blender' do version '2.76' sha256 '542bc7fe9871c5a8f80efd5b9657416eda45d3dbeb455189367303203da695c9' url "https://download.blender.org/release/Blender#{version.to_f}/blender-#{version}-OSX_10.6-x86_64.zip" name 'Blender' homepage 'https://www.blender.org/' license :gpl app 'Blender.app' app 'BlenderPlayer.app' end
cask :v1 => 'blender' do version '2.76a' sha256 '37b583d19eb16123065b62a7c05c574d9ebee2ff7497c1180466447ce6dab383' url "https://download.blender.org/release/Blender#{version.to_f}/blender-#{version}-OSX_10.6-x86_64.zip" name 'Blender' homepage 'https://www.blender.org/' license :gpl app 'Blender.app' app 'BlenderPlayer.app' end
Upgrade Blender to 2.76a bugfix release
Upgrade Blender to 2.76a bugfix release
Ruby
bsd-2-clause
xight/homebrew-cask,sgnh/homebrew-cask,cprecioso/homebrew-cask,napaxton/homebrew-cask,nrlquaker/homebrew-cask,mjdescy/homebrew-cask,lifepillar/homebrew-cask,Keloran/homebrew-cask,mchlrmrz/homebrew-cask,chuanxd/homebrew-cask,vin047/homebrew-cask,Saklad5/homebrew-cask,antogg/homebrew-cask,asins/homebrew-cask,greg5green/homebrew-cask,tjnycum/homebrew-cask,ianyh/homebrew-cask,winkelsdorf/homebrew-cask,chuanxd/homebrew-cask,stigkj/homebrew-caskroom-cask,tjt263/homebrew-cask,dictcp/homebrew-cask,elyscape/homebrew-cask,deiga/homebrew-cask,moimikey/homebrew-cask,asbachb/homebrew-cask,moogar0880/homebrew-cask,tmoreira2020/homebrew,haha1903/homebrew-cask,fharbe/homebrew-cask,Amorymeltzer/homebrew-cask,winkelsdorf/homebrew-cask,diguage/homebrew-cask,stonehippo/homebrew-cask,theoriginalgri/homebrew-cask,maxnordlund/homebrew-cask,miguelfrde/homebrew-cask,adrianchia/homebrew-cask,seanorama/homebrew-cask,markthetech/homebrew-cask,Ephemera/homebrew-cask,daften/homebrew-cask,seanzxx/homebrew-cask,vitorgalvao/homebrew-cask,andrewdisley/homebrew-cask,sanyer/homebrew-cask,Fedalto/homebrew-cask,mlocher/homebrew-cask,rajiv/homebrew-cask,crzrcn/homebrew-cask,jangalinski/homebrew-cask,malford/homebrew-cask,kronicd/homebrew-cask,singingwolfboy/homebrew-cask,yutarody/homebrew-cask,scribblemaniac/homebrew-cask,diogodamiani/homebrew-cask,joschi/homebrew-cask,tsparber/homebrew-cask,sohtsuka/homebrew-cask,ptb/homebrew-cask,jonathanwiesel/homebrew-cask,Amorymeltzer/homebrew-cask,jellyfishcoder/homebrew-cask,guerrero/homebrew-cask,jpmat296/homebrew-cask,alebcay/homebrew-cask,kesara/homebrew-cask,mathbunnyru/homebrew-cask,squid314/homebrew-cask,dcondrey/homebrew-cask,jawshooah/homebrew-cask,albertico/homebrew-cask,sgnh/homebrew-cask,pacav69/homebrew-cask,xtian/homebrew-cask,psibre/homebrew-cask,ksylvan/homebrew-cask,JosephViolago/homebrew-cask,Ngrd/homebrew-cask,mauricerkelly/homebrew-cask,MichaelPei/homebrew-cask,shonjir/homebrew-cask,stonehippo/homebrew-cask,riyad/homebrew-cask,cliffcotino/homebrew-cask,dvdoliveira/homebrew-cask,inz/homebrew-cask,lumaxis/homebrew-cask,ericbn/homebrew-cask,mingzhi22/homebrew-cask,dustinblackman/homebrew-cask,joshka/homebrew-cask,timsutton/homebrew-cask,otaran/homebrew-cask,moogar0880/homebrew-cask,yuhki50/homebrew-cask,jgarber623/homebrew-cask,ebraminio/homebrew-cask,guerrero/homebrew-cask,RJHsiao/homebrew-cask,wKovacs64/homebrew-cask,a1russell/homebrew-cask,ddm/homebrew-cask,jeroenseegers/homebrew-cask,fanquake/homebrew-cask,klane/homebrew-cask,thehunmonkgroup/homebrew-cask,kongslund/homebrew-cask,blogabe/homebrew-cask,markhuber/homebrew-cask,jalaziz/homebrew-cask,nathansgreen/homebrew-cask,arronmabrey/homebrew-cask,imgarylai/homebrew-cask,colindean/homebrew-cask,Ketouem/homebrew-cask,wmorin/homebrew-cask,yumitsu/homebrew-cask,xyb/homebrew-cask,brianshumate/homebrew-cask,ptb/homebrew-cask,patresi/homebrew-cask,devmynd/homebrew-cask,tjt263/homebrew-cask,dwihn0r/homebrew-cask,vigosan/homebrew-cask,jalaziz/homebrew-cask,samnung/homebrew-cask,gyndav/homebrew-cask,MircoT/homebrew-cask,boecko/homebrew-cask,mchlrmrz/homebrew-cask,stephenwade/homebrew-cask,My2ndAngelic/homebrew-cask,feigaochn/homebrew-cask,Ibuprofen/homebrew-cask,scottsuch/homebrew-cask,wastrachan/homebrew-cask,cprecioso/homebrew-cask,cfillion/homebrew-cask,stephenwade/homebrew-cask,AnastasiaSulyagina/homebrew-cask,alebcay/homebrew-cask,JacopKane/homebrew-cask,lucasmezencio/homebrew-cask,6uclz1/homebrew-cask,tjnycum/homebrew-cask,decrement/homebrew-cask,Cottser/homebrew-cask,casidiablo/homebrew-cask,MichaelPei/homebrew-cask,kili
ankoe/homebrew-cask,gmkey/homebrew-cask,corbt/homebrew-cask,lifepillar/homebrew-cask,larseggert/homebrew-cask,jppelteret/homebrew-cask,skatsuta/homebrew-cask,rogeriopradoj/homebrew-cask,jacobbednarz/homebrew-cask,mazehall/homebrew-cask,kteru/homebrew-cask,n0ts/homebrew-cask,dictcp/homebrew-cask,koenrh/homebrew-cask,BenjaminHCCarr/homebrew-cask,Ephemera/homebrew-cask,tolbkni/homebrew-cask,bdhess/homebrew-cask,sebcode/homebrew-cask,ianyh/homebrew-cask,optikfluffel/homebrew-cask,miccal/homebrew-cask,hanxue/caskroom,Saklad5/homebrew-cask,rajiv/homebrew-cask,faun/homebrew-cask,nshemonsky/homebrew-cask,codeurge/homebrew-cask,fanquake/homebrew-cask,lukasbestle/homebrew-cask,johndbritton/homebrew-cask,BenjaminHCCarr/homebrew-cask,onlynone/homebrew-cask,lantrix/homebrew-cask,timsutton/homebrew-cask,nrlquaker/homebrew-cask,exherb/homebrew-cask,kpearson/homebrew-cask,renaudguerin/homebrew-cask,blogabe/homebrew-cask,wickedsp1d3r/homebrew-cask,vitorgalvao/homebrew-cask,hanxue/caskroom,hovancik/homebrew-cask,esebastian/homebrew-cask,malob/homebrew-cask,mikem/homebrew-cask,mattrobenolt/homebrew-cask,artdevjs/homebrew-cask,tedbundyjr/homebrew-cask,jawshooah/homebrew-cask,usami-k/homebrew-cask,mattrobenolt/homebrew-cask,xakraz/homebrew-cask,malford/homebrew-cask,timsutton/homebrew-cask,jeroenseegers/homebrew-cask,sanyer/homebrew-cask,scribblemaniac/homebrew-cask,joschi/homebrew-cask,sscotth/homebrew-cask,mhubig/homebrew-cask,winkelsdorf/homebrew-cask,psibre/homebrew-cask,miku/homebrew-cask,flaviocamilo/homebrew-cask,jasmas/homebrew-cask,joshka/homebrew-cask,gerrypower/homebrew-cask,jgarber623/homebrew-cask,pkq/homebrew-cask,kingthorin/homebrew-cask,forevergenin/homebrew-cask,larseggert/homebrew-cask,SentinelWarren/homebrew-cask,theoriginalgri/homebrew-cask,Ibuprofen/homebrew-cask,buo/homebrew-cask,claui/homebrew-cask,deanmorin/homebrew-cask,antogg/homebrew-cask,chadcatlett/caskroom-homebrew-cask,lukasbestle/homebrew-cask,mahori/homebrew-cask,aguynamedryan/homebrew-cask,gerrypower/homebrew-cask,fharbe/homebrew-cask,chrisfinazzo/homebrew-cask,toonetown/homebrew-cask,bric3/homebrew-cask,wickles/homebrew-cask,nathancahill/homebrew-cask,dcondrey/homebrew-cask,blainesch/homebrew-cask,andrewdisley/homebrew-cask,bcomnes/homebrew-cask,hellosky806/homebrew-cask,colindunn/homebrew-cask,ebraminio/homebrew-cask,n8henrie/homebrew-cask,tan9/homebrew-cask,tan9/homebrew-cask,jedahan/homebrew-cask,phpwutz/homebrew-cask,robertgzr/homebrew-cask,markthetech/homebrew-cask,josa42/homebrew-cask,kingthorin/homebrew-cask,jacobbednarz/homebrew-cask,victorpopkov/homebrew-cask,xtian/homebrew-cask,jaredsampson/homebrew-cask,elyscape/homebrew-cask,greg5green/homebrew-cask,kteru/homebrew-cask,leipert/homebrew-cask,feigaochn/homebrew-cask,hakamadare/homebrew-cask,brianshumate/homebrew-cask,leipert/homebrew-cask,reitermarkus/homebrew-cask,m3nu/homebrew-cask,blainesch/homebrew-cask,kronicd/homebrew-cask,SentinelWarren/homebrew-cask,jedahan/homebrew-cask,MerelyAPseudonym/homebrew-cask,helloIAmPau/homebrew-cask,deanmorin/homebrew-cask,mahori/homebrew-cask,lucasmezencio/homebrew-cask,kkdd/homebrew-cask,uetchy/homebrew-cask,mjgardner/homebrew-cask,miccal/homebrew-cask,claui/homebrew-cask,colindean/homebrew-cask,hyuna917/homebrew-cask,codeurge/homebrew-cask,ninjahoahong/homebrew-cask,dustinblackman/homebrew-cask,andyli/homebrew-cask,dwihn0r/homebrew-cask,reelsense/homebrew-cask,yurikoles/homebrew-cask,cobyism/homebrew-cask,bric3/homebrew-cask,n8henrie/homebrew-cask,colindunn/homebrew-cask,xakraz/homebrew-cask,caskroom/homebrew-cask,Labutin/homebr
ew-cask,KosherBacon/homebrew-cask,dvdoliveira/homebrew-cask,patresi/homebrew-cask,napaxton/homebrew-cask,Fedalto/homebrew-cask,howie/homebrew-cask,franklouwers/homebrew-cask,jbeagley52/homebrew-cask,sscotth/homebrew-cask,tarwich/homebrew-cask,afh/homebrew-cask,lantrix/homebrew-cask,moimikey/homebrew-cask,stevehedrick/homebrew-cask,shoichiaizawa/homebrew-cask,deiga/homebrew-cask,janlugt/homebrew-cask,mrmachine/homebrew-cask,Amorymeltzer/homebrew-cask,artdevjs/homebrew-cask,maxnordlund/homebrew-cask,adrianchia/homebrew-cask,jeroenj/homebrew-cask,samshadwell/homebrew-cask,thii/homebrew-cask,wastrachan/homebrew-cask,amatos/homebrew-cask,ericbn/homebrew-cask,lumaxis/homebrew-cask,renaudguerin/homebrew-cask,gabrielizaias/homebrew-cask,decrement/homebrew-cask,y00rb/homebrew-cask,yuhki50/homebrew-cask,paour/homebrew-cask,hovancik/homebrew-cask,cobyism/homebrew-cask,Bombenleger/homebrew-cask,scottsuch/homebrew-cask,cobyism/homebrew-cask,mingzhi22/homebrew-cask,dwkns/homebrew-cask,thehunmonkgroup/homebrew-cask,retrography/homebrew-cask,johnjelinek/homebrew-cask,miku/homebrew-cask,buo/homebrew-cask,shonjir/homebrew-cask,pacav69/homebrew-cask,cfillion/homebrew-cask,bcomnes/homebrew-cask,samdoran/homebrew-cask,Keloran/homebrew-cask,Cottser/homebrew-cask,doits/homebrew-cask,AnastasiaSulyagina/homebrew-cask,6uclz1/homebrew-cask,nightscape/homebrew-cask,coeligena/homebrew-customized,esebastian/homebrew-cask,yutarody/homebrew-cask,scottsuch/homebrew-cask,ywfwj2008/homebrew-cask,jaredsampson/homebrew-cask,miccal/homebrew-cask,Dremora/homebrew-cask,MoOx/homebrew-cask,kTitan/homebrew-cask,MerelyAPseudonym/homebrew-cask,haha1903/homebrew-cask,doits/homebrew-cask,josa42/homebrew-cask,paour/homebrew-cask,klane/homebrew-cask,yumitsu/homebrew-cask,retrography/homebrew-cask,inz/homebrew-cask,paour/homebrew-cask,singingwolfboy/homebrew-cask,m3nu/homebrew-cask,claui/homebrew-cask,kingthorin/homebrew-cask,13k/homebrew-cask,opsdev-ws/homebrew-cask,seanzxx/homebrew-cask,lukeadams/homebrew-cask,vigosan/homebrew-cask,kpearson/homebrew-cask,My2ndAngelic/homebrew-cask,sanchezm/homebrew-cask,gabrielizaias/homebrew-cask,santoshsahoo/homebrew-cask,inta/homebrew-cask,asins/homebrew-cask,giannitm/homebrew-cask,Ngrd/homebrew-cask,tyage/homebrew-cask,onlynone/homebrew-cask,wmorin/homebrew-cask,ksato9700/homebrew-cask,rogeriopradoj/homebrew-cask,slack4u/homebrew-cask,cliffcotino/homebrew-cask,xcezx/homebrew-cask,blogabe/homebrew-cask,mathbunnyru/homebrew-cask,puffdad/homebrew-cask,miguelfrde/homebrew-cask,riyad/homebrew-cask,okket/homebrew-cask,josa42/homebrew-cask,reitermarkus/homebrew-cask,franklouwers/homebrew-cask,farmerchris/homebrew-cask,kassi/homebrew-cask,danielbayley/homebrew-cask,elnappo/homebrew-cask,jeroenj/homebrew-cask,corbt/homebrew-cask,a1russell/homebrew-cask,xcezx/homebrew-cask,gurghet/homebrew-cask,nrlquaker/homebrew-cask,xyb/homebrew-cask,Labutin/homebrew-cask,sanchezm/homebrew-cask,schneidmaster/homebrew-cask,uetchy/homebrew-cask,danielbayley/homebrew-cask,caskroom/homebrew-cask,julionc/homebrew-cask,perfide/homebrew-cask,puffdad/homebrew-cask,robertgzr/homebrew-cask,ninjahoahong/homebrew-cask,jmeridth/homebrew-cask,axodys/homebrew-cask,shorshe/homebrew-cask,stephenwade/homebrew-cask,hristozov/homebrew-cask,gilesdring/homebrew-cask,hristozov/homebrew-cask,nightscape/homebrew-cask,CameronGarrett/homebrew-cask,schneidmaster/homebrew-cask,aguynamedryan/homebrew-cask,amatos/homebrew-cask,goxberry/homebrew-cask,zerrot/homebrew-cask,Ephemera/homebrew-cask,anbotero/homebrew-cask,0rax/homebrew-cask,coeligena/homebrew-cu
stomized,mishari/homebrew-cask,retbrown/homebrew-cask,jbeagley52/homebrew-cask,Bombenleger/homebrew-cask,rickychilcott/homebrew-cask,tangestani/homebrew-cask,imgarylai/homebrew-cask,nathanielvarona/homebrew-cask,uetchy/homebrew-cask,hellosky806/homebrew-cask,reelsense/homebrew-cask,sosedoff/homebrew-cask,ericbn/homebrew-cask,FredLackeyOfficial/homebrew-cask,reitermarkus/homebrew-cask,nathancahill/homebrew-cask,rickychilcott/homebrew-cask,cedwardsmedia/homebrew-cask,bosr/homebrew-cask,antogg/homebrew-cask,jellyfishcoder/homebrew-cask,pkq/homebrew-cask,chadcatlett/caskroom-homebrew-cask,kongslund/homebrew-cask,jeanregisser/homebrew-cask,tangestani/homebrew-cask,MoOx/homebrew-cask,axodys/homebrew-cask,boecko/homebrew-cask,lukeadams/homebrew-cask,gurghet/homebrew-cask,ddm/homebrew-cask,m3nu/homebrew-cask,jonathanwiesel/homebrew-cask,gibsjose/homebrew-cask,morganestes/homebrew-cask,kesara/homebrew-cask,JosephViolago/homebrew-cask,williamboman/homebrew-cask,bosr/homebrew-cask,opsdev-ws/homebrew-cask,BenjaminHCCarr/homebrew-cask,optikfluffel/homebrew-cask,stigkj/homebrew-caskroom-cask,mchlrmrz/homebrew-cask,sohtsuka/homebrew-cask,neverfox/homebrew-cask,n0ts/homebrew-cask,santoshsahoo/homebrew-cask,deiga/homebrew-cask,imgarylai/homebrew-cask,lcasey001/homebrew-cask,albertico/homebrew-cask,okket/homebrew-cask,vin047/homebrew-cask,casidiablo/homebrew-cask,jasmas/homebrew-cask,xyb/homebrew-cask,retbrown/homebrew-cask,joshka/homebrew-cask,athrunsun/homebrew-cask,wickedsp1d3r/homebrew-cask,joschi/homebrew-cask,ksylvan/homebrew-cask,hakamadare/homebrew-cask,mgryszko/homebrew-cask,thomanq/homebrew-cask,diogodamiani/homebrew-cask,wmorin/homebrew-cask,helloIAmPau/homebrew-cask,hanxue/caskroom,williamboman/homebrew-cask,FranklinChen/homebrew-cask,mathbunnyru/homebrew-cask,Ketouem/homebrew-cask,jgarber623/homebrew-cask,perfide/homebrew-cask,JosephViolago/homebrew-cask,cblecker/homebrew-cask,Dremora/homebrew-cask,tmoreira2020/homebrew,KosherBacon/homebrew-cask,sanyer/homebrew-cask,CameronGarrett/homebrew-cask,a1russell/homebrew-cask,ksato9700/homebrew-cask,jalaziz/homebrew-cask,faun/homebrew-cask,gyndav/homebrew-cask,howie/homebrew-cask,shorshe/homebrew-cask,janlugt/homebrew-cask,Gasol/homebrew-cask,xight/homebrew-cask,sjackman/homebrew-cask,linc01n/homebrew-cask,tyage/homebrew-cask,kTitan/homebrew-cask,giannitm/homebrew-cask,FredLackeyOfficial/homebrew-cask,yutarody/homebrew-cask,zerrot/homebrew-cask,andyli/homebrew-cask,chrisfinazzo/homebrew-cask,JacopKane/homebrew-cask,esebastian/homebrew-cask,nathanielvarona/homebrew-cask,malob/homebrew-cask,JikkuJose/homebrew-cask,neverfox/homebrew-cask,bric3/homebrew-cask,gyndav/homebrew-cask,mikem/homebrew-cask,nathanielvarona/homebrew-cask,tjnycum/homebrew-cask,lcasey001/homebrew-cask,afh/homebrew-cask,kiliankoe/homebrew-cask,jeanregisser/homebrew-cask,neverfox/homebrew-cask,pkq/homebrew-cask,daften/homebrew-cask,0xadada/homebrew-cask,markhuber/homebrew-cask,alexg0/homebrew-cask,bdhess/homebrew-cask,mishari/homebrew-cask,inta/homebrew-cask,mahori/homebrew-cask,jmeridth/homebrew-cask,forevergenin/homebrew-cask,dictcp/homebrew-cask,mjgardner/homebrew-cask,troyxmccall/homebrew-cask,rogeriopradoj/homebrew-cask,JikkuJose/homebrew-cask,renard/homebrew-cask,goxberry/homebrew-cask,usami-k/homebrew-cask,zmwangx/homebrew-cask,tarwich/homebrew-cask,samnung/homebrew-cask,linc01n/homebrew-cask,zmwangx/homebrew-cask,tedbundyjr/homebrew-cask,arronmabrey/homebrew-cask,danielbayley/homebrew-cask,devmynd/homebrew-cask,mwean/homebrew-cask,victorpopkov/homebrew-cask,sjackman/homebrew-cask
,julionc/homebrew-cask,johnjelinek/homebrew-cask,koenrh/homebrew-cask,mhubig/homebrew-cask,alexg0/homebrew-cask,stonehippo/homebrew-cask,athrunsun/homebrew-cask,yurikoles/homebrew-cask,tsparber/homebrew-cask,slack4u/homebrew-cask,stevehedrick/homebrew-cask,anbotero/homebrew-cask,morganestes/homebrew-cask,cedwardsmedia/homebrew-cask,shoichiaizawa/homebrew-cask,troyxmccall/homebrew-cask,tedski/homebrew-cask,mauricerkelly/homebrew-cask,skatsuta/homebrew-cask,jiashuw/homebrew-cask,alebcay/homebrew-cask,gmkey/homebrew-cask,sscotth/homebrew-cask,sosedoff/homebrew-cask,nathansgreen/homebrew-cask,syscrusher/homebrew-cask,malob/homebrew-cask,kamilboratynski/homebrew-cask,optikfluffel/homebrew-cask,adrianchia/homebrew-cask,diguage/homebrew-cask,thii/homebrew-cask,tedski/homebrew-cask,jconley/homebrew-cask,mgryszko/homebrew-cask,rajiv/homebrew-cask,MircoT/homebrew-cask,scribblemaniac/homebrew-cask,mjdescy/homebrew-cask,chrisfinazzo/homebrew-cask,phpwutz/homebrew-cask,squid314/homebrew-cask,kesara/homebrew-cask,kassi/homebrew-cask,thomanq/homebrew-cask,mwean/homebrew-cask,jiashuw/homebrew-cask,0rax/homebrew-cask,shonjir/homebrew-cask,jpmat296/homebrew-cask,wickles/homebrew-cask,elnappo/homebrew-cask,jangalinski/homebrew-cask,michelegera/homebrew-cask,FinalDes/homebrew-cask,julionc/homebrew-cask,13k/homebrew-cask,kamilboratynski/homebrew-cask,andrewdisley/homebrew-cask,nshemonsky/homebrew-cask,0xadada/homebrew-cask,samdoran/homebrew-cask,hyuna917/homebrew-cask,kkdd/homebrew-cask,mrmachine/homebrew-cask,farmerchris/homebrew-cask,muan/homebrew-cask,FinalDes/homebrew-cask,tolbkni/homebrew-cask,toonetown/homebrew-cask,renard/homebrew-cask,samshadwell/homebrew-cask,coeligena/homebrew-customized,gilesdring/homebrew-cask,syscrusher/homebrew-cask,mazehall/homebrew-cask,otaran/homebrew-cask,shoichiaizawa/homebrew-cask,johndbritton/homebrew-cask,cblecker/homebrew-cask,gibsjose/homebrew-cask,RJHsiao/homebrew-cask,alexg0/homebrew-cask,mattrobenolt/homebrew-cask,cblecker/homebrew-cask,crzrcn/homebrew-cask,exherb/homebrew-cask,muan/homebrew-cask,seanorama/homebrew-cask,sebcode/homebrew-cask,ywfwj2008/homebrew-cask,singingwolfboy/homebrew-cask,y00rb/homebrew-cask,xight/homebrew-cask,flaviocamilo/homebrew-cask,JacopKane/homebrew-cask,jppelteret/homebrew-cask,moimikey/homebrew-cask,jconley/homebrew-cask,yurikoles/homebrew-cask,Gasol/homebrew-cask,mjgardner/homebrew-cask,mlocher/homebrew-cask,michelegera/homebrew-cask,wKovacs64/homebrew-cask,tangestani/homebrew-cask,dwkns/homebrew-cask,asbachb/homebrew-cask,FranklinChen/homebrew-cask
ruby
## Code Before: cask :v1 => 'blender' do version '2.76' sha256 '542bc7fe9871c5a8f80efd5b9657416eda45d3dbeb455189367303203da695c9' url "https://download.blender.org/release/Blender#{version.to_f}/blender-#{version}-OSX_10.6-x86_64.zip" name 'Blender' homepage 'https://www.blender.org/' license :gpl app 'Blender.app' app 'BlenderPlayer.app' end ## Instruction: Upgrade Blender to 2.76a bugfix release ## Code After: cask :v1 => 'blender' do version '2.76a' sha256 '37b583d19eb16123065b62a7c05c574d9ebee2ff7497c1180466447ce6dab383' url "https://download.blender.org/release/Blender#{version.to_f}/blender-#{version}-OSX_10.6-x86_64.zip" name 'Blender' homepage 'https://www.blender.org/' license :gpl app 'Blender.app' app 'BlenderPlayer.app' end
788eb21606e027f0889c7565de769ce58e39a377
.travis.yml
.travis.yml
language: python cache: directories: - $HOME/.cache/pip - $HOME/.cache/pre-commit python: - "3.6" addons: postgresql: "9.6" apt: packages: - expect-dev # provides unbuffer utility stages: - test jobs: include: - stage: test env: - TESTS=1 ODOO_REPO="odoo/odoo" MAKEPOT="1" - stage: test env: - TESTS=1 ODOO_REPO="OCA/OCB" env: global: - VERSION="14.0" TESTS="0" LINT_CHECK="0" MAKEPOT="0" - MQT_DEP=PIP install: - git clone --depth=1 https://github.com/OCA/maintainer-quality-tools.git ${HOME}/maintainer-quality-tools - export PATH=${HOME}/maintainer-quality-tools/travis:${PATH} - travis_install_nightly script: - travis_run_tests after_success: - travis_after_tests_success
language: python cache: directories: - $HOME/.cache/pip - $HOME/.cache/pre-commit python: - "3.6" addons: postgresql: "9.6" apt: packages: - expect-dev # provides unbuffer utility stages: - linting - test jobs: include: - stage: linting name: "pre-commit" before_install: install: pip install pre-commit script: pre-commit run --all --show-diff-on-failure --verbose --color always after_success: - stage: test env: - TESTS="1" ODOO_REPO="odoo/odoo" MAKEPOT="1" - stage: test env: - TESTS="1" ODOO_REPO="OCA/OCB" EXCLUDE="fetchmail_incoming_log,fetchmail_notify_error_to_sender_test" env: global: - VERSION="14.0" TESTS="0" LINT_CHECK="0" MAKEPOT="0" - MQT_DEP=PIP install: - git clone --depth=1 https://github.com/OCA/maintainer-quality-tools.git ${HOME}/maintainer-quality-tools - export PATH=${HOME}/maintainer-quality-tools/travis:${PATH} - travis_install_nightly script: - travis_run_tests after_success: - travis_after_tests_success
Exclude fetchmail_incoming_log + fetchmail_notify_error_to_sender_test addon to prevent error in Travis related to OCA/OCB
[FIX] Exclude fetchmail_incoming_log + fetchmail_notify_error_to_sender_test addon to prevent error in Travis related to OCA/OCB
YAML
agpl-3.0
OCA/server-tools,YannickB/server-tools,YannickB/server-tools,OCA/server-tools,OCA/server-tools,YannickB/server-tools
yaml
## Code Before: language: python cache: directories: - $HOME/.cache/pip - $HOME/.cache/pre-commit python: - "3.6" addons: postgresql: "9.6" apt: packages: - expect-dev # provides unbuffer utility stages: - test jobs: include: - stage: test env: - TESTS=1 ODOO_REPO="odoo/odoo" MAKEPOT="1" - stage: test env: - TESTS=1 ODOO_REPO="OCA/OCB" env: global: - VERSION="14.0" TESTS="0" LINT_CHECK="0" MAKEPOT="0" - MQT_DEP=PIP install: - git clone --depth=1 https://github.com/OCA/maintainer-quality-tools.git ${HOME}/maintainer-quality-tools - export PATH=${HOME}/maintainer-quality-tools/travis:${PATH} - travis_install_nightly script: - travis_run_tests after_success: - travis_after_tests_success ## Instruction: [FIX] Exclude fetchmail_incoming_log + fetchmail_notify_error_to_sender_test addon to prevent error in Travis related to OCA/OCB ## Code After: language: python cache: directories: - $HOME/.cache/pip - $HOME/.cache/pre-commit python: - "3.6" addons: postgresql: "9.6" apt: packages: - expect-dev # provides unbuffer utility stages: - linting - test jobs: include: - stage: linting name: "pre-commit" before_install: install: pip install pre-commit script: pre-commit run --all --show-diff-on-failure --verbose --color always after_success: - stage: test env: - TESTS="1" ODOO_REPO="odoo/odoo" MAKEPOT="1" - stage: test env: - TESTS="1" ODOO_REPO="OCA/OCB" EXCLUDE="fetchmail_incoming_log,fetchmail_notify_error_to_sender_test" env: global: - VERSION="14.0" TESTS="0" LINT_CHECK="0" MAKEPOT="0" - MQT_DEP=PIP install: - git clone --depth=1 https://github.com/OCA/maintainer-quality-tools.git ${HOME}/maintainer-quality-tools - export PATH=${HOME}/maintainer-quality-tools/travis:${PATH} - travis_install_nightly script: - travis_run_tests after_success: - travis_after_tests_success
18c47b4afe28c8afd93ab1bfe0e151a1906702dc
lib/cfi/cfi_blacklist.txt
lib/cfi/cfi_blacklist.txt
fun:*8allocateEjPKv fun:*8allocateEmPKv # std::get_temporary_buffer, likewise (libstdc++, libc++). fun:_ZSt20get_temporary_buffer* fun:_ZNSt3__120get_temporary_buffer* # STL address-of magic (libstdc++, libc++). fun:*__addressof* fun:_ZNSt3__19addressof* # Windows C++ stdlib headers that contain bad unrelated casts. src:*xmemory0 src:*xstddef # std::_Sp_counted_ptr_inplace::_Sp_counted_ptr_inplace() (libstdc++). # This ctor is used by std::make_shared and needs to cast to uninitialized T* # in order to call std::allocator_traits<T>::construct. fun:_ZNSt23_Sp_counted_ptr_inplace*
fun:_ZSt20get_temporary_buffer* fun:_ZNSt3__120get_temporary_buffer* # STL address-of magic (libstdc++, libc++). fun:*__addressof* fun:_ZNSt3__19addressof* # Windows C++ stdlib headers that contain bad unrelated casts. src:*xmemory0 src:*xstddef # std::_Sp_counted_ptr_inplace::_Sp_counted_ptr_inplace() (libstdc++). # This ctor is used by std::make_shared and needs to cast to uninitialized T* # in order to call std::allocator_traits<T>::construct. fun:_ZNSt23_Sp_counted_ptr_inplace*
Move STL allocator blacklist to clang
CFI: Move STL allocator blacklist to clang Summary: The regular expression to match STL allocators can't easily account for C++ mangling compression and fails to match some valid instances of STL allocators. Perform this logic in clang instead. Motivated by crbug.com/751385. Reviewers: pcc, kcc, llvm-commits Reviewed By: pcc Differential Revision: https://reviews.llvm.org/D36291 git-svn-id: c199f293c43da69278bea8e88f92242bf3aa95f7@310109 91177308-0d34-0410-b5e6-96231b3b80d8
Text
apache-2.0
llvm-mirror/compiler-rt,llvm-mirror/compiler-rt,llvm-mirror/compiler-rt,llvm-mirror/compiler-rt,llvm-mirror/compiler-rt
text
## Code Before: fun:*8allocateEjPKv fun:*8allocateEmPKv # std::get_temporary_buffer, likewise (libstdc++, libc++). fun:_ZSt20get_temporary_buffer* fun:_ZNSt3__120get_temporary_buffer* # STL address-of magic (libstdc++, libc++). fun:*__addressof* fun:_ZNSt3__19addressof* # Windows C++ stdlib headers that contain bad unrelated casts. src:*xmemory0 src:*xstddef # std::_Sp_counted_ptr_inplace::_Sp_counted_ptr_inplace() (libstdc++). # This ctor is used by std::make_shared and needs to cast to uninitialized T* # in order to call std::allocator_traits<T>::construct. fun:_ZNSt23_Sp_counted_ptr_inplace* ## Instruction: CFI: Move STL allocator blacklist to clang Summary: The regular expression to match STL allocators can't easily account for C++ mangling compression and fails to match some valid instances of STL allocators. Perform this logic in clang instead. Motivated by crbug.com/751385. Reviewers: pcc, kcc, llvm-commits Reviewed By: pcc Differential Revision: https://reviews.llvm.org/D36291 git-svn-id: c199f293c43da69278bea8e88f92242bf3aa95f7@310109 91177308-0d34-0410-b5e6-96231b3b80d8 ## Code After: fun:_ZSt20get_temporary_buffer* fun:_ZNSt3__120get_temporary_buffer* # STL address-of magic (libstdc++, libc++). fun:*__addressof* fun:_ZNSt3__19addressof* # Windows C++ stdlib headers that contain bad unrelated casts. src:*xmemory0 src:*xstddef # std::_Sp_counted_ptr_inplace::_Sp_counted_ptr_inplace() (libstdc++). # This ctor is used by std::make_shared and needs to cast to uninitialized T* # in order to call std::allocator_traits<T>::construct. fun:_ZNSt23_Sp_counted_ptr_inplace*
a18dd745d9a67546dcb34bb50b1f0d0e323e2400
README.md
README.md
Slack slapbot
Do you remember those great IRC times when you could just type ``` /slap Josh ``` ...and it will slap him real good! Well, why not recreate those joyful moments again in a modern IRC service, Slack! ## Installation * Clone the repo * Run `npm install` * Run `node server.js` Ok, great job, but this is not that useful since you will need to host this somewhere in order to configure Slack to use it. I recommend running the server with PM2 and fronting it with Nginx or similar. PM2 configuration .json should look like this one: ``` { "name" : "slapbot", "script" : "server.js", "cwd" : "<path_to_cloned_repo>", "env" : { "PORT":8090, } } ``` ## Configuration * Go to Slack services section https://<your_slack_team>.slack.com/services/new. * From there find Slash Command and add a command like `/slap [username]`. Put in the URL your service url or you can use ours -> `http://slapbot.spfr.co/slap`. Now you have a working bot with whom you can interact, but it is a private communication between you and him, so you could say that it is not that fun. Let's make it more fun! * Go to All Integrations screen again and add Incoming Webhook * Choose any channel to post to (don't worry, the bot will use the channel you type in when you do your slapping) * Copy and paste the token part of the URL that you got (it will be something like: `https://hooks.slack.com/services/sadaUSh12/s218jS/ajd123`) * Take the part after `services`, as in the above example it will be -> `/sadaUSh12/s218jS/ajd123` * Go back to Slash Command and edit the URL to add the token part from above so at the end it should be something like `http://slapbot.spfr.co/slap?callback=/sadaUSh12/s218jS/ajd123`. BTW replace `slapbot.spfr.co` with your URL, or feel free to use this one. Congratulations, enjoy slapping!
Update Readme to include a guide for setting this up
Update Readme to include a guide for setting this up
Markdown
mit
spfr/slapbot
markdown
## Code Before: Slack slapbot ## Instruction: Update Readme to include a guide for setting this up ## Code After: Do you remember those great IRC times when you could just type ``` /slap Josh ``` ...and it will slap him real good! Well, why not recreate those joyful moments again in a modern IRC service, Slack! ## Installation * Clone the repo * Run `npm install` * Run `node server.js` Ok, great job, but this is not that useful since you will need to host this somewhere in order to configure Slack to use it. I recommend running the server with PM2 and fronting it with Nginx or similar. PM2 configuration .json should look like this one: ``` { "name" : "slapbot", "script" : "server.js", "cwd" : "<path_to_cloned_repo>", "env" : { "PORT":8090, } } ``` ## Configuration * Go to Slack services section https://<your_slack_team>.slack.com/services/new. * From there find Slash Command and add a command like `/slap [username]`. Put in the URL your service url or you can use ours -> `http://slapbot.spfr.co/slap`. Now you have a working bot with whom you can interact, but it is a private communication between you and him, so you could say that it is not that fun. Let's make it more fun! * Go to All Integrations screen again and add Incoming Webhook * Choose any channel to post to (don't worry, the bot will use the channel you type in when you do your slapping) * Copy and paste the token part of the URL that you got (it will be something like: `https://hooks.slack.com/services/sadaUSh12/s218jS/ajd123`) * Take the part after `services`, as in the above example it will be -> `/sadaUSh12/s218jS/ajd123` * Go back to Slash Command and edit the URL to add the token part from above so at the end it should be something like `http://slapbot.spfr.co/slap?callback=/sadaUSh12/s218jS/ajd123`. BTW replace `slapbot.spfr.co` with your URL, or feel free to use this one. Congratulations, enjoy slapping!
d555d334147928a651163aff2c8207aaa88f1e80
src/gui/CMakeLists.txt
src/gui/CMakeLists.txt
include_directories(..) qt5_add_resources(NEOVIM_RCC_SOURCES data.qrc) add_executable(nvim-qt main.cpp shell.cpp input.cpp errorwidget.cpp mainwindow.cpp ${NEOVIM_RCC_SOURCES}) target_link_libraries(nvim-qt Qt5::Network Qt5::Widgets ${MSGPACK_LIBRARIES} neovim-qt) add_executable(nvim-qt-widget main.cpp shell.cpp input.cpp ${NEOVIM_RCC_SOURCES}) set_target_properties(nvim-qt-widget PROPERTIES COMPILE_DEFINITIONS NEOVIMQT_GUI_WIDGET ) target_link_libraries(nvim-qt-widget Qt5::Network Qt5::Widgets ${MSGPACK_LIBRARIES} neovim-qt)
include_directories(..) qt5_add_resources(NEOVIM_RCC_SOURCES data.qrc) add_executable(nvim-qt main.cpp shell.cpp input.cpp errorwidget.cpp mainwindow.cpp ${NEOVIM_RCC_SOURCES}) target_link_libraries(nvim-qt Qt5::Network Qt5::Widgets ${MSGPACK_LIBRARIES} neovim-qt) add_executable(nvim-qt-widget EXCLUDE_FROM_ALL main.cpp shell.cpp input.cpp ${NEOVIM_RCC_SOURCES}) set_target_properties(nvim-qt-widget PROPERTIES COMPILE_DEFINITIONS NEOVIMQT_GUI_WIDGET ) target_link_libraries(nvim-qt-widget Qt5::Network Qt5::Widgets ${MSGPACK_LIBRARIES} neovim-qt)
Exclude widget gui target test from default build
Exclude widget gui target test from default build - The nvim-qt-widget executable is used to test the shell widget and should not be built by default
Text
isc
ravloony/neovim-qt,ngkaho1234/neovim-qt,ravloony/neovim-qt,equalsraf/neovim-qt,Pireax/neovim-qt,0x90sled/neovim-qt,Pireax/neovim-qt,equalsraf/neovim-qt,equalsraf/neovim-qt,ravloony/neovim-qt,equalsraf/neovim-qt,ngkaho1234/neovim-qt,ngkaho1234/neovim-qt,Pireax/neovim-qt,0x90sled/neovim-qt,0x90sled/neovim-qt
text
## Code Before: include_directories(..) qt5_add_resources(NEOVIM_RCC_SOURCES data.qrc) add_executable(nvim-qt main.cpp shell.cpp input.cpp errorwidget.cpp mainwindow.cpp ${NEOVIM_RCC_SOURCES}) target_link_libraries(nvim-qt Qt5::Network Qt5::Widgets ${MSGPACK_LIBRARIES} neovim-qt) add_executable(nvim-qt-widget main.cpp shell.cpp input.cpp ${NEOVIM_RCC_SOURCES}) set_target_properties(nvim-qt-widget PROPERTIES COMPILE_DEFINITIONS NEOVIMQT_GUI_WIDGET ) target_link_libraries(nvim-qt-widget Qt5::Network Qt5::Widgets ${MSGPACK_LIBRARIES} neovim-qt) ## Instruction: Exclude widget gui target test from default build - The nvim-qt-widget executable is used to test the shell widget and should not be built by default ## Code After: include_directories(..) qt5_add_resources(NEOVIM_RCC_SOURCES data.qrc) add_executable(nvim-qt main.cpp shell.cpp input.cpp errorwidget.cpp mainwindow.cpp ${NEOVIM_RCC_SOURCES}) target_link_libraries(nvim-qt Qt5::Network Qt5::Widgets ${MSGPACK_LIBRARIES} neovim-qt) add_executable(nvim-qt-widget EXCLUDE_FROM_ALL main.cpp shell.cpp input.cpp ${NEOVIM_RCC_SOURCES}) set_target_properties(nvim-qt-widget PROPERTIES COMPILE_DEFINITIONS NEOVIMQT_GUI_WIDGET ) target_link_libraries(nvim-qt-widget Qt5::Network Qt5::Widgets ${MSGPACK_LIBRARIES} neovim-qt)
fe19f983fb0aa05afc706aecca114ed91c650e07
_includes/rockstar-speakers.html
_includes/rockstar-speakers.html
<!-- Begin Rockstar Speakers Section --> <section id="rockstar-speakers" class="rockstar-speakers image-section parallax" style="background-image: url({{ site.baseurl }}/img/sections-background/{{ site.rockstarSpeakersImage }});"> <div class="overlay solid-overlay"></div> <div class="content-wrapper"> <div class="col-lg-10 col-lg-offset-1 rockstar-speakers-list"> <h3 id="rockstar-speaker-title">{{ site.rockstarSpeakersTitle }}</h3> <div class="clearfix"></div> </div> </div> </section> <!-- End Rockstar Speakers Section -->
<!-- Begin Rockstar Speakers Section --> <section id="rockstar-speakers" class="rockstar-speakers image-section parallax" style="background-image: url({{ site.baseurl }}/img/sections-background/{{ site.rockstarSpeakersImage }});"> <div class="overlay solid-overlay"></div> <div class="content-wrapper"> <div class="col-lg-10 col-lg-offset-1 rockstar-speakers-list"> <h3 id="rockstar-speaker-title">{{ site.rockstarSpeakersTitle }}</h3> <div class="clearfix"></div> <h3>CALL FOR SPEAKERS NOW OPEN</h3> <p><a href="http://swanseacon.co.uk/">SUBMIT A SESSION</a></p> </div> </div> </section> <!-- End Rockstar Speakers Section -->
Call for speakers button added
Call for speakers button added
HTML
mit
swanseacon/swanseacon.github.io,swanseacon/swanseacon.github.io,swanseacon/swanseacon.github.io
html
## Code Before: <!-- Begin Rockstar Speakers Section --> <section id="rockstar-speakers" class="rockstar-speakers image-section parallax" style="background-image: url({{ site.baseurl }}/img/sections-background/{{ site.rockstarSpeakersImage }});"> <div class="overlay solid-overlay"></div> <div class="content-wrapper"> <div class="col-lg-10 col-lg-offset-1 rockstar-speakers-list"> <h3 id="rockstar-speaker-title">{{ site.rockstarSpeakersTitle }}</h3> <div class="clearfix"></div> </div> </div> </section> <!-- End Rockstar Speakers Section --> ## Instruction: Call for speakers button added ## Code After: <!-- Begin Rockstar Speakers Section --> <section id="rockstar-speakers" class="rockstar-speakers image-section parallax" style="background-image: url({{ site.baseurl }}/img/sections-background/{{ site.rockstarSpeakersImage }});"> <div class="overlay solid-overlay"></div> <div class="content-wrapper"> <div class="col-lg-10 col-lg-offset-1 rockstar-speakers-list"> <h3 id="rockstar-speaker-title">{{ site.rockstarSpeakersTitle }}</h3> <div class="clearfix"></div> <h3>CALL FOR SPEAKERS NOW OPEN</h3> <p><a href="http://swanseacon.co.uk/">SUBMIT A SESSION</a></p> </div> </div> </section> <!-- End Rockstar Speakers Section -->
cd8c61cc609f9d5005b3e009f4c65d1323ec0b26
contrib/flavor/ngs_pipeline_minimal/packages-homebrew.yaml
contrib/flavor/ngs_pipeline_minimal/packages-homebrew.yaml
--- bio_nextgen: alignment: - bwa - bowtie2 - novoalign utilities: - bamtools - bedtools - fastqc - fastx_toolkit - qualimap - sambamba analysis: #- cufflinks - samtools #- tophat variant: - glia - lumpy-sv - tabix - vcflib - vcftools
--- bio_nextgen: alignment: - bwa - bowtie2 - novoalign utilities: - bamtools - bedtools - fastqc - qualimap - sambamba analysis: #- cufflinks - samtools #- tophat variant: - glia - lumpy-sv - tabix - vcflib - vcftools
Drop fastx_toolkit requirement for bcbio-nextgen install
Drop fastx_toolkit requirement for bcbio-nextgen install
YAML
mit
AICIDNN/cloudbiolinux,rchekaluk/cloudbiolinux,AICIDNN/cloudbiolinux,rchekaluk/cloudbiolinux,averagehat/cloudbiolinux,kdaily/cloudbiolinux,elkingtonmcb/cloudbiolinux,joemphilips/cloudbiolinux,heuermh/cloudbiolinux,chapmanb/cloudbiolinux,AICIDNN/cloudbiolinux,averagehat/cloudbiolinux,joemphilips/cloudbiolinux,pjotrp/cloudbiolinux,pjotrp/cloudbiolinux,lpantano/cloudbiolinux,kdaily/cloudbiolinux,lpantano/cloudbiolinux,lpantano/cloudbiolinux,elkingtonmcb/cloudbiolinux,AICIDNN/cloudbiolinux,averagehat/cloudbiolinux,kdaily/cloudbiolinux,chapmanb/cloudbiolinux,elkingtonmcb/cloudbiolinux,pjotrp/cloudbiolinux,chapmanb/cloudbiolinux,rchekaluk/cloudbiolinux,heuermh/cloudbiolinux,heuermh/cloudbiolinux,chapmanb/cloudbiolinux,joemphilips/cloudbiolinux,heuermh/cloudbiolinux,elkingtonmcb/cloudbiolinux,joemphilips/cloudbiolinux,rchekaluk/cloudbiolinux,kdaily/cloudbiolinux,averagehat/cloudbiolinux,pjotrp/cloudbiolinux
yaml
## Code Before: --- bio_nextgen: alignment: - bwa - bowtie2 - novoalign utilities: - bamtools - bedtools - fastqc - fastx_toolkit - qualimap - sambamba analysis: #- cufflinks - samtools #- tophat variant: - glia - lumpy-sv - tabix - vcflib - vcftools ## Instruction: Drop fastx_toolkit requirement for bcbio-nextgen install ## Code After: --- bio_nextgen: alignment: - bwa - bowtie2 - novoalign utilities: - bamtools - bedtools - fastqc - qualimap - sambamba analysis: #- cufflinks - samtools #- tophat variant: - glia - lumpy-sv - tabix - vcflib - vcftools
2973b664e8c9cf551d5d7277ab4995125be5fad0
python/reference.py
python/reference.py
import os # Current directory # If you call this from the current directory without abspath, # then it will not work since __file__ is a relative path os.path.dirname(os.path.abspath(__file__))
import os # Current directory # If you call this from the current directory without abspath, # then it will not work since __file__ is a relative path os.path.dirname(os.path.abspath(__file__)) # Get all files in a directory # Never use os.walk again def all_sub_files(root): for path, subdirs, files in os.walk(root): for name in files: yield os.path.join(path, name)
Add util for not using os.walk
Add util for not using os.walk
Python
mit
brycepg/how-to
python
## Code Before: import os # Current directory # If you call this from the current directory without abspath, # then it will not work since __file__ is a relative path os.path.dirname(os.path.abspath(__file__)) ## Instruction: Add util for not using os.walk ## Code After: import os # Current directory # If you call this from the current directory without abspath, # then it will not work since __file__ is a relative path os.path.dirname(os.path.abspath(__file__)) # Get all files in a directory # Never use os.walk again def all_sub_files(root): for path, subdirs, files in os.walk(root): for name in files: yield os.path.join(path, name)
6c1017541df36b4b8a992404b51a58c95e5a5471
components/related_articles/template.jade
components/related_articles/template.jade
.related-articles for article in articles .related-article a.ra-image.hoverable-image-link( href="/article/#{article.get('slug')}" ) img( src= crop(article.get('thumbnail_image'), { width: 300, height: 225 }) alt= article.get('title') ) .ra-blurb a.ra-author( href="/editorial" ) | By Artsy Editorial a( href="/article/#{article.get('slug')}" ) if article.get('thumbnail_title') h3.ra-title= article.get('thumbnail_title') if article.get('thumbnail_teaser') .ra-summary = _s.prune(article.get('thumbnail_teaser'), 450) span.ra-go Go to article if remaining > 0 .related-articles-show-all.avant-garde-button-white.is-block | Show More Related Articles (#{remaining})
.related-articles for article in articles .related-article a.ra-image.hoverable-image-link( href="/article/#{article.get('slug')}" ) img( src= crop(article.get('thumbnail_image'), { width: 300, height: 225 }) alt= article.get('title') ) .ra-blurb a.ra-author( href="/#{article.get('author').profile_handle}" ) | By #{article.get('author').name} a( href="/article/#{article.get('slug')}" ) if article.get('thumbnail_title') h3.ra-title= article.get('thumbnail_title') if article.get('thumbnail_teaser') .ra-summary = _s.prune(article.get('thumbnail_teaser'), 450) span.ra-go Go to article if remaining > 0 .related-articles-show-all.avant-garde-button-white.is-block | Show More Related Articles (#{remaining})
Use real author in artist/:id/articles
Use real author in artist/:id/articles
Jade
mit
dblock/force,erikdstock/force,damassi/force,eessex/force,yuki24/force,dblock/force,xtina-starr/force,yuki24/force,xtina-starr/force,joeyAghion/force,oxaudo/force,kanaabe/force,oxaudo/force,erikdstock/force,erikdstock/force,damassi/force,cavvia/force-1,mzikherman/force,mzikherman/force,izakp/force,damassi/force,mzikherman/force,artsy/force,dblock/force,oxaudo/force,TribeMedia/force-public,anandaroop/force,izakp/force,eessex/force,artsy/force,eessex/force,artsy/force-public,anandaroop/force,cavvia/force-1,anandaroop/force,joeyAghion/force,izakp/force,artsy/force-public,xtina-starr/force,kanaabe/force,mzikherman/force,cavvia/force-1,kanaabe/force,artsy/force,kanaabe/force,kanaabe/force,joeyAghion/force,izakp/force,damassi/force,eessex/force,anandaroop/force,TribeMedia/force-public,artsy/force,joeyAghion/force,erikdstock/force,xtina-starr/force,yuki24/force,yuki24/force,oxaudo/force,cavvia/force-1
jade
## Code Before: .related-articles for article in articles .related-article a.ra-image.hoverable-image-link( href="/article/#{article.get('slug')}" ) img( src= crop(article.get('thumbnail_image'), { width: 300, height: 225 }) alt= article.get('title') ) .ra-blurb a.ra-author( href="/editorial" ) | By Artsy Editorial a( href="/article/#{article.get('slug')}" ) if article.get('thumbnail_title') h3.ra-title= article.get('thumbnail_title') if article.get('thumbnail_teaser') .ra-summary = _s.prune(article.get('thumbnail_teaser'), 450) span.ra-go Go to article if remaining > 0 .related-articles-show-all.avant-garde-button-white.is-block | Show More Related Articles (#{remaining}) ## Instruction: Use real author in artist/:id/articles ## Code After: .related-articles for article in articles .related-article a.ra-image.hoverable-image-link( href="/article/#{article.get('slug')}" ) img( src= crop(article.get('thumbnail_image'), { width: 300, height: 225 }) alt= article.get('title') ) .ra-blurb a.ra-author( href="/#{article.get('author').profile_handle}" ) | By #{article.get('author').name} a( href="/article/#{article.get('slug')}" ) if article.get('thumbnail_title') h3.ra-title= article.get('thumbnail_title') if article.get('thumbnail_teaser') .ra-summary = _s.prune(article.get('thumbnail_teaser'), 450) span.ra-go Go to article if remaining > 0 .related-articles-show-all.avant-garde-button-white.is-block | Show More Related Articles (#{remaining})
1998e22e2cdbfda5a4f6d9903b81df616ca66e06
.travis-build-without-test.sh
.travis-build-without-test.sh
ROOT=$TRAVIS_BUILD_DIR/.. # Fail the whole script if any command fails set -e export SHELLOPTS SLUGOWNER=${TRAVIS_REPO_SLUG%/*} # jsr308-langtools if [ -d ../jsr308-langtools ] ; then (cd ../jsr308-langtools && hg pull && hg update) else set +e echo "Running: hg identify https://bitbucket.org/${SLUGOWNER}/jsr308-langtools &>-" hg identify https://bitbucket.org/${SLUGOWNER}/jsr308-langtools &>- if [ "$?" -ne 0 ]; then SLUGOWNER=typetools fi set -e echo "Running: (cd .. && hg clone https://bitbucket.org/${SLUGOWNER}/jsr308-langtools)" (cd .. && (hg clone https://bitbucket.org/${SLUGOWNER}/jsr308-langtools || hg clone https://bitbucket.org/${SLUGOWNER}/jsr308-langtools)) echo "... done: (cd .. && hg clone https://bitbucket.org/${SLUGOWNER}/jsr308-langtools)" fi (cd ../jsr308-langtools/ && ./.travis-build-without-test.sh) ## Compile echo "running \"ant compile\" for annotation-tools" ant compile
ROOT=$TRAVIS_BUILD_DIR/.. # Fail the whole script if any command fails set -e export SHELLOPTS SLUGOWNER=${TRAVIS_REPO_SLUG%/*} if [[ "$SLUGOWNER" == "" ]]; then SLUGOWNER=typetools fi # jsr308-langtools if [ -d ../jsr308-langtools ] ; then (cd ../jsr308-langtools && hg pull && hg update) else set +e echo "Running: hg identify https://bitbucket.org/${SLUGOWNER}/jsr308-langtools &>-" hg identify https://bitbucket.org/${SLUGOWNER}/jsr308-langtools &>- if [ "$?" -ne 0 ]; then SLUGOWNER=typetools fi set -e echo "Running: (cd .. && hg clone https://bitbucket.org/${SLUGOWNER}/jsr308-langtools)" (cd .. && (hg clone https://bitbucket.org/${SLUGOWNER}/jsr308-langtools || hg clone https://bitbucket.org/${SLUGOWNER}/jsr308-langtools)) echo "... done: (cd .. && hg clone https://bitbucket.org/${SLUGOWNER}/jsr308-langtools)" fi (cd ../jsr308-langtools/ && ./.travis-build-without-test.sh) ## Compile echo "running \"ant compile\" for annotation-tools" ant compile
Use 'typetools' if TRAVIS_REPO_SLUG is not set.
Use 'typetools' if TRAVIS_REPO_SLUG is not set.
Shell
mit
eisop/annotation-tools,typetools/annotation-tools,eisop/annotation-tools,typetools/annotation-tools,eisop/annotation-tools,typetools/annotation-tools
shell
## Code Before: ROOT=$TRAVIS_BUILD_DIR/.. # Fail the whole script if any command fails set -e export SHELLOPTS SLUGOWNER=${TRAVIS_REPO_SLUG%/*} # jsr308-langtools if [ -d ../jsr308-langtools ] ; then (cd ../jsr308-langtools && hg pull && hg update) else set +e echo "Running: hg identify https://bitbucket.org/${SLUGOWNER}/jsr308-langtools &>-" hg identify https://bitbucket.org/${SLUGOWNER}/jsr308-langtools &>- if [ "$?" -ne 0 ]; then SLUGOWNER=typetools fi set -e echo "Running: (cd .. && hg clone https://bitbucket.org/${SLUGOWNER}/jsr308-langtools)" (cd .. && (hg clone https://bitbucket.org/${SLUGOWNER}/jsr308-langtools || hg clone https://bitbucket.org/${SLUGOWNER}/jsr308-langtools)) echo "... done: (cd .. && hg clone https://bitbucket.org/${SLUGOWNER}/jsr308-langtools)" fi (cd ../jsr308-langtools/ && ./.travis-build-without-test.sh) ## Compile echo "running \"ant compile\" for annotation-tools" ant compile ## Instruction: Use 'typetools' if TRAVIS_REPO_SLUG is not set. ## Code After: ROOT=$TRAVIS_BUILD_DIR/.. # Fail the whole script if any command fails set -e export SHELLOPTS SLUGOWNER=${TRAVIS_REPO_SLUG%/*} if [[ "$SLUGOWNER" == "" ]]; then SLUGOWNER=typetools fi # jsr308-langtools if [ -d ../jsr308-langtools ] ; then (cd ../jsr308-langtools && hg pull && hg update) else set +e echo "Running: hg identify https://bitbucket.org/${SLUGOWNER}/jsr308-langtools &>-" hg identify https://bitbucket.org/${SLUGOWNER}/jsr308-langtools &>- if [ "$?" -ne 0 ]; then SLUGOWNER=typetools fi set -e echo "Running: (cd .. && hg clone https://bitbucket.org/${SLUGOWNER}/jsr308-langtools)" (cd .. && (hg clone https://bitbucket.org/${SLUGOWNER}/jsr308-langtools || hg clone https://bitbucket.org/${SLUGOWNER}/jsr308-langtools)) echo "... done: (cd .. && hg clone https://bitbucket.org/${SLUGOWNER}/jsr308-langtools)" fi (cd ../jsr308-langtools/ && ./.travis-build-without-test.sh) ## Compile echo "running \"ant compile\" for annotation-tools" ant compile
41ac7e2d85126c2fe5dd16230ed678d72a8d048f
jax/__init__.py
jax/__init__.py
import os os.environ.setdefault('TF_CPP_MIN_LOG_LEVEL', '1') version_file = os.path.join(os.path.abspath(os.path.dirname(__file__)), "version.py") with open(version_file) as f: exec(f.read(), globals()) from jax.api import * import jax.numpy as np # side-effecting import sets up operator overloads
import os os.environ.setdefault('TF_CPP_MIN_LOG_LEVEL', '1') from jax.version import __version__ from jax.api import * import jax.numpy as np # side-effecting import sets up operator overloads
Use a regular import to add jax.__version__ rather than exec() trickery.
Use a regular import to add jax.__version__ rather than exec() trickery. (The exec() trickery is needed for setup.py, but not for jax/__init__.py.)
Python
apache-2.0
tensorflow/probability,google/jax,google/jax,google/jax,google/jax,tensorflow/probability
python
## Code Before: import os os.environ.setdefault('TF_CPP_MIN_LOG_LEVEL', '1') version_file = os.path.join(os.path.abspath(os.path.dirname(__file__)), "version.py") with open(version_file) as f: exec(f.read(), globals()) from jax.api import * import jax.numpy as np # side-effecting import sets up operator overloads ## Instruction: Use a regular import to add jax.__version__ rather than exec() trickery. (The exec() trickery is needed for setup.py, but not for jax/__init__.py.) ## Code After: import os os.environ.setdefault('TF_CPP_MIN_LOG_LEVEL', '1') from jax.version import __version__ from jax.api import * import jax.numpy as np # side-effecting import sets up operator overloads
7e356d8f7685cfe9d5d350d1755f638275d78832
packages/stream-dom/karma.conf.js
packages/stream-dom/karma.conf.js
var path = require('path') module.exports = function(config) { config.set({ singleRun: true, files: [ './node_modules/babel-polyfill/browser.js', 'test/index.js', 'test/eventing.js' ], frameworks: [ 'mocha' ], preprocessors: { 'test/index.js': [ 'webpack', 'sourcemap' ], 'test/eventing.js': [ 'webpack', 'sourcemap' ] }, logLevel: config.LOG_INFO, reporters: [ 'spec' ], webpack: { devtool: 'inline-source-map', module: { loaders: [{ test: /\.js$/, loader: 'babel', include: [ path.join(__dirname, 'test') ] }] }, resolve: { modulesDirectories: [ '', 'node_modules' ], alias: { 'stream-dom': path.join(__dirname, 'lib') }, extensions: [ '', '.js' ] } }, webpackMiddleware: { // Display no info to console (only warnings and errors) noInfo: true }, plugins: [ require('karma-webpack'), require('karma-sourcemap-loader'), require('karma-mocha'), require('karma-chrome-launcher'), require('karma-firefox-launcher'), require('karma-spec-reporter') ], browsers: [ 'Chrome', 'Firefox' ] }) }
var path = require('path') module.exports = function(config) { config.set({ singleRun: true, files: [ 'test/index.js', 'test/eventing.js' ], frameworks: [ 'mocha' ], preprocessors: { 'test/index.js': [ 'webpack', 'sourcemap' ], 'test/eventing.js': [ 'webpack', 'sourcemap' ] }, logLevel: config.LOG_INFO, reporters: [ 'spec' ], webpack: { devtool: 'inline-source-map', module: { loaders: [{ test: /\.js$/, loader: 'babel', include: [ path.join(__dirname, 'test') ] }] }, resolve: { modulesDirectories: [ '', 'node_modules' ], alias: { 'stream-dom': path.join(__dirname, 'lib') }, extensions: [ '', '.js' ] } }, webpackMiddleware: { // Display no info to console (only warnings and errors) noInfo: true }, plugins: [ require('karma-webpack'), require('karma-sourcemap-loader'), require('karma-mocha'), require('karma-chrome-launcher'), require('karma-firefox-launcher'), require('karma-spec-reporter') ], browsers: [ 'Chrome', 'Firefox' ] }) }
Remove polyfill from unit test
Remove polyfill from unit test
JavaScript
mit
brandonpayton/stream-dom,brandonpayton/stream-dom
javascript
## Code Before: var path = require('path') module.exports = function(config) { config.set({ singleRun: true, files: [ './node_modules/babel-polyfill/browser.js', 'test/index.js', 'test/eventing.js' ], frameworks: [ 'mocha' ], preprocessors: { 'test/index.js': [ 'webpack', 'sourcemap' ], 'test/eventing.js': [ 'webpack', 'sourcemap' ] }, logLevel: config.LOG_INFO, reporters: [ 'spec' ], webpack: { devtool: 'inline-source-map', module: { loaders: [{ test: /\.js$/, loader: 'babel', include: [ path.join(__dirname, 'test') ] }] }, resolve: { modulesDirectories: [ '', 'node_modules' ], alias: { 'stream-dom': path.join(__dirname, 'lib') }, extensions: [ '', '.js' ] } }, webpackMiddleware: { // Display no info to console (only warnings and errors) noInfo: true }, plugins: [ require('karma-webpack'), require('karma-sourcemap-loader'), require('karma-mocha'), require('karma-chrome-launcher'), require('karma-firefox-launcher'), require('karma-spec-reporter') ], browsers: [ 'Chrome', 'Firefox' ] }) } ## Instruction: Remove polyfill from unit test ## Code After: var path = require('path') module.exports = function(config) { config.set({ singleRun: true, files: [ 'test/index.js', 'test/eventing.js' ], frameworks: [ 'mocha' ], preprocessors: { 'test/index.js': [ 'webpack', 'sourcemap' ], 'test/eventing.js': [ 'webpack', 'sourcemap' ] }, logLevel: config.LOG_INFO, reporters: [ 'spec' ], webpack: { devtool: 'inline-source-map', module: { loaders: [{ test: /\.js$/, loader: 'babel', include: [ path.join(__dirname, 'test') ] }] }, resolve: { modulesDirectories: [ '', 'node_modules' ], alias: { 'stream-dom': path.join(__dirname, 'lib') }, extensions: [ '', '.js' ] } }, webpackMiddleware: { // Display no info to console (only warnings and errors) noInfo: true }, plugins: [ require('karma-webpack'), require('karma-sourcemap-loader'), require('karma-mocha'), require('karma-chrome-launcher'), require('karma-firefox-launcher'), require('karma-spec-reporter') ], browsers: [ 'Chrome', 'Firefox' ] }) }
5bb3ae536087e8abe09d145c51861adc808d6434
apps/OboeTester/README.md
apps/OboeTester/README.md
Test Oboe using an Interactive App ## Test Output Test opening, starting, stopping and closing a stream. ## Test Input ## Tap to Tone Latency Measure touch screen latency plus audio output latency. ## Record and Playback Record 10 seconds of audio and play it back.
Test Oboe using an Interactive App. Download the top level oboe repository from GitHub. Then use Android Studio to build the app in this folder. ## Test Output Test opening, starting, stopping and closing a stream. ## Test Input Test input streams. Displays the current volume. ## Tap to Tone Latency Measure touch screen latency plus audio output latency. ## Record and Playback Record 10 seconds of audio and play it back.
Add a line of OboeTester instructions
Add a line of OboeTester instructions
Markdown
apache-2.0
google/oboe,google/oboe,google/oboe,google/oboe,google/oboe,google/oboe
markdown
## Code Before: Test Oboe using an Interactive App ## Test Output Test opening, starting, stopping and closing a stream. ## Test Input ## Tap to Tone Latency Measure touch screen latency plus audio output latency. ## Record and Playback Record 10 seconds of audio and play it back. ## Instruction: Add a line of OboeTester instructions ## Code After: Test Oboe using an Interactive App. Download the top level oboe repository from GitHub. Then use Android Studio to build the app in this folder. ## Test Output Test opening, starting, stopping and closing a stream. ## Test Input Test input streams. Displays the current volume. ## Tap to Tone Latency Measure touch screen latency plus audio output latency. ## Record and Playback Record 10 seconds of audio and play it back.
c1269b9b09f002d29f8dca54629eb87d5ba732ff
local-build.sh
local-build.sh
box build mv cdev.phar /usr/local/bin/cdev-local
INSTALL_DIR=~/.cdev ICON_COMPLETE_COLOUR=`tput setaf 2` NO_COLOUR=`tput sgr0` ICON_COMPLETE="${ICON_COMPLETE_COLOUR}\xcf\xbe${NO_COLOUR}" TARGET_RELEASE_PATH="${INSTALL_DIR}/cdev-local.phar" TARGET_RELEASE_KEY_PATH="${INSTALL_DIR}/cdev-local.phar.pubkey" ALIAS='/usr/local/bin/cdev-local' box key:create box build mv cdev.phar $TARGET_RELEASE_PATH mv cdev.phar.pubkey $TARGET_RELEASE_KEY_PATH rm $ALIAS ln -s $TARGET_RELEASE_PATH $ALIAS
Update local build to use same build methods as a release
Update local build to use same build methods as a release
Shell
mit
garethmidwood/cdev,garethmidwood/cdev
shell
## Code Before: box build mv cdev.phar /usr/local/bin/cdev-local ## Instruction: Update local build to use same build methods as a release ## Code After: INSTALL_DIR=~/.cdev ICON_COMPLETE_COLOUR=`tput setaf 2` NO_COLOUR=`tput sgr0` ICON_COMPLETE="${ICON_COMPLETE_COLOUR}\xcf\xbe${NO_COLOUR}" TARGET_RELEASE_PATH="${INSTALL_DIR}/cdev-local.phar" TARGET_RELEASE_KEY_PATH="${INSTALL_DIR}/cdev-local.phar.pubkey" ALIAS='/usr/local/bin/cdev-local' box key:create box build mv cdev.phar $TARGET_RELEASE_PATH mv cdev.phar.pubkey $TARGET_RELEASE_KEY_PATH rm $ALIAS ln -s $TARGET_RELEASE_PATH $ALIAS
e53f2efa08388b11e48ef381c095dbc903a5a30a
less/forum.less
less/forum.less
.emojionearea.emojionearea-standalone { // Hide the standalone button/display as we only use the picker itself .emojionearea-editor, .emojionearea-button { display: none; } } .ComposerBody-emojioneareaContainer { height: 0; // Same margins as .TextEditor-controls // So the position of the picker is relative to the border of the composer, even when the avatar is shown on the side @media @tablet-up { margin: 0 -20px 0 -105px; padding: 0 20px; } }
// Hide the standalone button/display as we only use the picker itself .emojionearea.emojionearea-standalone { border: none; .emojionearea-editor, .emojionearea-button { display: none; } } .ComposerBody-emojioneareaContainer { height: 0; // Same margins as .TextEditor-controls // So the position of the picker is relative to the border of the composer, even when the avatar is shown on the side @media @tablet-up { margin: 0 -20px 0 -105px; padding: 0 20px; } }
Fix white pixel showing up above "post reply" button
Fix white pixel showing up above "post reply" button
Less
mit
clarkwinkelmann/flarum-ext-emojionearea,clarkwinkelmann/flarum-ext-emojionearea
less
## Code Before: .emojionearea.emojionearea-standalone { // Hide the standalone button/display as we only use the picker itself .emojionearea-editor, .emojionearea-button { display: none; } } .ComposerBody-emojioneareaContainer { height: 0; // Same margins as .TextEditor-controls // So the position of the picker is relative to the border of the composer, even when the avatar is shown on the side @media @tablet-up { margin: 0 -20px 0 -105px; padding: 0 20px; } } ## Instruction: Fix white pixel showing up above "post reply" button ## Code After: // Hide the standalone button/display as we only use the picker itself .emojionearea.emojionearea-standalone { border: none; .emojionearea-editor, .emojionearea-button { display: none; } } .ComposerBody-emojioneareaContainer { height: 0; // Same margins as .TextEditor-controls // So the position of the picker is relative to the border of the composer, even when the avatar is shown on the side @media @tablet-up { margin: 0 -20px 0 -105px; padding: 0 20px; } }
62c9ed93e35bc1ec8f2fd21b41ce8cada1c6f614
src/java/org/dellroad/stuff/validation/EmailAddress.java
src/java/org/dellroad/stuff/validation/EmailAddress.java
/* * Copyright (C) 2011 Archie L. Cobbs. All rights reserved. * * $Id$ */ package org.dellroad.stuff.validation; import java.lang.annotation.Documented; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; import javax.validation.Constraint; import javax.validation.Payload; import javax.validation.ReportAsSingleViolation; import javax.validation.constraints.Pattern; /** * Validation constraint requiring a {@link String} to be a valid email address. */ @Documented @Constraint(validatedBy = {}) @Target({ ElementType.METHOD, ElementType.FIELD, ElementType.ANNOTATION_TYPE }) @Retention(RetentionPolicy.RUNTIME) @Pattern(regexp = "^[-+._\\p{Alnum}]+@[-\\p{Alnum}]+(\\.[-.\\p{Alnum}]+)+$") @ReportAsSingleViolation public @interface EmailAddress { String message() default "Invalid email address"; Class<?>[] groups() default {}; Class<? extends Payload>[] payload() default {}; }
/* * Copyright (C) 2011 Archie L. Cobbs. All rights reserved. * * $Id$ */ package org.dellroad.stuff.validation; import java.lang.annotation.Documented; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; import javax.validation.Constraint; import javax.validation.Payload; import javax.validation.ReportAsSingleViolation; import javax.validation.constraints.Pattern; /** * Validation constraint requiring a {@link String} to be a valid email address. */ @Documented @Constraint(validatedBy = {}) @Target({ ElementType.METHOD, ElementType.FIELD, ElementType.ANNOTATION_TYPE }) @Retention(RetentionPolicy.RUNTIME) @Pattern(regexp = EmailAddress.PATTERN) @ReportAsSingleViolation public @interface EmailAddress { /** * The regular expression string used by this constraint to validate email addresses: {@value} */ String PATTERN = "^[-+%._\\p{Alnum}]+@([-\\p{Alnum}]+\\.)+[-\\p{Alnum}]+$"; String message() default "Invalid email address"; Class<?>[] groups() default {}; Class<? extends Payload>[] payload() default {}; }
Tweak email address pattern and define the string as a constant.
Tweak email address pattern and define the string as a constant.
Java
apache-2.0
tempbottle/jsimpledb,permazen/permazen,permazen/permazen,permazen/permazen,tempbottle/jsimpledb,archiecobbs/jsimpledb,tempbottle/jsimpledb,archiecobbs/jsimpledb,archiecobbs/jsimpledb
java
## Code Before: /* * Copyright (C) 2011 Archie L. Cobbs. All rights reserved. * * $Id$ */ package org.dellroad.stuff.validation; import java.lang.annotation.Documented; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; import javax.validation.Constraint; import javax.validation.Payload; import javax.validation.ReportAsSingleViolation; import javax.validation.constraints.Pattern; /** * Validation constraint requiring a {@link String} to be a valid email address. */ @Documented @Constraint(validatedBy = {}) @Target({ ElementType.METHOD, ElementType.FIELD, ElementType.ANNOTATION_TYPE }) @Retention(RetentionPolicy.RUNTIME) @Pattern(regexp = "^[-+._\\p{Alnum}]+@[-\\p{Alnum}]+(\\.[-.\\p{Alnum}]+)+$") @ReportAsSingleViolation public @interface EmailAddress { String message() default "Invalid email address"; Class<?>[] groups() default {}; Class<? extends Payload>[] payload() default {}; } ## Instruction: Tweak email address pattern and define the string as a constant. ## Code After: /* * Copyright (C) 2011 Archie L. Cobbs. All rights reserved. * * $Id$ */ package org.dellroad.stuff.validation; import java.lang.annotation.Documented; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; import javax.validation.Constraint; import javax.validation.Payload; import javax.validation.ReportAsSingleViolation; import javax.validation.constraints.Pattern; /** * Validation constraint requiring a {@link String} to be a valid email address. */ @Documented @Constraint(validatedBy = {}) @Target({ ElementType.METHOD, ElementType.FIELD, ElementType.ANNOTATION_TYPE }) @Retention(RetentionPolicy.RUNTIME) @Pattern(regexp = EmailAddress.PATTERN) @ReportAsSingleViolation public @interface EmailAddress { /** * The regular expression string used by this constraint to validate email addresses: {@value} */ String PATTERN = "^[-+%._\\p{Alnum}]+@([-\\p{Alnum}]+\\.)+[-\\p{Alnum}]+$"; String message() default "Invalid email address"; Class<?>[] groups() default {}; Class<? extends Payload>[] payload() default {}; }
f04fe9165c3c8d1427a814758af880a5839add56
.circleci/Dockerfile
.circleci/Dockerfile
FROM ubuntu:17.04 RUN \ apt-get update -qq && \ apt-get install -y --no-install-recommends \ build-essential \ ca-certificates \ curl \ libssl-dev \ libsqlite3-dev \ pkg-config \ sqlite3 RUN curl https://sh.rustup.rs -sSf | sh -s -- -y --default-toolchain nightly ENV PATH=/root/.cargo/bin:$PATH
FROM ubuntu:17.04 RUN \ apt-get update -qq && \ apt-get install -y --no-install-recommends \ build-essential \ ca-certificates \ curl \ libpq-dev \ libsqlite3-dev \ libssl-dev \ pkg-config \ sqlite3 RUN curl https://sh.rustup.rs -sSf | sh -s -- -y --default-toolchain nightly ENV PATH=/root/.cargo/bin:$PATH
Add pg support to Rust image
chore: Add pg support to Rust image
unknown
mit
mob-rs/mob
unknown
## Code Before: FROM ubuntu:17.04 RUN \ apt-get update -qq && \ apt-get install -y --no-install-recommends \ build-essential \ ca-certificates \ curl \ libssl-dev \ libsqlite3-dev \ pkg-config \ sqlite3 RUN curl https://sh.rustup.rs -sSf | sh -s -- -y --default-toolchain nightly ENV PATH=/root/.cargo/bin:$PATH ## Instruction: chore: Add pg support to Rust image ## Code After: FROM ubuntu:17.04 RUN \ apt-get update -qq && \ apt-get install -y --no-install-recommends \ build-essential \ ca-certificates \ curl \ libpq-dev \ libsqlite3-dev \ libssl-dev \ pkg-config \ sqlite3 RUN curl https://sh.rustup.rs -sSf | sh -s -- -y --default-toolchain nightly ENV PATH=/root/.cargo/bin:$PATH
55166bcf59cb394c27007946fd95e7da4efacfed
lpc812-nrf24l01-receiver/firmware/README.md
lpc812-nrf24l01-receiver/firmware/README.md
**Note: a pre-compiled firmware is available for download [here](receiver.hex).** - **GCC for ARM** [https://launchpad.net/gcc-arm-embedded/](https://launchpad.net/gcc-arm-embedded/) - **GNU Make** [https://www.gnu.org/software/make/](https://www.gnu.org/software/make/); Windows executable is available at [http://gnuwin32.sourceforge.net/packages/make.htm](http://gnuwin32.sourceforge.net/packages/make.htm) - Optional: LCP81x-ISP [FIXME: link to tool on our github page]() Alternatively you can use any tool that can flash the LPC812 via ISP. - Windows users: **rm.exe** from [coreutils](http://gnuwin32.sourceforge.net/downlinks/coreutils-bin-zip.php). # Building the firmware Running ``make`` in a console window in this directory builds the firmware. The resulting *receiver.bin* and *receiver.hex* are located in the build directory. Running ``make program`` flashes the firmware, assuming you are using the *LCP81x-ISP* tool. It may be advisable to check the ``makefile`` whether the settings are desired for your application.
**Note: a pre-compiled firmware is available for download [here](receiver.hex).** - **GCC for ARM** [https://launchpad.net/gcc-arm-embedded/](https://launchpad.net/gcc-arm-embedded/) - **GNU Make** [https://www.gnu.org/software/make/](https://www.gnu.org/software/make/); Windows executable is available at [http://gnuwin32.sourceforge.net/packages/make.htm](http://gnuwin32.sourceforge.net/packages/make.htm) - Optional: LCP81x-ISP [https://github.com/laneboysrc/LPC81x-ISP-tool](https://github.com/laneboysrc/LPC81x-ISP-tool) Alternatively you can use any tool that can flash the LPC812 via ISP. - Windows users: **rm.exe** from [coreutils](http://gnuwin32.sourceforge.net/downlinks/coreutils-bin-zip.php). # Building the firmware Running ``make`` in a console window in this directory builds the firmware. The resulting *receiver.bin* and *receiver.hex* are located in the build directory. Running ``make program`` flashes the firmware, assuming you are using the *LCP81x-ISP* tool. It may be advisable to check the ``makefile`` whether the settings are desired for your application.
Add link to LPC81x ISP tool
Add link to LPC81x ISP tool
Markdown
unlicense
laneboysrc/nrf24l01-rc,laneboysrc/nrf24l01-rc,laneboysrc/nrf24l01-rc
markdown
## Code Before: **Note: a pre-compiled firmware is available for download [here](receiver.hex).** - **GCC for ARM** [https://launchpad.net/gcc-arm-embedded/](https://launchpad.net/gcc-arm-embedded/) - **GNU Make** [https://www.gnu.org/software/make/](https://www.gnu.org/software/make/); Windows executable is available at [http://gnuwin32.sourceforge.net/packages/make.htm](http://gnuwin32.sourceforge.net/packages/make.htm) - Optional: LCP81x-ISP [FIXME: link to tool on our github page]() Alternatively you can use any tool that can flash the LPC812 via ISP. - Windows users: **rm.exe** from [coreutils](http://gnuwin32.sourceforge.net/downlinks/coreutils-bin-zip.php). # Building the firmware Running ``make`` in a console window in this directory builds the firmware. The resulting *receiver.bin* and *receiver.hex* are located in the build directory. Running ``make program`` flashes the firmware, assuming you are using the *LCP81x-ISP* tool. It may be advisable to check the ``makefile`` whether the settings are desired for your application. ## Instruction: Add link to LPC81x ISP tool ## Code After: **Note: a pre-compiled firmware is available for download [here](receiver.hex).** - **GCC for ARM** [https://launchpad.net/gcc-arm-embedded/](https://launchpad.net/gcc-arm-embedded/) - **GNU Make** [https://www.gnu.org/software/make/](https://www.gnu.org/software/make/); Windows executable is available at [http://gnuwin32.sourceforge.net/packages/make.htm](http://gnuwin32.sourceforge.net/packages/make.htm) - Optional: LCP81x-ISP [https://github.com/laneboysrc/LPC81x-ISP-tool](https://github.com/laneboysrc/LPC81x-ISP-tool) Alternatively you can use any tool that can flash the LPC812 via ISP. - Windows users: **rm.exe** from [coreutils](http://gnuwin32.sourceforge.net/downlinks/coreutils-bin-zip.php). # Building the firmware Running ``make`` in a console window in this directory builds the firmware. The resulting *receiver.bin* and *receiver.hex* are located in the build directory. Running ``make program`` flashes the firmware, assuming you are using the *LCP81x-ISP* tool. It may be advisable to check the ``makefile`` whether the settings are desired for your application.
beaca9a888b95e8362a0ee9db080b4cf51e52806
node_modules/oae-core/etherpad/css/etherpad.css
node_modules/oae-core/etherpad/css/etherpad.css
/*! * Copyright 2014 Apereo Foundation (AF) Licensed under the * Educational Community License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. You may * obtain a copy of the License at * * http://opensource.org/licenses/ECL-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" * BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing * permissions and limitations under the License. */ #etherpad-container #etherpad-view-mode { margin: 30px 0; } #etherpad-container #etherpad-view-mode ul.indent li { list-style: none; } #etherpad-container #etherpad-no-content { display: block; margin: 0 auto 20px; max-width: 500px; text-align: center; } /** * Min-height needs to be applied on the container and the iframe. When the page unloads, * the iframe is removed from the DOM. This min-height will avoid the comments jumping * to the top of the page */ #etherpad-container #etherpad-edit-mode { padding: 20px 0 40px; min-height: 500px; } #etherpad-container #etherpad-edit-mode #etherpad-editor { border: 0; width: 100%; min-height: 500px; }
/*! * Copyright 2014 Apereo Foundation (AF) Licensed under the * Educational Community License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. You may * obtain a copy of the License at * * http://opensource.org/licenses/ECL-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" * BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing * permissions and limitations under the License. */ #etherpad-container #etherpad-view-mode { margin: 30px 0; } #etherpad-container #etherpad-view-mode ul.indent li { list-style: none; } /** * Min-height needs to be applied on the container and the iframe. When the page unloads, * the iframe is removed from the DOM. This min-height will avoid the comments jumping * to the top of the page */ #etherpad-container #etherpad-edit-mode { padding: 20px 0 40px; min-height: 500px; } #etherpad-container #etherpad-edit-mode #etherpad-editor { border: 0; width: 100%; min-height: 500px; }
Remove custom styling for empty document message
Remove custom styling for empty document message
CSS
apache-2.0
Orodan/3akai-ux-jitsi-fork,nicolaasmatthijs/3akai-ux,jfederico/3akai-ux,Orodan/3akai-ux,timdegroote/3akai-ux,Coenego/avocet-ui,stuartf/3akai-ux,timdegroote/3akai-ux,simong/3akai-ux,Orodan/3akai-ux-jitsi-fork,nicolaasmatthijs/3akai-ux,mrvisser/3akai-ux,jfederico/3akai-ux,Orodan/3akai-ux-jitsi-fork,nicolaasmatthijs/3akai-ux,simong/3akai-ux,Orodan/3akai-ux,Coenego/avocet-ui,mrvisser/3akai-ux,mrvisser/3akai-ux,jfederico/3akai-ux,timdegroote/3akai-ux,stuartf/3akai-ux,Orodan/3akai-ux,simong/3akai-ux,stuartf/3akai-ux
css
## Code Before: /*! * Copyright 2014 Apereo Foundation (AF) Licensed under the * Educational Community License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. You may * obtain a copy of the License at * * http://opensource.org/licenses/ECL-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" * BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing * permissions and limitations under the License. */ #etherpad-container #etherpad-view-mode { margin: 30px 0; } #etherpad-container #etherpad-view-mode ul.indent li { list-style: none; } #etherpad-container #etherpad-no-content { display: block; margin: 0 auto 20px; max-width: 500px; text-align: center; } /** * Min-height needs to be applied on the container and the iframe. When the page unloads, * the iframe is removed from the DOM. This min-height will avoid the comments jumping * to the top of the page */ #etherpad-container #etherpad-edit-mode { padding: 20px 0 40px; min-height: 500px; } #etherpad-container #etherpad-edit-mode #etherpad-editor { border: 0; width: 100%; min-height: 500px; } ## Instruction: Remove custom styling for empty document message ## Code After: /*! * Copyright 2014 Apereo Foundation (AF) Licensed under the * Educational Community License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. You may * obtain a copy of the License at * * http://opensource.org/licenses/ECL-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" * BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing * permissions and limitations under the License. */ #etherpad-container #etherpad-view-mode { margin: 30px 0; } #etherpad-container #etherpad-view-mode ul.indent li { list-style: none; } /** * Min-height needs to be applied on the container and the iframe. When the page unloads, * the iframe is removed from the DOM. This min-height will avoid the comments jumping * to the top of the page */ #etherpad-container #etherpad-edit-mode { padding: 20px 0 40px; min-height: 500px; } #etherpad-container #etherpad-edit-mode #etherpad-editor { border: 0; width: 100%; min-height: 500px; }
a92daf9cd71bd5e2935fbb1662791eac3fab45c8
lib/octopolo/templates/pull_request_body.erb
lib/octopolo/templates/pull_request_body.erb
<%= description %> Deploy Plan ----------- Does Infrastructure need to know anything special about this deploy? If so, keep this section and fill it in. **Otherwise, delete it.** Rollback Plan ------------- **If this pull request requires anything more complex (e.g., rolling back a migration), you MUST update this section. Otherwise, delete this note.** To roll back this change, revert the merge with `git revert -m 1 MERGE_SHA` and perform another deploy. URLs ---- <% pivotal_ids.each do |pivotal_id| -%> * [pivotal tracker story <%= pivotal_id %>](https://www.pivotaltracker.com/story/show/<%= pivotal_id %>) <% end -%> * Add any additional URLs (like Helpspot tickets) QA Plan ------- Provide a detailed QA plan, or other developers will retain the right to mock you mercilessly.
<%= description %> Deploy Plan ----------- Does Infrastructure need to know anything special about this deploy? If so, keep this section and fill it in. **Otherwise, delete it.** Rollback Plan ------------- **If this pull request requires anything more complex (e.g., rolling back a migration), you MUST update this section. Otherwise, delete this note.** To roll back this change, revert the merge with `git revert -m 1 MERGE_SHA` and perform another deploy. URLs ---- <% pivotal_ids.each do |pivotal_id| -%> * [pivotal tracker story <%= pivotal_id %>](https://www.pivotaltracker.com/story/show/<%= pivotal_id %>) <% end -%> <% jira_ids.each do |jira_id| -%> * [Jira issue <%= jira_id %>](https://<%= jira_url %>/browse/<%= jira_id %>) <% end -%> QA Plan ------- Provide a detailed QA plan, or other developers will retain the right to mock you mercilessly.
Update pull request template to include the jira issue urls
Update pull request template to include the jira issue urls
HTML+ERB
mit
sportngin/octopolo,sportngin/octopolo,sportngin/octopolo
html+erb
## Code Before: <%= description %> Deploy Plan ----------- Does Infrastructure need to know anything special about this deploy? If so, keep this section and fill it in. **Otherwise, delete it.** Rollback Plan ------------- **If this pull request requires anything more complex (e.g., rolling back a migration), you MUST update this section. Otherwise, delete this note.** To roll back this change, revert the merge with `git revert -m 1 MERGE_SHA` and perform another deploy. URLs ---- <% pivotal_ids.each do |pivotal_id| -%> * [pivotal tracker story <%= pivotal_id %>](https://www.pivotaltracker.com/story/show/<%= pivotal_id %>) <% end -%> * Add any additional URLs (like Helpspot tickets) QA Plan ------- Provide a detailed QA plan, or other developers will retain the right to mock you mercilessly. ## Instruction: Update pull request template to include the jira issue urls ## Code After: <%= description %> Deploy Plan ----------- Does Infrastructure need to know anything special about this deploy? If so, keep this section and fill it in. **Otherwise, delete it.** Rollback Plan ------------- **If this pull request requires anything more complex (e.g., rolling back a migration), you MUST update this section. Otherwise, delete this note.** To roll back this change, revert the merge with `git revert -m 1 MERGE_SHA` and perform another deploy. URLs ---- <% pivotal_ids.each do |pivotal_id| -%> * [pivotal tracker story <%= pivotal_id %>](https://www.pivotaltracker.com/story/show/<%= pivotal_id %>) <% end -%> <% jira_ids.each do |jira_id| -%> * [Jira issue <%= jira_id %>](https://<%= jira_url %>/browse/<%= jira_id %>) <% end -%> QA Plan ------- Provide a detailed QA plan, or other developers will retain the right to mock you mercilessly.
37a4052294143c19020a9f6f391ad8739f231e61
assets/scss/components/foundation/modal/_modal.scss
assets/scss/components/foundation/modal/_modal.scss
// ============================================================================= // MODAL (CSS) // ============================================================================= .modal { min-height: 240px; } .modal-header-title { text-align: center; } .modal-close { text-decoration: none; } .modal-body .modal-close { right: 20px; top: 15px; transform: none; }
// ============================================================================= // MODAL (CSS) // ============================================================================= .modal { left: 0; margin: 0 auto; max-height: 90%; max-width: 95%; min-height: 240px; right: 0; top: 50% !important; } .modal-header-title { text-align: center; } .modal-close { text-decoration: none; } .modal-body .modal-close { right: 20px; top: 15px; transform: none; }
Fix modal layout for small screens
Fix modal layout for small screens
SCSS
mit
ovsokolov/dashconnect-bc,bc-annavu/stencil,caras-ey/BST,PascalZajac/cornerstone,aaronrodier84/flukerfarms,aaronrodier84/rouxbrands,mcampa/cornerstone,aaronrodier84/rouxbrands,PascalZajac/cornerstone,PascalZajac/cornerstone,bigcommerce/stencil,ovsokolov/dashconnect-bc,rho1140/cornerstone,PeteyRev/ud-revamp,aaronrodier84/flukerfarms,PeteyRev/ud-revamp,juancho1/bauhaus,mcampa/cornerstone,caras-ey/BST,PeteyRev/ud-revamp,bc-annavu/stencil,ovsokolov/dashconnect-bc,mcampa/cornerstone,aaronrodier84/rouxbrands,caras-ey/BST,rho1140/cornerstone,aaronrodier84/flukerfarms,juancho1/bauhaus,bigcommerce/stencil,juancho1/bauhaus,bc-annavu/stencil,rho1140/cornerstone,bigcommerce/stencil
scss
## Code Before: // ============================================================================= // MODAL (CSS) // ============================================================================= .modal { min-height: 240px; } .modal-header-title { text-align: center; } .modal-close { text-decoration: none; } .modal-body .modal-close { right: 20px; top: 15px; transform: none; } ## Instruction: Fix modal layout for small screens ## Code After: // ============================================================================= // MODAL (CSS) // ============================================================================= .modal { left: 0; margin: 0 auto; max-height: 90%; max-width: 95%; min-height: 240px; right: 0; top: 50% !important; } .modal-header-title { text-align: center; } .modal-close { text-decoration: none; } .modal-body .modal-close { right: 20px; top: 15px; transform: none; }
69ddd0b9133497be045fa9fe3a82a711985c3d3e
kuulemma/templates/_footer.html
kuulemma/templates/_footer.html
<footer> <div class="container"> <div class="brand"> <img src="{{ url_for('static', filename='images/helsinki-coat-of-arms-black.png') }}" alt="Helsingin vaakuna"> <a href="#">Kerrokantasi</a> </div> <div class="footer-links"> <ul> <li> <a class="navbar-info-link" href="{{ url_for('hearing.index') }}">Kuulemiset</a> </li> <li> <a href="{{ url_for('static_pages.service_info') }}">Tietoa palvelusta</a> </li> </div> <div class="footer-login"> {% include "_auth_links.html" %} </div> </div> </footer>
<footer> <div class="container"> <div class="brand"> <img src="{{ url_for('static', filename='images/helsinki-coat-of-arms-black.png') }}" alt="Helsingin vaakuna"> <a href="{{ url_for('frontpage.index') }}">Kerrokantasi</a> </div> <div class="footer-links"> <ul> <li> <a class="navbar-info-link" href="{{ url_for('hearing.index') }}">Kuulemiset</a> </li> <li> <a href="{{ url_for('static_pages.service_info') }}">Tietoa palvelusta</a> </li> </div> <div class="footer-login"> {% include "_auth_links.html" %} </div> </div> </footer>
Fix footer home page link
Fix footer home page link
HTML
agpl-3.0
City-of-Helsinki/kuulemma,fastmonkeys/kuulemma,City-of-Helsinki/kuulemma,City-of-Helsinki/kuulemma,fastmonkeys/kuulemma,fastmonkeys/kuulemma
html
## Code Before: <footer> <div class="container"> <div class="brand"> <img src="{{ url_for('static', filename='images/helsinki-coat-of-arms-black.png') }}" alt="Helsingin vaakuna"> <a href="#">Kerrokantasi</a> </div> <div class="footer-links"> <ul> <li> <a class="navbar-info-link" href="{{ url_for('hearing.index') }}">Kuulemiset</a> </li> <li> <a href="{{ url_for('static_pages.service_info') }}">Tietoa palvelusta</a> </li> </div> <div class="footer-login"> {% include "_auth_links.html" %} </div> </div> </footer> ## Instruction: Fix footer home page link ## Code After: <footer> <div class="container"> <div class="brand"> <img src="{{ url_for('static', filename='images/helsinki-coat-of-arms-black.png') }}" alt="Helsingin vaakuna"> <a href="{{ url_for('frontpage.index') }}">Kerrokantasi</a> </div> <div class="footer-links"> <ul> <li> <a class="navbar-info-link" href="{{ url_for('hearing.index') }}">Kuulemiset</a> </li> <li> <a href="{{ url_for('static_pages.service_info') }}">Tietoa palvelusta</a> </li> </div> <div class="footer-login"> {% include "_auth_links.html" %} </div> </div> </footer>
25724a77c19828d52cba2b6e682c67f67013590e
django_counter_field/fields.py
django_counter_field/fields.py
from django.db import models class CounterField(models.IntegerField): def __init__(self, *args, **kwargs): kwargs['default'] = kwargs.get('default', 0) super(CounterField, self).__init__(*args, **kwargs) try: from south.modelsinspector import add_introspection_rules except ImportError: pass else: add_introspection_rules([], ["^pomoji\.django_counter_field\.fields\.CounterField"])
from django.db import models class CounterField(models.IntegerField): """ CounterField wraps the standard django IntegerField. It exists primarily to allow for easy validation of counter fields. The default value of a counter field is 0. """ def __init__(self, *args, **kwargs): kwargs['default'] = kwargs.get('default', 0) super(CounterField, self).__init__(*args, **kwargs) try: from south.modelsinspector import add_introspection_rules except ImportError: pass else: add_introspection_rules([], ["^django_counter_field\.fields\.CounterField"])
Fix bug in introspection rule
Fix bug in introspection rule
Python
mit
kajic/django-counter-field
python
## Code Before: from django.db import models class CounterField(models.IntegerField): def __init__(self, *args, **kwargs): kwargs['default'] = kwargs.get('default', 0) super(CounterField, self).__init__(*args, **kwargs) try: from south.modelsinspector import add_introspection_rules except ImportError: pass else: add_introspection_rules([], ["^pomoji\.django_counter_field\.fields\.CounterField"]) ## Instruction: Fix bug in introspection rule ## Code After: from django.db import models class CounterField(models.IntegerField): """ CounterField wraps the standard django IntegerField. It exists primarily to allow for easy validation of counter fields. The default value of a counter field is 0. """ def __init__(self, *args, **kwargs): kwargs['default'] = kwargs.get('default', 0) super(CounterField, self).__init__(*args, **kwargs) try: from south.modelsinspector import add_introspection_rules except ImportError: pass else: add_introspection_rules([], ["^django_counter_field\.fields\.CounterField"])
c026131558cdfd02d62d6d60bf494e93fd2d5285
test/fixtures/with-config/nuxt.config.js
test/fixtures/with-config/nuxt.config.js
module.exports = { router: { base: '/test/' }, cache: true, plugins: ['~plugins/test.js'], loading: '~components/loading', env: { bool: true, num: 23, string: 'Nuxt.js' } }
module.exports = { router: { base: '/test/' }, cache: true, plugins: ['~plugins/test.js'], loading: '~components/loading', env: { bool: true, num: 23, string: 'Nuxt.js' }, extend (config, options) { config.devtool = 'eval-source-map' } }
Add test for extend option
Add test for extend option
JavaScript
mit
jfroffice/nuxt.js,cj/nuxt.js,jfroffice/nuxt.js,cj/nuxt.js,mgesmundo/nuxt.js,mgesmundo/nuxt.js
javascript
## Code Before: module.exports = { router: { base: '/test/' }, cache: true, plugins: ['~plugins/test.js'], loading: '~components/loading', env: { bool: true, num: 23, string: 'Nuxt.js' } } ## Instruction: Add test for extend option ## Code After: module.exports = { router: { base: '/test/' }, cache: true, plugins: ['~plugins/test.js'], loading: '~components/loading', env: { bool: true, num: 23, string: 'Nuxt.js' }, extend (config, options) { config.devtool = 'eval-source-map' } }
777e4faba65618218baad5ff9ecf67a7000288a2
code/etc/adminhtml.xml
code/etc/adminhtml.xml
<?xml version="1.0"?> <config> <acl> <resources> <admin> <children> <system> <children> <config> <children> <algoliasearch translate="title" module="algoliasearch"> <title>Configuration Section</title> <sort_order>100</sort_order> </algoliasearch> </children> </config> </children> </system> </children> </admin> </resources> </acl> </config>
<?xml version="1.0"?> <config> <acl> <resources> <admin> <children> <system> <children> <config> <children> <algoliasearch translate="title" module="algoliasearch"> <title>Algolia Search Configuration</title> <sort_order>100</sort_order> </algoliasearch> </children> </config> </children> </system> </children> </admin> </resources> </acl> </config>
Fix ACL Algolia Search title
Fix ACL Algolia Search title
XML
mit
michaelscheel/algoliasearch-magento,michaelscheel/algoliasearch-magento,michaelscheel/algoliasearch-magento,michaelscheel/algoliasearch-magento
xml
## Code Before: <?xml version="1.0"?> <config> <acl> <resources> <admin> <children> <system> <children> <config> <children> <algoliasearch translate="title" module="algoliasearch"> <title>Configuration Section</title> <sort_order>100</sort_order> </algoliasearch> </children> </config> </children> </system> </children> </admin> </resources> </acl> </config> ## Instruction: Fix ACL Algolia Search title ## Code After: <?xml version="1.0"?> <config> <acl> <resources> <admin> <children> <system> <children> <config> <children> <algoliasearch translate="title" module="algoliasearch"> <title>Algolia Search Configuration</title> <sort_order>100</sort_order> </algoliasearch> </children> </config> </children> </system> </children> </admin> </resources> </acl> </config>
9fd5ee1f16e5a44219ee6fff0d38bd330da966a1
circle.yml
circle.yml
version: 2 executorType: docker containerInfo: - image: golang:1.8 stages: build: workDir: /go/src/github.com/itzg/restify steps: - type: checkout - type: shell command: | go get go install - type: deploy command: | tag=$(git describe --exact-match --tags) if [ $? = 0 ]; then go get github.com/mitchellh/gox go get github.com/tcnksm/ghr CGO_ENABLED=0 gox -ldflags "-X main.Version=$BUILD_VERSION -X main.BuildDate=$BUILD_DATE" -output "dist/${CIRCLE_PROJECT_REPONAME}_{{.OS}}_{{.Arch}}" ghr -u $CIRCLE_PROJECT_USERNAME -r $CIRCLE_PROJECT_REPONAME --replace $tag dist/ fi
version: 2 executorType: docker containerInfo: - image: golang:1.8 stages: build: workDir: /go/src/github.com/itzg/restify steps: - type: checkout - type: shell command: | go get go install - type: deploy command: | tag=$(git describe --exact-match --tags) if [ $? = 0 ]; then go get github.com/mitchellh/gox go get github.com/tcnksm/ghr CGO_ENABLED=0 gox -ldflags "-X main.Version=$BUILD_VERSION -X main.BuildDate=$BUILD_DATE" \ -output "dist/${CIRCLE_PROJECT_REPONAME}_{{.OS}}_{{.Arch}}" ghr -u $CIRCLE_USERNAME -r restify --replace $tag dist/ fi
Remove use of old CIRCLE env vars
Remove use of old CIRCLE env vars
YAML
apache-2.0
itzg/restify
yaml
## Code Before: version: 2 executorType: docker containerInfo: - image: golang:1.8 stages: build: workDir: /go/src/github.com/itzg/restify steps: - type: checkout - type: shell command: | go get go install - type: deploy command: | tag=$(git describe --exact-match --tags) if [ $? = 0 ]; then go get github.com/mitchellh/gox go get github.com/tcnksm/ghr CGO_ENABLED=0 gox -ldflags "-X main.Version=$BUILD_VERSION -X main.BuildDate=$BUILD_DATE" -output "dist/${CIRCLE_PROJECT_REPONAME}_{{.OS}}_{{.Arch}}" ghr -u $CIRCLE_PROJECT_USERNAME -r $CIRCLE_PROJECT_REPONAME --replace $tag dist/ fi ## Instruction: Remove use of old CIRCLE env vars ## Code After: version: 2 executorType: docker containerInfo: - image: golang:1.8 stages: build: workDir: /go/src/github.com/itzg/restify steps: - type: checkout - type: shell command: | go get go install - type: deploy command: | tag=$(git describe --exact-match --tags) if [ $? = 0 ]; then go get github.com/mitchellh/gox go get github.com/tcnksm/ghr CGO_ENABLED=0 gox -ldflags "-X main.Version=$BUILD_VERSION -X main.BuildDate=$BUILD_DATE" \ -output "dist/${CIRCLE_PROJECT_REPONAME}_{{.OS}}_{{.Arch}}" ghr -u $CIRCLE_USERNAME -r restify --replace $tag dist/ fi
af43e2f29ccf25169cc32fe1c9af36b876a1c11a
tests/KafkaTest/Produce/ProduceTest.php
tests/KafkaTest/Produce/ProduceTest.php
<?php /* vim: set expandtab tabstop=4 shiftwidth=4 softtabstop=4 foldmethod=marker: */ // +--------------------------------------------------------------------------- // | SWAN [ $_SWANBR_SLOGAN_$ ] // +--------------------------------------------------------------------------- // | Copyright $_SWANBR_COPYRIGHT_$ // +--------------------------------------------------------------------------- // | Version $_SWANBR_VERSION_$ // +--------------------------------------------------------------------------- // | Licensed ( $_SWANBR_LICENSED_URL_$ ) // +--------------------------------------------------------------------------- // | $_SWANBR_WEB_DOMAIN_$ // +--------------------------------------------------------------------------- namespace KafkaTest\Produce; /** +------------------------------------------------------------------------------ * Kafka protocol since Kafka v0.8 +------------------------------------------------------------------------------ * * @package * @version $_SWANBR_VERSION_$ * @copyright Copyleft * @author $_SWANBR_AUTHOR_$ +------------------------------------------------------------------------------ */ class ProduceTest extends \PHPUnit_Framework_TestCase { // {{{ consts // }}} // {{{ members // }}} // {{{ functions // {{{ public function testSend() /** * testSend * * @access public * @return void */ public function testSend() { } // }}} // }}} }
<?php /* vim: set expandtab tabstop=4 shiftwidth=4 softtabstop=4 foldmethod=marker: */ // +--------------------------------------------------------------------------- // | SWAN [ $_SWANBR_SLOGAN_$ ] // +--------------------------------------------------------------------------- // | Copyright $_SWANBR_COPYRIGHT_$ // +--------------------------------------------------------------------------- // | Version $_SWANBR_VERSION_$ // +--------------------------------------------------------------------------- // | Licensed ( $_SWANBR_LICENSED_URL_$ ) // +--------------------------------------------------------------------------- // | $_SWANBR_WEB_DOMAIN_$ // +--------------------------------------------------------------------------- namespace KafkaTest\Produce; /** +------------------------------------------------------------------------------ * Kafka protocol since Kafka v0.8 +------------------------------------------------------------------------------ * * @package * @version $_SWANBR_VERSION_$ * @copyright Copyleft * @author $_SWANBR_AUTHOR_$ +------------------------------------------------------------------------------ */ class ProduceTest extends \PHPUnit_Framework_TestCase { // {{{ consts // }}} // {{{ members // }}} // {{{ functions // {{{ public function testSend() /** * testSend * * @access public * @return void */ public function testSend() { $this->markTestIncomplete('Test not implemented yet'); } // }}} // }}} }
Mark empty test as incomplete
Mark empty test as incomplete To make things more explicit.
PHP
apache-2.0
nmred/kafka-php,nmred/kafka-php,weiboad/kafka-php
php
## Code Before: <?php /* vim: set expandtab tabstop=4 shiftwidth=4 softtabstop=4 foldmethod=marker: */ // +--------------------------------------------------------------------------- // | SWAN [ $_SWANBR_SLOGAN_$ ] // +--------------------------------------------------------------------------- // | Copyright $_SWANBR_COPYRIGHT_$ // +--------------------------------------------------------------------------- // | Version $_SWANBR_VERSION_$ // +--------------------------------------------------------------------------- // | Licensed ( $_SWANBR_LICENSED_URL_$ ) // +--------------------------------------------------------------------------- // | $_SWANBR_WEB_DOMAIN_$ // +--------------------------------------------------------------------------- namespace KafkaTest\Produce; /** +------------------------------------------------------------------------------ * Kafka protocol since Kafka v0.8 +------------------------------------------------------------------------------ * * @package * @version $_SWANBR_VERSION_$ * @copyright Copyleft * @author $_SWANBR_AUTHOR_$ +------------------------------------------------------------------------------ */ class ProduceTest extends \PHPUnit_Framework_TestCase { // {{{ consts // }}} // {{{ members // }}} // {{{ functions // {{{ public function testSend() /** * testSend * * @access public * @return void */ public function testSend() { } // }}} // }}} } ## Instruction: Mark empty test as incomplete To make things more explicit. ## Code After: <?php /* vim: set expandtab tabstop=4 shiftwidth=4 softtabstop=4 foldmethod=marker: */ // +--------------------------------------------------------------------------- // | SWAN [ $_SWANBR_SLOGAN_$ ] // +--------------------------------------------------------------------------- // | Copyright $_SWANBR_COPYRIGHT_$ // +--------------------------------------------------------------------------- // | Version $_SWANBR_VERSION_$ // +--------------------------------------------------------------------------- // | Licensed ( $_SWANBR_LICENSED_URL_$ ) // +--------------------------------------------------------------------------- // | $_SWANBR_WEB_DOMAIN_$ // +--------------------------------------------------------------------------- namespace KafkaTest\Produce; /** +------------------------------------------------------------------------------ * Kafka protocol since Kafka v0.8 +------------------------------------------------------------------------------ * * @package * @version $_SWANBR_VERSION_$ * @copyright Copyleft * @author $_SWANBR_AUTHOR_$ +------------------------------------------------------------------------------ */ class ProduceTest extends \PHPUnit_Framework_TestCase { // {{{ consts // }}} // {{{ members // }}} // {{{ functions // {{{ public function testSend() /** * testSend * * @access public * @return void */ public function testSend() { $this->markTestIncomplete('Test not implemented yet'); } // }}} // }}} }
56b9725cbd96bc462fc09fcce0c12e8a6851d2cd
package.json
package.json
{ "name": "fly-quaint", "version": "0.0.0", "description": "Quaint plugin for Fly.", "license": "MIT", "repository": "https://github.com/MadcapJake/fly-quaint", "main": "lib/index.js", "keywords": [ "fly", "fly-plugin", "quaint" ], "scripts": { "lint": "eslint *.js", "setup": "npm i && npm test", "test": "npm run lint && tape test/*.js | tspec", "build": "echo No build task specified.", "deploy": "npm run test && git push origin master && npm publish", "test-ci": "node --harmony --harmony_arrow_functions ./node_modules/tape/bin/tape test/*.js" }, "author": { "name": "Jake Russo", "email": "[email protected]", "url": "http://github.com/MadcapJake" }, "dependencies": { "earlgrey": "0.0.10", "quaint": "0.0.3" }, "devDependencies": { "eslint": "^0.21.2", "fly": "^0.3.4", "fly-earl": "^0.3.2", "tap-spec": "^4.0.2", "tape": "^4.0.0" }, "engines": { "iojs": ">= 1.0.0", "node": ">= 0.11.0" } }
{ "name": "fly-quaint", "version": "0.0.0", "description": "Quaint plugin for Fly.", "license": "MIT", "repository": "https://github.com/MadcapJake/fly-quaint", "main": "lib/index.js", "keywords": [ "fly", "fly-plugin", "quaint" ], "scripts": { "lint": "eslint *.js", "setup": "npm i && npm test", "test": "npm run lint && tape test/*.js | tspec", "build": "fly", "deploy": "npm run test && git push origin master && npm publish", "test-ci": "node --harmony --harmony_arrow_functions ./node_modules/tape/bin/tape test/*.js" }, "author": { "name": "Jake Russo", "email": "[email protected]", "url": "http://github.com/MadcapJake" }, "dependencies": { "earlgrey": "0.0.10", "quaint": "0.0.3" }, "devDependencies": { "eslint": "^0.21.2", "fly": "^0.3.4", "fly-earl": "^0.3.2", "tap-spec": "^4.0.2", "tape": "^4.0.0" }, "engines": { "iojs": ">= 1.0.0", "node": ">= 0.11.0" } }
Update `build` script to fly
Update `build` script to fly
JSON
mit
MadcapJake/fly-quaint
json
## Code Before: { "name": "fly-quaint", "version": "0.0.0", "description": "Quaint plugin for Fly.", "license": "MIT", "repository": "https://github.com/MadcapJake/fly-quaint", "main": "lib/index.js", "keywords": [ "fly", "fly-plugin", "quaint" ], "scripts": { "lint": "eslint *.js", "setup": "npm i && npm test", "test": "npm run lint && tape test/*.js | tspec", "build": "echo No build task specified.", "deploy": "npm run test && git push origin master && npm publish", "test-ci": "node --harmony --harmony_arrow_functions ./node_modules/tape/bin/tape test/*.js" }, "author": { "name": "Jake Russo", "email": "[email protected]", "url": "http://github.com/MadcapJake" }, "dependencies": { "earlgrey": "0.0.10", "quaint": "0.0.3" }, "devDependencies": { "eslint": "^0.21.2", "fly": "^0.3.4", "fly-earl": "^0.3.2", "tap-spec": "^4.0.2", "tape": "^4.0.0" }, "engines": { "iojs": ">= 1.0.0", "node": ">= 0.11.0" } } ## Instruction: Update `build` script to fly ## Code After: { "name": "fly-quaint", "version": "0.0.0", "description": "Quaint plugin for Fly.", "license": "MIT", "repository": "https://github.com/MadcapJake/fly-quaint", "main": "lib/index.js", "keywords": [ "fly", "fly-plugin", "quaint" ], "scripts": { "lint": "eslint *.js", "setup": "npm i && npm test", "test": "npm run lint && tape test/*.js | tspec", "build": "fly", "deploy": "npm run test && git push origin master && npm publish", "test-ci": "node --harmony --harmony_arrow_functions ./node_modules/tape/bin/tape test/*.js" }, "author": { "name": "Jake Russo", "email": "[email protected]", "url": "http://github.com/MadcapJake" }, "dependencies": { "earlgrey": "0.0.10", "quaint": "0.0.3" }, "devDependencies": { "eslint": "^0.21.2", "fly": "^0.3.4", "fly-earl": "^0.3.2", "tap-spec": "^4.0.2", "tape": "^4.0.0" }, "engines": { "iojs": ">= 1.0.0", "node": ">= 0.11.0" } }
4d89dbd4f128b7fb87bd28ab6ddd3887cc0c3ce1
apps/hortonworks/hdp2/centos6/README.md
apps/hortonworks/hdp2/centos6/README.md
Deploy ====== First, build the startup scripts for the instances by running the following command from this directory: ```bash make -s -C scripts/init ``` To deploy Ambari, run the following command from the top-level directory: ```bash src/cloud_launcher.sh --config apps/hortonworks/hdp2/centos6/vm/ambari.py insert ``` To see other flags, run: ```bash src/cloud_launcher.sh --help ``` Accessing Ambari ---------------- Forward the port from localhost over SSH to the Ambari instance. From the top-level project directory, run: ```bash env SERVER=ambari-server ./scripts/util/forward-port.sh ``` See the script for how to change local or remote ports. Access Ambari via [http://localhost:8080/](http://localhost:8080) to continue the installation. Installation ------------ The default login credentials are: * username: admin * password: admin These can be changed after you set up the cluster. The agent hostname pattern: ``` ambari-agent-[0-4].c.${PROJECT}.internal ``` adjust this pattern as needed, e.g., change the '4' to N-1 where N is the number of agent instances in your deployment.
Deploy ====== First, build the startup scripts for the instances by running the following command from this directory: ```bash make -s -C scripts/init ``` To deploy Ambari, run the following command from the top-level directory: ```bash src/cloud_launcher.sh --config apps/hortonworks/hdp2/centos6/vm/ambari.py insert ``` To see other flags, run: ```bash src/cloud_launcher.sh --help ``` Accessing Ambari ---------------- * run a [local SOCKS proxy](../../../../scripts/util/socks-proxy.md) and configure your browser to use it * open [http://ambari-server:8080/](http://ambari-server:8080) to continue the installation and monitor the cluster once installed You will be able to access any of the hosts in your deployment via your browser directly while you are using the SOCKS proxy as described in the instructions. Installation ------------ The default login credentials are: * username: admin * password: admin These can be changed after you set up the cluster. The agent hostname pattern: ``` ambari-agent-[0-4].c.${PROJECT}.internal ``` adjust this pattern as needed, e.g., change the '4' to N-1 where N is the number of agent instances in your deployment.
Switch from an SSH tunnel to a SOCKS proxy.
Switch from an SSH tunnel to a SOCKS proxy.
Markdown
apache-2.0
mbrukman/cloud-launcher,mbrukman/cloud-launcher,mbrukman/cloud-launcher,mbrukman/cloud-launcher
markdown
## Code Before: Deploy ====== First, build the startup scripts for the instances by running the following command from this directory: ```bash make -s -C scripts/init ``` To deploy Ambari, run the following command from the top-level directory: ```bash src/cloud_launcher.sh --config apps/hortonworks/hdp2/centos6/vm/ambari.py insert ``` To see other flags, run: ```bash src/cloud_launcher.sh --help ``` Accessing Ambari ---------------- Forward the port from localhost over SSH to the Ambari instance. From the top-level project directory, run: ```bash env SERVER=ambari-server ./scripts/util/forward-port.sh ``` See the script for how to change local or remote ports. Access Ambari via [http://localhost:8080/](http://localhost:8080) to continue the installation. Installation ------------ The default login credentials are: * username: admin * password: admin These can be changed after you set up the cluster. The agent hostname pattern: ``` ambari-agent-[0-4].c.${PROJECT}.internal ``` adjust this pattern as needed, e.g., change the '4' to N-1 where N is the number of agent instances in your deployment. ## Instruction: Switch from an SSH tunnel to a SOCKS proxy. ## Code After: Deploy ====== First, build the startup scripts for the instances by running the following command from this directory: ```bash make -s -C scripts/init ``` To deploy Ambari, run the following command from the top-level directory: ```bash src/cloud_launcher.sh --config apps/hortonworks/hdp2/centos6/vm/ambari.py insert ``` To see other flags, run: ```bash src/cloud_launcher.sh --help ``` Accessing Ambari ---------------- * run a [local SOCKS proxy](../../../../scripts/util/socks-proxy.md) and configure your browser to use it * open [http://ambari-server:8080/](http://ambari-server:8080) to continue the installation and monitor the cluster once installed You will be able to access any of the hosts in your deployment via your browser directly while you are using the SOCKS proxy as described in the instructions. Installation ------------ The default login credentials are: * username: admin * password: admin These can be changed after you set up the cluster. The agent hostname pattern: ``` ambari-agent-[0-4].c.${PROJECT}.internal ``` adjust this pattern as needed, e.g., change the '4' to N-1 where N is the number of agent instances in your deployment.
620383bae31caa246a05bd77a1abdb88c2fb7543
iOS/Plugins/FlipperKitNetworkPlugin/FlipperKitNetworkPlugin/SKDispatchQueue.h
iOS/Plugins/FlipperKitNetworkPlugin/FlipperKitNetworkPlugin/SKDispatchQueue.h
/* * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the LICENSE * file in the root directory of this source tree. */ #if FB_SONARKIT_ENABLED #pragma once #import <dispatch/dispatch.h> namespace facebook { namespace flipper { class DispatchQueue { public: virtual void async(dispatch_block_t block) = 0; }; class GCDQueue: public DispatchQueue { public: GCDQueue(dispatch_queue_t underlyingQueue) :_underlyingQueue(underlyingQueue) { } void async(dispatch_block_t block) override { dispatch_async(_underlyingQueue, block); } virtual ~GCDQueue() { } private: dispatch_queue_t _underlyingQueue; }; } } #endif
/* * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the LICENSE * file in the root directory of this source tree. */ #if FB_SONARKIT_ENABLED #pragma once #import <dispatch/dispatch.h> namespace facebook { namespace flipper { class DispatchQueue { public: virtual void async(dispatch_block_t block) = 0; virtual ~DispatchQueue() { } }; class GCDQueue: public DispatchQueue { public: GCDQueue(dispatch_queue_t underlyingQueue) :_underlyingQueue(underlyingQueue) { } void async(dispatch_block_t block) override { dispatch_async(_underlyingQueue, block); } virtual ~GCDQueue() { } private: dispatch_queue_t _underlyingQueue; }; } } #endif
Add virtual destructor to class with virtual functions but non-virtual destructor
Add virtual destructor to class with virtual functions but non-virtual destructor Reviewed By: jdthomas Differential Revision: D16954508 fbshipit-source-id: 958118843687145c1147ac5beeb2857b21332702
C
mit
facebook/flipper,facebook/flipper,facebook/flipper,facebook/flipper,facebook/flipper,facebook/flipper,facebook/flipper,facebook/flipper,facebook/flipper,facebook/flipper,facebook/flipper
c
## Code Before: /* * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the LICENSE * file in the root directory of this source tree. */ #if FB_SONARKIT_ENABLED #pragma once #import <dispatch/dispatch.h> namespace facebook { namespace flipper { class DispatchQueue { public: virtual void async(dispatch_block_t block) = 0; }; class GCDQueue: public DispatchQueue { public: GCDQueue(dispatch_queue_t underlyingQueue) :_underlyingQueue(underlyingQueue) { } void async(dispatch_block_t block) override { dispatch_async(_underlyingQueue, block); } virtual ~GCDQueue() { } private: dispatch_queue_t _underlyingQueue; }; } } #endif ## Instruction: Add virtual destructor to class with virtual functions but non-virtual destructor Reviewed By: jdthomas Differential Revision: D16954508 fbshipit-source-id: 958118843687145c1147ac5beeb2857b21332702 ## Code After: /* * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the LICENSE * file in the root directory of this source tree. */ #if FB_SONARKIT_ENABLED #pragma once #import <dispatch/dispatch.h> namespace facebook { namespace flipper { class DispatchQueue { public: virtual void async(dispatch_block_t block) = 0; virtual ~DispatchQueue() { } }; class GCDQueue: public DispatchQueue { public: GCDQueue(dispatch_queue_t underlyingQueue) :_underlyingQueue(underlyingQueue) { } void async(dispatch_block_t block) override { dispatch_async(_underlyingQueue, block); } virtual ~GCDQueue() { } private: dispatch_queue_t _underlyingQueue; }; } } #endif
68dcac11de3bebdcabaaa4d347bc94053b4baee4
README.md
README.md
This is a simple package for consuming the [Edmunds.com](http://developer.edmunds.com/) API.
This is a simple package for consuming the [Edmunds.com](http://developer.edmunds.com/) API. Install with: `Install-Package EdmundsApiSDK`
Update readme with install information.
Update readme with install information.
Markdown
mit
mnitchie/EdmundsApiSDK
markdown
## Code Before: This is a simple package for consuming the [Edmunds.com](http://developer.edmunds.com/) API. ## Instruction: Update readme with install information. ## Code After: This is a simple package for consuming the [Edmunds.com](http://developer.edmunds.com/) API. Install with: `Install-Package EdmundsApiSDK`
bff030c6b618a8b4ad868fb71775130eb50d5ab4
src/main/java/com/github/aureliano/achmed/os/pkg/IPackageManager.java
src/main/java/com/github/aureliano/achmed/os/pkg/IPackageManager.java
package com.github.aureliano.achmed.os.pkg; import com.github.aureliano.achmed.command.CommandResponse; import com.github.aureliano.achmed.resources.properties.PackageProperties; public interface IPackageManager { public abstract CommandResponse install(); public abstract CommandResponse uninstall(); public abstract String latest(); public abstract void setPackageProperties(PackageProperties properties); public abstract PackageProperties getPackageProperties(); }
package com.github.aureliano.achmed.os.pkg; import com.github.aureliano.achmed.command.CommandResponse; import com.github.aureliano.achmed.resources.properties.PackageProperties; public interface IPackageManager { public abstract CommandResponse install(); public abstract CommandResponse uninstall(); public abstract String latest(); public abstract boolean isInstalled(); public abstract void setPackageProperties(PackageProperties properties); public abstract PackageProperties getPackageProperties(); }
Add method to check whether package is installed.
Add method to check whether package is installed.
Java
mit
aureliano/achmed
java
## Code Before: package com.github.aureliano.achmed.os.pkg; import com.github.aureliano.achmed.command.CommandResponse; import com.github.aureliano.achmed.resources.properties.PackageProperties; public interface IPackageManager { public abstract CommandResponse install(); public abstract CommandResponse uninstall(); public abstract String latest(); public abstract void setPackageProperties(PackageProperties properties); public abstract PackageProperties getPackageProperties(); } ## Instruction: Add method to check whether package is installed. ## Code After: package com.github.aureliano.achmed.os.pkg; import com.github.aureliano.achmed.command.CommandResponse; import com.github.aureliano.achmed.resources.properties.PackageProperties; public interface IPackageManager { public abstract CommandResponse install(); public abstract CommandResponse uninstall(); public abstract String latest(); public abstract boolean isInstalled(); public abstract void setPackageProperties(PackageProperties properties); public abstract PackageProperties getPackageProperties(); }
701d18bb45e7f34ed38056fb5172d9286ccc5103
scripts/test.sh
scripts/test.sh
echo "##############" echo "# unit tests #" echo "##############" python -m unittest utils.test_sims || exit 1 echo "#################" echo "# pylint report #" echo "#################" pylint utils *.py exit 0
ls -aR ./ echo "##############" echo "# unit tests #" echo "##############" python -m unittest utils.test_sims || exit 1 echo "#################" echo "# pylint report #" echo "#################" pylint utils *.py exit 0
Add command to print out all files in directory
Add command to print out all files in directory
Shell
mit
wei2912/bce-simulation,wei2912/bce-simulation,wei2912/bce-simulation,wei2912/bce-simulation
shell
## Code Before: echo "##############" echo "# unit tests #" echo "##############" python -m unittest utils.test_sims || exit 1 echo "#################" echo "# pylint report #" echo "#################" pylint utils *.py exit 0 ## Instruction: Add command to print out all files in directory ## Code After: ls -aR ./ echo "##############" echo "# unit tests #" echo "##############" python -m unittest utils.test_sims || exit 1 echo "#################" echo "# pylint report #" echo "#################" pylint utils *.py exit 0
279cc2c1440a218bcea402132bb6b4f73b99500c
app/overrides/add_product_relation_admin_sub_menu_tab.rb
app/overrides/add_product_relation_admin_sub_menu_tab.rb
Deface::Override.new( virtual_path: 'spree/admin/shared/sub_menu/_product', name: 'add_product_relation_admin_sub_menu_tab', insert_bottom: '[data-hook="admin_product_sub_tabs"]', text: '<%= tab :relation_types, label: Spree::RelationType.model_name.human(count: :many) %>' )
Deface::Override.new( virtual_path: 'spree/admin/shared/sub_menu/_product', name: 'add_product_relation_admin_sub_menu_tab', insert_bottom: '[data-hook="admin_product_sub_tabs"]', text: '<%= tab :relation_types, label: plural_resource_name(Spree::RelationType) %>' )
Fix pluralization of Spree::RelationType model in menu
Fix pluralization of Spree::RelationType model in menu See https://github.com/spree/spree/issues/5842
Ruby
bsd-3-clause
macmm/spree_related_products,macmm/spree_related_products,spree-contrib/spree_related_products,spree-contrib/spree_related_products,macmm/spree_related_products,stevecawdery/spree_related_products,stevecawdery/spree_related_products,stevecawdery/spree_related_products,spree-contrib/spree_related_products
ruby
## Code Before: Deface::Override.new( virtual_path: 'spree/admin/shared/sub_menu/_product', name: 'add_product_relation_admin_sub_menu_tab', insert_bottom: '[data-hook="admin_product_sub_tabs"]', text: '<%= tab :relation_types, label: Spree::RelationType.model_name.human(count: :many) %>' ) ## Instruction: Fix pluralization of Spree::RelationType model in menu See https://github.com/spree/spree/issues/5842 ## Code After: Deface::Override.new( virtual_path: 'spree/admin/shared/sub_menu/_product', name: 'add_product_relation_admin_sub_menu_tab', insert_bottom: '[data-hook="admin_product_sub_tabs"]', text: '<%= tab :relation_types, label: plural_resource_name(Spree::RelationType) %>' )
7fb32d0117c32c40596af09e148feceb5dc52be5
plugins/inquiry/app/views/inquiry/inquiries/_inquiries.html.haml
plugins/inquiry/app/views/inquiry/inquiries/_inquiries.html.haml
.inquiries-container{data: {update_path: plugin('inquiry').inquiries_path(params.merge(partial:true)), update_interval: 600}} %table.table %thead %tr %th Request for %th Description %th Requester %th Updated %th Status %th.snug %tbody - if @inquiries.length==0 %tr %td{colspa: 6} No requests available - else - @inquiries.each do | inquiry | = render partial: 'item', locals: {inquiry: inquiry, remote_links: remote_links} .pagination = paginate @inquiries, :window => 1, :outer_window => 1, remote: remote_links, :theme => 'twitter-bootstrap-3'
.inquiries-container{data: {update_path: plugin('inquiry').inquiries_path(params.to_unsafe_hash.merge(partial:true)), update_interval: 600}} %table.table %thead %tr %th Request for %th Description %th Requester %th Updated %th Status %th.snug %tbody - if @inquiries.length==0 %tr %td{colspa: 6} No requests available - else - @inquiries.each do | inquiry | = render partial: 'item', locals: {inquiry: inquiry, remote_links: remote_links} .pagination = paginate @inquiries, :window => 1, :outer_window => 1, remote: remote_links, :theme => 'twitter-bootstrap-3'
Fix params merge error for Inquiries
Rails5: Fix params merge error for Inquiries
Haml
apache-2.0
sapcc/elektra,sapcc/elektra,sapcc/elektra,sapcc/elektra
haml
## Code Before: .inquiries-container{data: {update_path: plugin('inquiry').inquiries_path(params.merge(partial:true)), update_interval: 600}} %table.table %thead %tr %th Request for %th Description %th Requester %th Updated %th Status %th.snug %tbody - if @inquiries.length==0 %tr %td{colspa: 6} No requests available - else - @inquiries.each do | inquiry | = render partial: 'item', locals: {inquiry: inquiry, remote_links: remote_links} .pagination = paginate @inquiries, :window => 1, :outer_window => 1, remote: remote_links, :theme => 'twitter-bootstrap-3' ## Instruction: Rails5: Fix params merge error for Inquiries ## Code After: .inquiries-container{data: {update_path: plugin('inquiry').inquiries_path(params.to_unsafe_hash.merge(partial:true)), update_interval: 600}} %table.table %thead %tr %th Request for %th Description %th Requester %th Updated %th Status %th.snug %tbody - if @inquiries.length==0 %tr %td{colspa: 6} No requests available - else - @inquiries.each do | inquiry | = render partial: 'item', locals: {inquiry: inquiry, remote_links: remote_links} .pagination = paginate @inquiries, :window => 1, :outer_window => 1, remote: remote_links, :theme => 'twitter-bootstrap-3'
cc58943cdc7ea68a6a538a738f8c7c14d6fbf85a
src/common-declarations.js
src/common-declarations.js
const commonDeclarations = { boxSizing: [ 'border-box' ], display: [ 'block', 'inline-block', 'inline', 'table', 'table-cell', 'none' ], float: [ 'none', 'left', 'right' ], clear: [ 'none', 'left', 'right', 'both' ], textAlign: [ 'left', 'center', 'right', 'justify' ], fontWeight: [ 'bold', 'normal' ], textDecoration: [ 'none', 'underline' ], whiteSpace: [ 'nowrap' ], listStyle: [ 'none' ], overflow: [ 'hidden', 'scroll' ], margin: [ 0 ], marginTop: [ 0 ], marginRight: [ 0 ], marginBottom: [ 0 ], marginLeft: [ 0 ], padding: [ 0 ], paddingTop: [ 0 ], paddingRight: [ 0 ], paddingBottom: [ 0 ], paddingLeft: [ 0 ], maxWidth: [ '100%' ], height: [ 'auto' ], verticalAlign: [ 'top', 'middle', 'bottom', 'baseline' ], position: [ 'relative', 'absolute', 'fixed' ], borderRadius: [ 0 ] } export default commonDeclarations
const commonDeclarations = { boxSizing: [ 'border-box' ], display: [ 'block', 'inline-block', 'inline', 'table', 'table-cell', 'none' ], float: [ 'none', 'left', 'right' ], clear: [ 'none', 'left', 'right', 'both' ], textAlign: [ 'left', 'center', 'right', 'justify' ], fontWeight: [ 'bold', 'normal' ], textDecoration: [ 'none', 'underline' ], whiteSpace: [ 'nowrap' ], listStyle: [ 'none' ], overflow: [ 'hidden', 'scroll' ], margin: [ 0 ], marginTop: [ 0 ], marginRight: [ 0 ], marginBottom: [ 0 ], marginLeft: [ 0 ], padding: [ 0 ], paddingTop: [ 0 ], paddingRight: [ 0 ], paddingBottom: [ 0 ], paddingLeft: [ 0 ], maxWidth: [ '100%' ], height: [ 'auto' ], verticalAlign: [ 'top', 'middle', 'bottom', 'baseline' ], position: [ 'relative', 'absolute', 'fixed' ], borderRadius: [ 0 ], backgroundSize: [ 'contain', 'cover' ], backgroundRepeat: [ 'no-repeat', 'repeat', 'repeat-x', 'repeat-y' ], backgroundPosition: [ 'top', 'right', 'bottom', 'left', 'center' ] } export default commonDeclarations
Add backgrounds to common declarations
Add backgrounds to common declarations backgroundSize, backgroundRepeat, & backgroundPosition
JavaScript
mit
jxnblk/cxs
javascript
## Code Before: const commonDeclarations = { boxSizing: [ 'border-box' ], display: [ 'block', 'inline-block', 'inline', 'table', 'table-cell', 'none' ], float: [ 'none', 'left', 'right' ], clear: [ 'none', 'left', 'right', 'both' ], textAlign: [ 'left', 'center', 'right', 'justify' ], fontWeight: [ 'bold', 'normal' ], textDecoration: [ 'none', 'underline' ], whiteSpace: [ 'nowrap' ], listStyle: [ 'none' ], overflow: [ 'hidden', 'scroll' ], margin: [ 0 ], marginTop: [ 0 ], marginRight: [ 0 ], marginBottom: [ 0 ], marginLeft: [ 0 ], padding: [ 0 ], paddingTop: [ 0 ], paddingRight: [ 0 ], paddingBottom: [ 0 ], paddingLeft: [ 0 ], maxWidth: [ '100%' ], height: [ 'auto' ], verticalAlign: [ 'top', 'middle', 'bottom', 'baseline' ], position: [ 'relative', 'absolute', 'fixed' ], borderRadius: [ 0 ] } export default commonDeclarations ## Instruction: Add backgrounds to common declarations backgroundSize, backgroundRepeat, & backgroundPosition ## Code After: const commonDeclarations = { boxSizing: [ 'border-box' ], display: [ 'block', 'inline-block', 'inline', 'table', 'table-cell', 'none' ], float: [ 'none', 'left', 'right' ], clear: [ 'none', 'left', 'right', 'both' ], textAlign: [ 'left', 'center', 'right', 'justify' ], fontWeight: [ 'bold', 'normal' ], textDecoration: [ 'none', 'underline' ], whiteSpace: [ 'nowrap' ], listStyle: [ 'none' ], overflow: [ 'hidden', 'scroll' ], margin: [ 0 ], marginTop: [ 0 ], marginRight: [ 0 ], marginBottom: [ 0 ], marginLeft: [ 0 ], padding: [ 0 ], paddingTop: [ 0 ], paddingRight: [ 0 ], paddingBottom: [ 0 ], paddingLeft: [ 0 ], maxWidth: [ '100%' ], height: [ 'auto' ], verticalAlign: [ 'top', 'middle', 'bottom', 'baseline' ], position: [ 'relative', 'absolute', 'fixed' ], borderRadius: [ 0 ], backgroundSize: [ 'contain', 'cover' ], backgroundRepeat: [ 'no-repeat', 'repeat', 'repeat-x', 'repeat-y' ], backgroundPosition: [ 'top', 'right', 'bottom', 'left', 'center' ] } export default commonDeclarations
d488f14337e8bf0105a3b2bfdf076f48155320bf
test/setup.js
test/setup.js
'use strict' var mkdir = require('mkdirp').sync var rimraf = require('rimraf').sync beforeEach(function () { rimraf(__dirname + '/output/') mkdir(__dirname + '/output/') }) afterEach(function () { rimraf(__dirname + '/output/') })
'use strict' var mkdir = retry(require('mkdirp')) var rimraf = retry(require('rimraf')) beforeEach(function (callback) { rimraf(__dirname + '/output/', function (err) { if (err) return callback(err) mkdir(__dirname + '/output/', callback) }) }) afterEach(function (callback) { rimraf(__dirname + '/output/', callback) }) function retry(fn) { return function () { var args = Array.prototype.slice.call(arguments) var cb = args.pop() var attemptNo = 0 function attempt() { fn.apply(null, args.concat([function (err, res) { if (err && 4 > attemptNo++) { return setTimeout(attempt, 100) } cb(err, res) }])) } attempt() } }
Add retry logic to tests
Add retry logic to tests This should make them pass more consistently on windows
JavaScript
bsd-2-clause
ForbesLindesay/npm-fetch
javascript
## Code Before: 'use strict' var mkdir = require('mkdirp').sync var rimraf = require('rimraf').sync beforeEach(function () { rimraf(__dirname + '/output/') mkdir(__dirname + '/output/') }) afterEach(function () { rimraf(__dirname + '/output/') }) ## Instruction: Add retry logic to tests This should make them pass more consistently on windows ## Code After: 'use strict' var mkdir = retry(require('mkdirp')) var rimraf = retry(require('rimraf')) beforeEach(function (callback) { rimraf(__dirname + '/output/', function (err) { if (err) return callback(err) mkdir(__dirname + '/output/', callback) }) }) afterEach(function (callback) { rimraf(__dirname + '/output/', callback) }) function retry(fn) { return function () { var args = Array.prototype.slice.call(arguments) var cb = args.pop() var attemptNo = 0 function attempt() { fn.apply(null, args.concat([function (err, res) { if (err && 4 > attemptNo++) { return setTimeout(attempt, 100) } cb(err, res) }])) } attempt() } }
be748eb2c3059cccf65c885a5916ef81ec188003
lib/validate_as_email/rspec.rb
lib/validate_as_email/rspec.rb
require 'rspec/rails/extensions/active_record/base' # Adds a custom matcher to RSpec to make it easier to make sure your email # column is valid. # # class Person # include ActiveModel::Validations # # validates_as_email :email # # def initialize(attributes = {}) # @attributes = attributes # end # # def email # @attributes[:email] # end # # def email=(address) # @attributes[:email] = address # end # end # # describe Person # it { should have_a_valid_email_address_for(:email) } # end RSpec::Matchers.define(:have_a_valid_email_address_for) do |column_name| match do |klass| %w( [email protected] [email protected] [email protected] ).each do |valid| klass.send("#{column_name}=", valid) klass.should have(0).errors_on(column_name) end %w( word a@ @b example.com 8.8.8.8 ).each do |invalid| klass.send("#{column_name}=", invalid) klass.should have(1).error_on(column_name) end end end
RSpec::Matchers.define(:have_a_valid_email_address_for) do |column_name| match do |klass| %w( [email protected] [email protected] [email protected] ).each do |valid| klass.send("#{column_name}=", valid) klass.should have(0).errors_on(column_name) end %w( word a@ @b example.com 8.8.8.8 ).each do |invalid| klass.send("#{column_name}=", invalid) klass.should have(1).error_on(column_name) end end end
Remove one more instance of old requirement
Remove one more instance of old requirement
Ruby
mit
change/validate_as_email
ruby
## Code Before: require 'rspec/rails/extensions/active_record/base' # Adds a custom matcher to RSpec to make it easier to make sure your email # column is valid. # # class Person # include ActiveModel::Validations # # validates_as_email :email # # def initialize(attributes = {}) # @attributes = attributes # end # # def email # @attributes[:email] # end # # def email=(address) # @attributes[:email] = address # end # end # # describe Person # it { should have_a_valid_email_address_for(:email) } # end RSpec::Matchers.define(:have_a_valid_email_address_for) do |column_name| match do |klass| %w( [email protected] [email protected] [email protected] ).each do |valid| klass.send("#{column_name}=", valid) klass.should have(0).errors_on(column_name) end %w( word a@ @b example.com 8.8.8.8 ).each do |invalid| klass.send("#{column_name}=", invalid) klass.should have(1).error_on(column_name) end end end ## Instruction: Remove one more instance of old requirement ## Code After: RSpec::Matchers.define(:have_a_valid_email_address_for) do |column_name| match do |klass| %w( [email protected] [email protected] [email protected] ).each do |valid| klass.send("#{column_name}=", valid) klass.should have(0).errors_on(column_name) end %w( word a@ @b example.com 8.8.8.8 ).each do |invalid| klass.send("#{column_name}=", invalid) klass.should have(1).error_on(column_name) end end end
b1f166e61664e389a511a15ceab5ca65462c190c
Chart/Renderer/JavaScript.php
Chart/Renderer/JavaScript.php
<?php namespace Outspaced\ChartsiaBundle\Chart\Renderer; use Outspaced\ChartsiaBundle\Chart\Charts; use Outspaced\ChartsiaBundle\Chart\Config; class JavaScript { /** * @param BaseChart $chart * @param \Twig_Environment $engine */ public function renderWithTwig(Charts\BaseChart $chart, \Twig_Environment $engine) { $vars = [ 'title' => $this->renderTitle($chart->getTitle()), 'title_color' => $this->renderTitleColor($chart->getTitle()) ]; $return = $engine->render( 'OutspacedChartsiaBundle:Charts:javascript.html.twig', $vars ); return $return; } protected function renderTitle(Config\Title $title = NULL) { if ($title === NULL) { return ''; } return $title->getTitle(); } protected function renderTitleColor(Config\Title $title = NULL) { if ($title === NULL || $title->getColor() === NULL) { return ''; } return $title->getColor()->getColor(); } }
<?php namespace Outspaced\ChartsiaBundle\Chart\Renderer; use Outspaced\ChartsiaBundle\Chart\Charts; use Outspaced\ChartsiaBundle\Chart\Config; class JavaScript { /** * @param BaseChart $chart * @param \Twig_Environment $engine */ public function renderWithTwig(Charts\BaseChart $chart, \Twig_Environment $engine) { $vars = [ 'title' => $this->renderTitle($chart->getTitle()), 'title_color' => $this->renderTitleColor($chart->getTitle()), // Move these out to methods 'height' => $chart->getSize()->getHeight(), 'width' => $chart->getSize()->getWidth(), ]; $return = $engine->render( 'OutspacedChartsiaBundle:Charts:javascript.html.twig', $vars ); return $return; } protected function renderTitle(Config\Title $title = NULL) { if ($title === NULL) { return ''; } return $title->getTitle(); } protected function renderTitleColor(Config\Title $title = NULL) { if ($title === NULL || $title->getColor() === NULL) { return ''; } return $title->getColor()->getColor(); } }
Put size elements into place for JS renderer
Put size elements into place for JS renderer
PHP
mit
outspaced/chartsia,outspaced/google-chart-maker,outspaced/chartsia,outspaced/google-chart-maker
php
## Code Before: <?php namespace Outspaced\ChartsiaBundle\Chart\Renderer; use Outspaced\ChartsiaBundle\Chart\Charts; use Outspaced\ChartsiaBundle\Chart\Config; class JavaScript { /** * @param BaseChart $chart * @param \Twig_Environment $engine */ public function renderWithTwig(Charts\BaseChart $chart, \Twig_Environment $engine) { $vars = [ 'title' => $this->renderTitle($chart->getTitle()), 'title_color' => $this->renderTitleColor($chart->getTitle()) ]; $return = $engine->render( 'OutspacedChartsiaBundle:Charts:javascript.html.twig', $vars ); return $return; } protected function renderTitle(Config\Title $title = NULL) { if ($title === NULL) { return ''; } return $title->getTitle(); } protected function renderTitleColor(Config\Title $title = NULL) { if ($title === NULL || $title->getColor() === NULL) { return ''; } return $title->getColor()->getColor(); } } ## Instruction: Put size elements into place for JS renderer ## Code After: <?php namespace Outspaced\ChartsiaBundle\Chart\Renderer; use Outspaced\ChartsiaBundle\Chart\Charts; use Outspaced\ChartsiaBundle\Chart\Config; class JavaScript { /** * @param BaseChart $chart * @param \Twig_Environment $engine */ public function renderWithTwig(Charts\BaseChart $chart, \Twig_Environment $engine) { $vars = [ 'title' => $this->renderTitle($chart->getTitle()), 'title_color' => $this->renderTitleColor($chart->getTitle()), // Move these out to methods 'height' => $chart->getSize()->getHeight(), 'width' => $chart->getSize()->getWidth(), ]; $return = $engine->render( 'OutspacedChartsiaBundle:Charts:javascript.html.twig', $vars ); return $return; } protected function renderTitle(Config\Title $title = NULL) { if ($title === NULL) { return ''; } return $title->getTitle(); } protected function renderTitleColor(Config\Title $title = NULL) { if ($title === NULL || $title->getColor() === NULL) { return ''; } return $title->getColor()->getColor(); } }
d8f4763a5c6946c3323f2567cb1554244c844c05
test/unit/model/view.coffee
test/unit/model/view.coffee
sinon = require 'sinon' should = require 'should' describe 'Client model: View', -> helper = require '../helper' helper.evalConcatenatedFile 'client/code/model/tool.coffee' helper.evalConcatenatedFile 'client/code/model/view.coffee' describe 'URL', -> beforeEach -> @tool = Cu.Model.Tool.findOrCreate name: 'test-plugin' displayName: 'Test Plugin' @view = Cu.Model.View.findOrCreate user: 'test' box: 'box1' tool: 'test-plugin' it 'has a related tool', -> tool = @view.get('tool') tool.get('displayName').should.equal 'Test Plugin' class TestDb class Model constructor: (obj) -> for k of obj @[k] = obj[k] toObject: -> @ save: (callback) -> callback null @find: (_args, callback) -> callback null, [ new Model(name: 'test'), new Model(name: 'test2') ] View = require('model/view')(TestDb) describe 'Server model: View', -> before -> @saveSpy = sinon.spy TestDb.prototype, 'save' @findSpy = sinon.spy TestDb, 'find' @view = new View user: 'ickle' name: 'test' displayName: 'Test' box: 'sdjfsdf' context 'when view.save is called', -> before (done) -> @view.save done it 'calls mongoose save method', -> @saveSpy.calledOnce.should.be.true context 'when view.findAll is called', -> before (done) -> View.findAll (err, res) => @results = res done() it 'should return View results', -> @results[0].should.be.an.instanceOf View @results[0].name.should.equal 'test' @results.length.should.equal 2
sinon = require 'sinon' should = require 'should' describe 'Client model: View', -> helper = require '../helper' helper.evalConcatenatedFile 'client/code/model/tool.coffee' helper.evalConcatenatedFile 'client/code/model/view.coffee' describe 'URL', -> beforeEach -> @tool = Cu.Model.Tool.findOrCreate name: 'test-plugin' displayName: 'Test Plugin' @view = Cu.Model.View.findOrCreate user: 'test' box: 'box1' tool: 'test-plugin' it 'has a related tool', -> tool = @view.get('tool') tool.get('displayName').should.equal 'Test Plugin' class TestDb class Model constructor: (obj) -> for k of obj @[k] = obj[k] toObject: -> @ save: (callback) -> callback null @find: (_args, callback) -> callback null, [ new Model(name: 'test'), new Model(name: 'test2') ]
Remove old View test, which can't possibly work as Views don't have their own persistent model on server side
Remove old View test, which can't possibly work as Views don't have their own persistent model on server side
CoffeeScript
agpl-3.0
scraperwiki/custard,scraperwiki/custard,scraperwiki/custard
coffeescript
## Code Before: sinon = require 'sinon' should = require 'should' describe 'Client model: View', -> helper = require '../helper' helper.evalConcatenatedFile 'client/code/model/tool.coffee' helper.evalConcatenatedFile 'client/code/model/view.coffee' describe 'URL', -> beforeEach -> @tool = Cu.Model.Tool.findOrCreate name: 'test-plugin' displayName: 'Test Plugin' @view = Cu.Model.View.findOrCreate user: 'test' box: 'box1' tool: 'test-plugin' it 'has a related tool', -> tool = @view.get('tool') tool.get('displayName').should.equal 'Test Plugin' class TestDb class Model constructor: (obj) -> for k of obj @[k] = obj[k] toObject: -> @ save: (callback) -> callback null @find: (_args, callback) -> callback null, [ new Model(name: 'test'), new Model(name: 'test2') ] View = require('model/view')(TestDb) describe 'Server model: View', -> before -> @saveSpy = sinon.spy TestDb.prototype, 'save' @findSpy = sinon.spy TestDb, 'find' @view = new View user: 'ickle' name: 'test' displayName: 'Test' box: 'sdjfsdf' context 'when view.save is called', -> before (done) -> @view.save done it 'calls mongoose save method', -> @saveSpy.calledOnce.should.be.true context 'when view.findAll is called', -> before (done) -> View.findAll (err, res) => @results = res done() it 'should return View results', -> @results[0].should.be.an.instanceOf View @results[0].name.should.equal 'test' @results.length.should.equal 2 ## Instruction: Remove old View test, which can't possibly work as Views don't have their own persistent model on server side ## Code After: sinon = require 'sinon' should = require 'should' describe 'Client model: View', -> helper = require '../helper' helper.evalConcatenatedFile 'client/code/model/tool.coffee' helper.evalConcatenatedFile 'client/code/model/view.coffee' describe 'URL', -> beforeEach -> @tool = Cu.Model.Tool.findOrCreate name: 'test-plugin' displayName: 'Test Plugin' @view = Cu.Model.View.findOrCreate user: 'test' box: 'box1' tool: 'test-plugin' it 'has a related tool', -> tool = @view.get('tool') tool.get('displayName').should.equal 'Test Plugin' class TestDb class Model constructor: (obj) -> for k of obj @[k] = obj[k] toObject: -> @ save: (callback) -> callback null @find: (_args, callback) -> callback null, [ new Model(name: 'test'), new Model(name: 'test2') ]
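The part of the spec that survives is a hand-rolled test double: a fake TestDb whose find hands back canned Model instances and whose save simply fires its callback. The same pattern in Python using only the standard library, with FakeDb and Record as hypothetical names rather than anything from the custard project:

```python
from unittest import mock

class Record:
    """Canned object returned by the fake datastore."""
    def __init__(self, **fields):
        self.__dict__.update(fields)

class FakeDb:
    """Test double standing in for the real persistence layer."""
    def save(self, record, callback):
        callback(None)                      # pretend the write succeeded

    def find(self, query, callback):
        callback(None, [Record(name="test"), Record(name="test2")])

db = FakeDb()
db.find = mock.Mock(wraps=db.find)          # spy: keep behaviour, record calls

results = []
db.find({}, lambda err, rows: results.extend(rows))

assert db.find.call_count == 1
assert [r.name for r in results] == ["test", "test2"]
```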
08166e2b42acd62bd6572de07fb1525fb8144be3
app/views/quotes/index.html.haml
app/views/quotes/index.html.haml
.anime-discover.anime-show.anime-show-full .row .three.columns = render partial: 'anime/card', locals: {data: [@anime, @watchlist, 'twelve']} .nine.columns .panel.top-bar %h4= title "#{@anime.title} Quotes" %a{"data-reveal-id" => "submit-quote", :href => "#"} %span= "Submit a quote for #{@anime.title}" - @quotes.each do |quote| %fieldset %blockquote = quote.content - if quote.character_name %cite #{quote.character_name} %p.option-bar - if quote.creator (Submitted by #{link_to quote.creator.name, quote.creator}) | = pluralize quote.votes.to_i, "vote" | - if current_user && quote.has_evaluation?(:votes, current_user) = link_to "(^) remove vote", vote_anime_quote_path(@anime, quote, type: "down"), method: :post - else = link_to "^ vote", vote_anime_quote_path(@anime, quote, type: "up"), method: :post | = link_to "report", "#" #submit-quote.reveal-modal{:class => "[expand, xlarge, large, medium, small]"} = render 'quotes/new'
.anime-discover.anime-show.anime-show-full .row .three.columns = render partial: 'anime/card', locals: {data: [@anime, @watchlist, 'twelve']} .nine.columns .panel.top-bar %h4= title "#{@anime.title} Quotes" %a{"data-reveal-id" => "submit-quote", :href => "#"} %span= "Submit a quote for #{@anime.title}" - @quotes.each do |quote| %fieldset %blockquote = quote.content - if quote.character_name %cite #{quote.character_name} %p.option-bar - if quote.creator (Submitted by #{link_to quote.creator.name, quote.creator}) | = pluralize quote.votes.to_i, "vote" - if current_user && quote.has_evaluation?(:votes, current_user) = link_to vote_anime_quote_path(@anime, quote, type: "down"), method: :post do %i.icon-star - else = link_to vote_anime_quote_path(@anime, quote, type: "up"), method: :post do %i.icon-star-empty | = link_to "report", "#" #submit-quote.reveal-modal{:class => "[expand, xlarge, large, medium, small]"} = render 'quotes/new'
Replace textual quote vote link with full & empty star.
Replace textual quote vote link with full & empty star.
Haml
apache-2.0
Snitzle/hummingbird,NuckChorris/hummingbird,hummingbird-me/hummingbird,paladique/hummingbird,sidaga/hummingbird,NuckChorris/hummingbird,Snitzle/hummingbird,wlads/hummingbird,MiLk/hummingbird,qgustavor/hummingbird,NuckChorris/hummingbird,saintsantos/hummingbird,wlads/hummingbird,erengy/hummingbird,jcoady9/hummingbird,MiLk/hummingbird,vevix/hummingbird,jcoady9/hummingbird,xhocquet/hummingbird,xhocquet/hummingbird,Snitzle/hummingbird,astraldragon/hummingbird,synthtech/hummingbird,jcoady9/hummingbird,erengy/hummingbird,jcoady9/hummingbird,paladique/hummingbird,synthtech/hummingbird,qgustavor/hummingbird,erengy/hummingbird,saintsantos/hummingbird,MiLk/hummingbird,xhocquet/hummingbird,cybrox/hummingbird,erengy/hummingbird,paladique/hummingbird,sidaga/hummingbird,saintsantos/hummingbird,qgustavor/hummingbird,astraldragon/hummingbird,xhocquet/hummingbird,wlads/hummingbird,paladique/hummingbird,vevix/hummingbird,astraldragon/hummingbird,xhocquet/hummingbird,wlads/hummingbird,hummingbird-me/hummingbird,xhocquet/hummingbird,NuckChorris/hummingbird,xhocquet/hummingbird,vevix/hummingbird,sidaga/hummingbird,saintsantos/hummingbird,Snitzle/hummingbird,astraldragon/hummingbird,qgustavor/hummingbird,vevix/hummingbird,sidaga/hummingbird,MiLk/hummingbird
haml
## Code Before: .anime-discover.anime-show.anime-show-full .row .three.columns = render partial: 'anime/card', locals: {data: [@anime, @watchlist, 'twelve']} .nine.columns .panel.top-bar %h4= title "#{@anime.title} Quotes" %a{"data-reveal-id" => "submit-quote", :href => "#"} %span= "Submit a quote for #{@anime.title}" - @quotes.each do |quote| %fieldset %blockquote = quote.content - if quote.character_name %cite #{quote.character_name} %p.option-bar - if quote.creator (Submitted by #{link_to quote.creator.name, quote.creator}) | = pluralize quote.votes.to_i, "vote" | - if current_user && quote.has_evaluation?(:votes, current_user) = link_to "(^) remove vote", vote_anime_quote_path(@anime, quote, type: "down"), method: :post - else = link_to "^ vote", vote_anime_quote_path(@anime, quote, type: "up"), method: :post | = link_to "report", "#" #submit-quote.reveal-modal{:class => "[expand, xlarge, large, medium, small]"} = render 'quotes/new' ## Instruction: Replace textual quote vote link with full & empty star. ## Code After: .anime-discover.anime-show.anime-show-full .row .three.columns = render partial: 'anime/card', locals: {data: [@anime, @watchlist, 'twelve']} .nine.columns .panel.top-bar %h4= title "#{@anime.title} Quotes" %a{"data-reveal-id" => "submit-quote", :href => "#"} %span= "Submit a quote for #{@anime.title}" - @quotes.each do |quote| %fieldset %blockquote = quote.content - if quote.character_name %cite #{quote.character_name} %p.option-bar - if quote.creator (Submitted by #{link_to quote.creator.name, quote.creator}) | = pluralize quote.votes.to_i, "vote" - if current_user && quote.has_evaluation?(:votes, current_user) = link_to vote_anime_quote_path(@anime, quote, type: "down"), method: :post do %i.icon-star - else = link_to vote_anime_quote_path(@anime, quote, type: "up"), method: :post do %i.icon-star-empty | = link_to "report", "#" #submit-quote.reveal-modal{:class => "[expand, xlarge, large, medium, small]"} = render 'quotes/new'
5fe1689a6ae64429d21d8747e39a0ad4dd1fff13
templates/swagged-angular-resources-provider.hbs
templates/swagged-angular-resources-provider.hbs
'use strict'; require('angular'); require('angular-resource'); angular.module('swaggedAngularResources', ['ngResource']) .config(function($resourceProvider) { $resourceProvider.defaults.stripTrailingSlashes = false; }) {{#each resourceOperations}} .{{../angularProviderType}}('{{@key}}{{../angularProviderSuffix}}', function() { this.$get = function($resource, apiUrl) { return $resource(null, null, { {{#each this}} {{nickname}}: { method: '{{action}}', url: apiUrl + '{{path}}', {{#if pathParameters}} params: { {{#each pathParameters}} '{{name}}': '@{{name}}', {{/each}} }, {{/if}} {{#isQuery}} isArray: true, {{/isQuery}} }, {{/each}} }); }; }) {{/each}};
angular.module('swaggedAngularResources', ['ngResource']) .config(function($resourceProvider) { $resourceProvider.defaults.stripTrailingSlashes = false; }) {{#each resourceOperations}} .{{../angularProviderType}}('{{@key}}{{../angularProviderSuffix}}', function() { this.$get = function($resource, apiUrl) { return $resource(null, null, { {{#each this}} {{nickname}}: { method: '{{action}}', url: apiUrl + '{{path}}', {{#if pathParameters}} params: { {{#each pathParameters}} '{{name}}': '@{{name}}', {{/each}} }, {{/if}} {{#isQuery}} isArray: true, {{/isQuery}} }, {{/each}} }); }; }) {{/each}};
Remove browserify statements from template
Remove browserify statements from template
Handlebars
mit
mchlbrnd/swagged-angular-resources,mchlbrnd/swagged-angular-resources
handlebars
## Code Before: 'use strict'; require('angular'); require('angular-resource'); angular.module('swaggedAngularResources', ['ngResource']) .config(function($resourceProvider) { $resourceProvider.defaults.stripTrailingSlashes = false; }) {{#each resourceOperations}} .{{../angularProviderType}}('{{@key}}{{../angularProviderSuffix}}', function() { this.$get = function($resource, apiUrl) { return $resource(null, null, { {{#each this}} {{nickname}}: { method: '{{action}}', url: apiUrl + '{{path}}', {{#if pathParameters}} params: { {{#each pathParameters}} '{{name}}': '@{{name}}', {{/each}} }, {{/if}} {{#isQuery}} isArray: true, {{/isQuery}} }, {{/each}} }); }; }) {{/each}}; ## Instruction: Remove browserify statements from template ## Code After: angular.module('swaggedAngularResources', ['ngResource']) .config(function($resourceProvider) { $resourceProvider.defaults.stripTrailingSlashes = false; }) {{#each resourceOperations}} .{{../angularProviderType}}('{{@key}}{{../angularProviderSuffix}}', function() { this.$get = function($resource, apiUrl) { return $resource(null, null, { {{#each this}} {{nickname}}: { method: '{{action}}', url: apiUrl + '{{path}}', {{#if pathParameters}} params: { {{#each pathParameters}} '{{name}}': '@{{name}}', {{/each}} }, {{/if}} {{#isQuery}} isArray: true, {{/isQuery}} }, {{/each}} }); }; }) {{/each}};
3a74d30bbcab64cf528aa0bf3e1c99d12660d0f6
.travis.yml
.travis.yml
sudo: false language: node_js node_js: - '0.10' before_script: - npm install -g grunt-cli - git submodule update --init --recursive deploy: provider: s3 bucket: code.fireboltjs.com skip_cleanup: true local-dir: dist access_key_id: secure: SIFyjgmdTR5BPhOTNp6YydkSvTukSomEnc7v70fkfVI8prJAQ3/cqbCJelC9ZSlbfTcwZjm8g1n4zwis1AdnnUAp27ndpeJozTH6qJN4oqHGA3uHDKI7D2mSM9cT8VFmQZCrXsssM5vZSOZcdFlIy7EUKPGdQe6qtq+evSMYoaE= secret_access_key: secure: Rof++4WMa+rtdGrme2yRL0ITnWqcWQAKxeI+I5cfc9NABMvHTRDOMIFzqSUHqD9p1YGdR/iecghw39yWhpfwvz8+s1nJKlEga/tF7imkPjjY0NTRhUvS6xIMhAt5xsqoOllYEKcba2hxiqJad8XpAvyiIaORRlc+r321oAl6Cw0=
sudo: false language: node_js node_js: - '0.10' before_script: - npm install -g grunt-cli deploy: provider: s3 bucket: code.fireboltjs.com skip_cleanup: true local-dir: dist access_key_id: secure: SIFyjgmdTR5BPhOTNp6YydkSvTukSomEnc7v70fkfVI8prJAQ3/cqbCJelC9ZSlbfTcwZjm8g1n4zwis1AdnnUAp27ndpeJozTH6qJN4oqHGA3uHDKI7D2mSM9cT8VFmQZCrXsssM5vZSOZcdFlIy7EUKPGdQe6qtq+evSMYoaE= secret_access_key: secure: Rof++4WMa+rtdGrme2yRL0ITnWqcWQAKxeI+I5cfc9NABMvHTRDOMIFzqSUHqD9p1YGdR/iecghw39yWhpfwvz8+s1nJKlEga/tF7imkPjjY0NTRhUvS6xIMhAt5xsqoOllYEKcba2hxiqJad8XpAvyiIaORRlc+r321oAl6Cw0=
Remove unnecessary before_script command from Travis config
Remove unnecessary before_script command from Travis config [ci skip]
YAML
mit
woollybogger/code.fireboltjs.com,woollybogger/code.fireboltjs.com
yaml
## Code Before: sudo: false language: node_js node_js: - '0.10' before_script: - npm install -g grunt-cli - git submodule update --init --recursive deploy: provider: s3 bucket: code.fireboltjs.com skip_cleanup: true local-dir: dist access_key_id: secure: SIFyjgmdTR5BPhOTNp6YydkSvTukSomEnc7v70fkfVI8prJAQ3/cqbCJelC9ZSlbfTcwZjm8g1n4zwis1AdnnUAp27ndpeJozTH6qJN4oqHGA3uHDKI7D2mSM9cT8VFmQZCrXsssM5vZSOZcdFlIy7EUKPGdQe6qtq+evSMYoaE= secret_access_key: secure: Rof++4WMa+rtdGrme2yRL0ITnWqcWQAKxeI+I5cfc9NABMvHTRDOMIFzqSUHqD9p1YGdR/iecghw39yWhpfwvz8+s1nJKlEga/tF7imkPjjY0NTRhUvS6xIMhAt5xsqoOllYEKcba2hxiqJad8XpAvyiIaORRlc+r321oAl6Cw0= ## Instruction: Remove unnecessary before_script command from Travis config [ci skip] ## Code After: sudo: false language: node_js node_js: - '0.10' before_script: - npm install -g grunt-cli deploy: provider: s3 bucket: code.fireboltjs.com skip_cleanup: true local-dir: dist access_key_id: secure: SIFyjgmdTR5BPhOTNp6YydkSvTukSomEnc7v70fkfVI8prJAQ3/cqbCJelC9ZSlbfTcwZjm8g1n4zwis1AdnnUAp27ndpeJozTH6qJN4oqHGA3uHDKI7D2mSM9cT8VFmQZCrXsssM5vZSOZcdFlIy7EUKPGdQe6qtq+evSMYoaE= secret_access_key: secure: Rof++4WMa+rtdGrme2yRL0ITnWqcWQAKxeI+I5cfc9NABMvHTRDOMIFzqSUHqD9p1YGdR/iecghw39yWhpfwvz8+s1nJKlEga/tF7imkPjjY0NTRhUvS6xIMhAt5xsqoOllYEKcba2hxiqJad8XpAvyiIaORRlc+r321oAl6Cw0=
fd14a442ac48c6b9a83fc1e03e8d315581e7e33c
src/components/encodingvariations/encodingvariations.scss
src/components/encodingvariations/encodingvariations.scss
.encoding-variations { .selected-variation { display: flex; flex-direction: column; & > .vl-plotgroup { flex: 0 0 100%; box-sizing: border-box; } } } .encoding-variations .variations { max-width: 300px; width: 300px; flex-shrink: 0.2; flex-grow: 0; overflow-y: scroll; padding: 10px 15px 10px 10px; background-color: #B7C9E1; margin-top: 3px; } .encoding-variations .modal-wrapper{ overflow: hidden; } .encoding-variations .encoding-variations-main { display: flex; overflow: hidden; } .encoding-variations .wrapper { position: relative; }
.encoding-variations { .selected-variation { display: flex; flex-direction: column; & > .vl-plotgroup { flex: 0 0 100%; box-sizing: border-box; } } } .encoding-variations .variations { max-width: 300px; width: 300px; flex-shrink: 0.2; flex-grow: 0; overflow-y: scroll; padding: 10px 15px 10px 10px; background-color: #B7C9E1; margin-top: 3px; // account for margin-right: -3px in .encoding-variations-main margin-right: 3px; } .encoding-variations .modal-wrapper{ overflow: hidden; } .encoding-variations .encoding-variations-main { display: flex; overflow: hidden; // account for .selection-variation.card's 3px when .variations is hidden margin-right: -3px; } .encoding-variations .wrapper { position: relative; }
Fix offset in encoding variations
Fix offset in encoding variations
SCSS
bsd-3-clause
uwdata/voyager,uwdata/voyager,uwdata/voyager
scss
## Code Before: .encoding-variations { .selected-variation { display: flex; flex-direction: column; & > .vl-plotgroup { flex: 0 0 100%; box-sizing: border-box; } } } .encoding-variations .variations { max-width: 300px; width: 300px; flex-shrink: 0.2; flex-grow: 0; overflow-y: scroll; padding: 10px 15px 10px 10px; background-color: #B7C9E1; margin-top: 3px; } .encoding-variations .modal-wrapper{ overflow: hidden; } .encoding-variations .encoding-variations-main { display: flex; overflow: hidden; } .encoding-variations .wrapper { position: relative; } ## Instruction: Fix offset in encoding variations ## Code After: .encoding-variations { .selected-variation { display: flex; flex-direction: column; & > .vl-plotgroup { flex: 0 0 100%; box-sizing: border-box; } } } .encoding-variations .variations { max-width: 300px; width: 300px; flex-shrink: 0.2; flex-grow: 0; overflow-y: scroll; padding: 10px 15px 10px 10px; background-color: #B7C9E1; margin-top: 3px; // account for margin-right: -3px in .encoding-variations-main margin-right: 3px; } .encoding-variations .modal-wrapper{ overflow: hidden; } .encoding-variations .encoding-variations-main { display: flex; overflow: hidden; // account for .selection-variation.card's 3px when .variations is hidden margin-right: -3px; } .encoding-variations .wrapper { position: relative; }
385ffd4bee9501add7c6495bbe2dd3896c248751
length_field.go
length_field.go
package sarama import "encoding/binary" // LengthField implements the PushEncoder and PushDecoder interfaces for calculating 4-byte lengths. type lengthField struct { startOffset int } func (l *lengthField) saveOffset(in int) { l.startOffset = in } func (l *lengthField) reserveLength() int { return 4 } func (l *lengthField) run(curOffset int, buf []byte) error { binary.BigEndian.PutUint32(buf[l.startOffset:], uint32(curOffset-l.startOffset-4)) return nil } func (l *lengthField) check(curOffset int, buf []byte) error { if uint32(curOffset-l.startOffset-4) != binary.BigEndian.Uint32(buf[l.startOffset:]) { return PacketDecodingError{"length field invalid"} } return nil }
package sarama import "encoding/binary" // LengthField implements the PushEncoder and PushDecoder interfaces for calculating 4-byte lengths. type lengthField struct { startOffset int } func (l *lengthField) saveOffset(in int) { l.startOffset = in } func (l *lengthField) reserveLength() int { return 4 } func (l *lengthField) run(curOffset int, buf []byte) error { binary.BigEndian.PutUint32(buf[l.startOffset:], uint32(curOffset-l.startOffset-4)) return nil } func (l *lengthField) check(curOffset int, buf []byte) error { if uint32(curOffset-l.startOffset-4) != binary.BigEndian.Uint32(buf[l.startOffset:]) { return PacketDecodingError{"length field invalid"} } return nil } type varintLengthField struct { startOffset int length int64 } func newVarintLengthField(pd packetDecoder) (*varintLengthField, error) { n, err := pd.getVarint() if err != nil { return nil, err } return &varintLengthField{length: n}, nil } func (l *varintLengthField) saveOffset(in int) { l.startOffset = in } func (l *varintLengthField) reserveLength() int { return 0 } func (l *varintLengthField) check(curOffset int, buf []byte) error { if int64(curOffset-l.startOffset) != l.length { return PacketDecodingError{"length field invalid"} } return nil }
Add support for length fields encoded as varints.
Add support for length fields encoded as varints. Kafka 0.11 encodes Record length as varint.
Go
mit
remerge/sarama,Shopify/sarama,vrischmann/sarama,DataDog/sarama,mailgun/sarama,VividCortex/sarama,mailgun/sarama,vrischmann/sarama,DataDog/sarama,remerge/sarama,VividCortex/sarama,Shopify/sarama
go
## Code Before: package sarama import "encoding/binary" // LengthField implements the PushEncoder and PushDecoder interfaces for calculating 4-byte lengths. type lengthField struct { startOffset int } func (l *lengthField) saveOffset(in int) { l.startOffset = in } func (l *lengthField) reserveLength() int { return 4 } func (l *lengthField) run(curOffset int, buf []byte) error { binary.BigEndian.PutUint32(buf[l.startOffset:], uint32(curOffset-l.startOffset-4)) return nil } func (l *lengthField) check(curOffset int, buf []byte) error { if uint32(curOffset-l.startOffset-4) != binary.BigEndian.Uint32(buf[l.startOffset:]) { return PacketDecodingError{"length field invalid"} } return nil } ## Instruction: Add support for length fields encoded as varints. Kafka 0.11 encodes Record length as varint. ## Code After: package sarama import "encoding/binary" // LengthField implements the PushEncoder and PushDecoder interfaces for calculating 4-byte lengths. type lengthField struct { startOffset int } func (l *lengthField) saveOffset(in int) { l.startOffset = in } func (l *lengthField) reserveLength() int { return 4 } func (l *lengthField) run(curOffset int, buf []byte) error { binary.BigEndian.PutUint32(buf[l.startOffset:], uint32(curOffset-l.startOffset-4)) return nil } func (l *lengthField) check(curOffset int, buf []byte) error { if uint32(curOffset-l.startOffset-4) != binary.BigEndian.Uint32(buf[l.startOffset:]) { return PacketDecodingError{"length field invalid"} } return nil } type varintLengthField struct { startOffset int length int64 } func newVarintLengthField(pd packetDecoder) (*varintLengthField, error) { n, err := pd.getVarint() if err != nil { return nil, err } return &varintLengthField{length: n}, nil } func (l *varintLengthField) saveOffset(in int) { l.startOffset = in } func (l *varintLengthField) reserveLength() int { return 0 } func (l *varintLengthField) check(curOffset int, buf []byte) error { if int64(curOffset-l.startOffset) != l.length { return PacketDecodingError{"length field invalid"} } return nil }
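The new varintLengthField stores a length that was decoded earlier with getVarint and later checks that exactly that many bytes were consumed. Kafka's 0.11 record format uses protobuf-style zigzag varints for these lengths; purely as a standalone illustration of that encoding (this is not Sarama code), a Python sketch:

```python
def zigzag(n):
    """Interleave signed values: 0, -1, 1, -2, ... -> 0, 1, 2, 3, ..."""
    return ((n << 1) ^ (n >> 63)) & 0xFFFFFFFFFFFFFFFF

def encode_varint(n):
    """Zigzag the value, then emit 7 bits per byte, high bit = continuation."""
    u = zigzag(n)
    out = bytearray()
    while True:
        byte = u & 0x7F
        u >>= 7
        if u:
            out.append(byte | 0x80)
        else:
            out.append(byte)
            return bytes(out)

def decode_varint(buf, offset=0):
    """Return (value, bytes_consumed); inverse of encode_varint."""
    shift = u = 0
    for i, byte in enumerate(buf[offset:]):
        u |= (byte & 0x7F) << shift
        if not byte & 0x80:
            return (u >> 1) ^ -(u & 1), i + 1    # undo the zigzag step
        shift += 7
    raise ValueError("truncated varint")

payload = b"hello kafka"
framed = encode_varint(len(payload)) + payload
length, header_size = decode_varint(framed)
# The analogue of varintLengthField.check: consumed bytes must equal the prefix.
assert length == len(framed) - header_size
```

As in the Go version, nothing is reserved up front on the decode side; the varint prefix has already been consumed by the time the check runs, which is why reserveLength returns 0.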
e4e6bfea8a66842854b70671bf5f4a4f8e5700c3
turrentine/static/turrentine/css/page_edit.css
turrentine/static/turrentine/css/page_edit.css
label[for=id_content] { display:none; } #id_content { font-family: monospace; width: 60%; min-height: 500px; background: -webkit-linear-gradient(#fff, #eee); font-size:1.3em; } #preview_container { width:37%; height:500px; float:right; margin:0 1em; position:relative; } #live_preview { width:200%; height:1000px; border:1px solid #aaa; box-shadow:0px 12px 21px #ccc; -webkit-transform:scale(0.5); -webkit-transform-origin:top left; -moz-transform:scale(0.5); -moz-transform-origin:top left; -o-transform:scale(0.5); -o-transform-origin:top left; -ms-transform:scale(0.5); -ms-transform-origin:top left; transform:scale(0.5); transform-origin:top left; }
label[for=id_content] { display:none; } #id_content { font-family: monospace; width: 52%; min-height: 500px; background: -webkit-linear-gradient(#fff, #eee); font-size:1.3em; float:left; } #preview_container { width:45%; height:500px; float:left; margin:0 0 0 1em; position:relative; } #live_preview { width:200%; height:1000px; border:1px solid #aaa; box-shadow:0px 12px 21px #ccc; -webkit-transform:scale(0.5); -webkit-transform-origin:top left; -moz-transform:scale(0.5); -moz-transform-origin:top left; -o-transform:scale(0.5); -o-transform-origin:top left; -ms-transform:scale(0.5); -ms-transform-origin:top left; transform:scale(0.5); transform-origin:top left; }
Improve proportions between widths of the content textarea and preview iframe.
Improve proportions between widths of the content textarea and preview iframe.
CSS
bsd-3-clause
af/turrentine,af/turrentine
css
## Code Before: label[for=id_content] { display:none; } #id_content { font-family: monospace; width: 60%; min-height: 500px; background: -webkit-linear-gradient(#fff, #eee); font-size:1.3em; } #preview_container { width:37%; height:500px; float:right; margin:0 1em; position:relative; } #live_preview { width:200%; height:1000px; border:1px solid #aaa; box-shadow:0px 12px 21px #ccc; -webkit-transform:scale(0.5); -webkit-transform-origin:top left; -moz-transform:scale(0.5); -moz-transform-origin:top left; -o-transform:scale(0.5); -o-transform-origin:top left; -ms-transform:scale(0.5); -ms-transform-origin:top left; transform:scale(0.5); transform-origin:top left; } ## Instruction: Improve proportions between widths of the content textarea and preview iframe. ## Code After: label[for=id_content] { display:none; } #id_content { font-family: monospace; width: 52%; min-height: 500px; background: -webkit-linear-gradient(#fff, #eee); font-size:1.3em; float:left; } #preview_container { width:45%; height:500px; float:left; margin:0 0 0 1em; position:relative; } #live_preview { width:200%; height:1000px; border:1px solid #aaa; box-shadow:0px 12px 21px #ccc; -webkit-transform:scale(0.5); -webkit-transform-origin:top left; -moz-transform:scale(0.5); -moz-transform-origin:top left; -o-transform:scale(0.5); -o-transform-origin:top left; -ms-transform:scale(0.5); -ms-transform-origin:top left; transform:scale(0.5); transform-origin:top left; }
d88a1b91bb0213308a3ef0405f2e7703d42e69b9
doc/macbuild_master_setup.txt
doc/macbuild_master_setup.txt
passenger-install-nginx-module ## Changes to nginx configuration ## # Up the maximum size allowed for requests (allows us to POST large log files) client_max_body_size 100M; # Increase the http timeout above 60 seconds; necessary for large uploads client_body_timeout 90; # Transmit all of the .gz files under log_files as plain/text (renders then inside the browser) location ~* log_files.*?\.gz$ { types { text/plain gz; } add_header Content-Encoding gzip; }
cd /Users/square curl http://nginx.org/download/nginx-1.2.4.tar.gz | tar xvz # Install the passenger gem into the 1.9.3@kochiku gemset $ cd ~/kochiku/current $ gem install passenger -v 3.0.18 $ which passenger-install-nginx-module # => /Users/square/.rvm/gems/ruby-1.9.3-p194@kochiku/bin/passenger-install-nginx-module Run the passenger nginx install rvmsudo passenger-install-nginx-module Select the advanced install (number 2) Provide the path the nginx source (/Users/square/nginx-1.2.4) Use the default install dir (/opt/nginx) Ensure that nginx is configured with the following additional options: --with-http_gzip_static_module --with-cc-opt=-I/usr/local/include --with-ld-opt=-L/usr/local/lib Explanation for extra options: The http_gzip_static_module is used by kochiku to when serving the log files. The cc-opt and ld-opt are needed on Snow Leopard to avoid using the system's pcre install and use the one installed by Homebrew instead. The compile fails with the system pcre. After the new version finishes compiling tell nginx to reload sudo /opt/nginx/sbin/nginx -s reload Now upgrade the passenger gem in the Kochiku repo and deploy it. That gem is installed in a different location than the one above and doesn't actually get used (which is ok). We bump the gem in the kochiku project just to show what version is running on the server. ## Changes to nginx configuration ## Nginx configuration for Kochiku on macbuild-master is at: /Users/square/.nginx/http/macbuild-master.local.conf Up the maximum size allowed for requests (allows us to POST large log files) client_max_body_size 100M; Increase the http timeout above 60 seconds; necessary for large uploads client_body_timeout 120; Transmit all of the .gz files under log_files as plain/text (renders then inside the browser) location ~* log_files.*?\.gz$ { types { text/plain gz; } add_header Content-Encoding gzip; }
Update the documentation for upgrading nginx+passenger
Update the documentation for upgrading nginx+passenger
Text
apache-2.0
square/kochiku,square/kochiku,square/kochiku,rudle/kochiku,moshez/kochiku,IoraHealth/kochiku,rudle/kochiku,square/kochiku,moshez/kochiku,moshez/kochiku,rudle/kochiku,rudle/kochiku,IoraHealth/kochiku,IoraHealth/kochiku,moshez/kochiku
text
## Code Before: passenger-install-nginx-module ## Changes to nginx configuration ## # Up the maximum size allowed for requests (allows us to POST large log files) client_max_body_size 100M; # Increase the http timeout above 60 seconds; necessary for large uploads client_body_timeout 90; # Transmit all of the .gz files under log_files as plain/text (renders then inside the browser) location ~* log_files.*?\.gz$ { types { text/plain gz; } add_header Content-Encoding gzip; } ## Instruction: Update the documentation for upgrading nginx+passenger ## Code After: cd /Users/square curl http://nginx.org/download/nginx-1.2.4.tar.gz | tar xvz # Install the passenger gem into the 1.9.3@kochiku gemset $ cd ~/kochiku/current $ gem install passenger -v 3.0.18 $ which passenger-install-nginx-module # => /Users/square/.rvm/gems/ruby-1.9.3-p194@kochiku/bin/passenger-install-nginx-module Run the passenger nginx install rvmsudo passenger-install-nginx-module Select the advanced install (number 2) Provide the path the nginx source (/Users/square/nginx-1.2.4) Use the default install dir (/opt/nginx) Ensure that nginx is configured with the following additional options: --with-http_gzip_static_module --with-cc-opt=-I/usr/local/include --with-ld-opt=-L/usr/local/lib Explanation for extra options: The http_gzip_static_module is used by kochiku to when serving the log files. The cc-opt and ld-opt are needed on Snow Leopard to avoid using the system's pcre install and use the one installed by Homebrew instead. The compile fails with the system pcre. After the new version finishes compiling tell nginx to reload sudo /opt/nginx/sbin/nginx -s reload Now upgrade the passenger gem in the Kochiku repo and deploy it. That gem is installed in a different location than the one above and doesn't actually get used (which is ok). We bump the gem in the kochiku project just to show what version is running on the server. ## Changes to nginx configuration ## Nginx configuration for Kochiku on macbuild-master is at: /Users/square/.nginx/http/macbuild-master.local.conf Up the maximum size allowed for requests (allows us to POST large log files) client_max_body_size 100M; Increase the http timeout above 60 seconds; necessary for large uploads client_body_timeout 120; Transmit all of the .gz files under log_files as plain/text (renders then inside the browser) location ~* log_files.*?\.gz$ { types { text/plain gz; } add_header Content-Encoding gzip; }
33130e78840f4215223a573f22d06043c737d130
.travis.yml
.travis.yml
language: python python: - "2.7" - "3.4" - "3.5" - "3.6" - "nightly" # currently points to 3.7-dev before_install: - "pip install Cython" - "pip install numpy" install: - "pip install -r requirements-test.txt" script: PYTHONPATH=. py.test --cov after_success: coveralls
language: python matrix: include: - python: "2.7" - python: "3.4" - python: "3.5" - python: "3.6" - python: "3.7" dist: xenial sudo: true before_install: - "pip install Cython" - "pip install numpy" install: - "pip install -r requirements-test.txt" script: PYTHONPATH=. py.test --cov after_success: coveralls
Add a 3.7 test runner
Add a 3.7 test runner
YAML
bsd-2-clause
cjrh/misu
yaml
## Code Before: language: python python: - "2.7" - "3.4" - "3.5" - "3.6" - "nightly" # currently points to 3.7-dev before_install: - "pip install Cython" - "pip install numpy" install: - "pip install -r requirements-test.txt" script: PYTHONPATH=. py.test --cov after_success: coveralls ## Instruction: Add a 3.7 test runner ## Code After: language: python matrix: include: - python: "2.7" - python: "3.4" - python: "3.5" - python: "3.6" - python: "3.7" dist: xenial sudo: true before_install: - "pip install Cython" - "pip install numpy" install: - "pip install -r requirements-test.txt" script: PYTHONPATH=. py.test --cov after_success: coveralls
fd3b7560e509ef37db4333f7a4c1e98f001608a1
README.md
README.md
This repository contains FIRST Team 422's code for the 2014 season of the FIRST Robotics Challenge. This code is released under the MIT License. The text of this license is available in [LICENSE.txt](LICENSE.txt).
This repository contains FIRST Team 422's code for the 2014 season of the FIRST Robotics Challenge. This code is released under the MIT License. The text of this license is available in [LICENSE.txt](LICENSE.txt). This is another test for a development branch. With multiple commits.
Test branch merging, with multiple commits and a rebase
Test branch merging, with multiple commits and a rebase
Markdown
mit
team422/FRC-14
markdown
## Code Before: This repository contains FIRST Team 422's code for the 2014 season of the FIRST Robotics Challenge. This code is released under the MIT License. The text of this license is available in [LICENSE.txt](LICENSE.txt). ## Instruction: Test branch merging, with multiple commits and a rebase ## Code After: This repository contains FIRST Team 422's code for the 2014 season of the FIRST Robotics Challenge. This code is released under the MIT License. The text of this license is available in [LICENSE.txt](LICENSE.txt). This is another test for a development branch. With multiple commits.
9cf8c84a7d944212b7af58e366d3f11006b1cd1f
db/migrate/20210211115125_migrate_shipment_fees_to_shipments.rb
db/migrate/20210211115125_migrate_shipment_fees_to_shipments.rb
class MigrateShipmentFeesToShipments < ActiveRecord::Migration def up # Shipping fee adjustments currently have the order as the `adjustable` and the shipment as # the `source`. Both `source` and `adjustable` will now be the shipment. The `originator` is # the shipping method, and this is unchanged. Spree::Adjustment.where(originator_type: 'Spree::ShippingMethod').update_all( "adjustable_id = source_id, adjustable_type = 'Spree::Shipment'" ) end def down # Just in case: reversing this migration requires setting the `adjustable` back to the order. # The type is 'Spree::Order', and the order's id is still available on the `order_id` field. Spree::Adjustment.where(originator_type: 'Spree::ShippingMethod').update_all( "adjustable_id = order_id, adjustable_type = 'Spree::Order'" ) end end
class MigrateShipmentFeesToShipments < ActiveRecord::Migration class Spree::Adjustment < ActiveRecord::Base belongs_to :originator, polymorphic: true end def up # Shipping fee adjustments currently have the order as the `adjustable` and the shipment as # the `source`. Both `source` and `adjustable` will now be the shipment. The `originator` is # the shipping method, and this is unchanged. Spree::Adjustment.where(originator_type: 'Spree::ShippingMethod').update_all( "adjustable_id = source_id, adjustable_type = 'Spree::Shipment'" ) end def down # Just in case: reversing this migration requires setting the `adjustable` back to the order. # The type is 'Spree::Order', and the order's id is still available on the `order_id` field. Spree::Adjustment.where(originator_type: 'Spree::ShippingMethod').update_all( "adjustable_id = order_id, adjustable_type = 'Spree::Order'" ) end end
Add model definition to migration
Add model definition to migration
Ruby
agpl-3.0
mkllnk/openfoodnetwork,lin-d-hop/openfoodnetwork,Matt-Yorkley/openfoodnetwork,openfoodfoundation/openfoodnetwork,lin-d-hop/openfoodnetwork,mkllnk/openfoodnetwork,mkllnk/openfoodnetwork,Matt-Yorkley/openfoodnetwork,openfoodfoundation/openfoodnetwork,openfoodfoundation/openfoodnetwork,Matt-Yorkley/openfoodnetwork,lin-d-hop/openfoodnetwork,Matt-Yorkley/openfoodnetwork,openfoodfoundation/openfoodnetwork,lin-d-hop/openfoodnetwork,mkllnk/openfoodnetwork
ruby
## Code Before: class MigrateShipmentFeesToShipments < ActiveRecord::Migration def up # Shipping fee adjustments currently have the order as the `adjustable` and the shipment as # the `source`. Both `source` and `adjustable` will now be the shipment. The `originator` is # the shipping method, and this is unchanged. Spree::Adjustment.where(originator_type: 'Spree::ShippingMethod').update_all( "adjustable_id = source_id, adjustable_type = 'Spree::Shipment'" ) end def down # Just in case: reversing this migration requires setting the `adjustable` back to the order. # The type is 'Spree::Order', and the order's id is still available on the `order_id` field. Spree::Adjustment.where(originator_type: 'Spree::ShippingMethod').update_all( "adjustable_id = order_id, adjustable_type = 'Spree::Order'" ) end end ## Instruction: Add model definition to migration ## Code After: class MigrateShipmentFeesToShipments < ActiveRecord::Migration class Spree::Adjustment < ActiveRecord::Base belongs_to :originator, polymorphic: true end def up # Shipping fee adjustments currently have the order as the `adjustable` and the shipment as # the `source`. Both `source` and `adjustable` will now be the shipment. The `originator` is # the shipping method, and this is unchanged. Spree::Adjustment.where(originator_type: 'Spree::ShippingMethod').update_all( "adjustable_id = source_id, adjustable_type = 'Spree::Shipment'" ) end def down # Just in case: reversing this migration requires setting the `adjustable` back to the order. # The type is 'Spree::Order', and the order's id is still available on the `order_id` field. Spree::Adjustment.where(originator_type: 'Spree::ShippingMethod').update_all( "adjustable_id = order_id, adjustable_type = 'Spree::Order'" ) end end
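Two details in this migration are easy to miss: a throwaway Spree::Adjustment class is declared inside the migration so it does not depend on the application's current model, and the data move itself is one bulk UPDATE that re-points the polymorphic adjustable columns at the shipment. The sqlite3 sketch below shows roughly the SQL that update_all generates; the spree_adjustments table name is assumed from Spree's conventions, the column names follow the migration, and the seed row is invented:

```python
import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("""
    CREATE TABLE spree_adjustments (
        id INTEGER PRIMARY KEY,
        order_id INTEGER,
        source_id INTEGER,
        adjustable_id INTEGER,
        adjustable_type TEXT,
        originator_type TEXT
    )
""")
# A shipping fee: adjustable currently points at the order, source at the shipment.
conn.execute("""
    INSERT INTO spree_adjustments VALUES
    (1, 10, 77, 10, 'Spree::Order', 'Spree::ShippingMethod')
""")

# Equivalent of the migration's update_all call.
conn.execute("""
    UPDATE spree_adjustments
    SET adjustable_id = source_id, adjustable_type = 'Spree::Shipment'
    WHERE originator_type = 'Spree::ShippingMethod'
""")

assert conn.execute(
    "SELECT adjustable_id, adjustable_type FROM spree_adjustments"
).fetchone() == (77, "Spree::Shipment")
```

Reversing the move only needs order_id, which is why the migration's down method can restore the old adjustable without any extra bookkeeping.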
31cb18dabb765d0473e17962fe8eac882ed242d7
lib/simple-queue.js
lib/simple-queue.js
module.exports = SimpleQueue; function SimpleQueue() { var self = this; self.fifo = []; self.executing = false; } SimpleQueue.prototype.push = function (fn) { var self = this; self.fifo.push(fn); self.maybeNext(); }; SimpleQueue.prototype.maybeNext = function () { var self = this; if (!self.executing && self.fifo.length) { var fn = self.fifo.shift(); self.executing = true; fn(function () { self.executing = false; self.maybeNext(); }); } };
module.exports = SimpleQueue; function SimpleQueue() { var self = this; self.fifo = []; self.executing = false; } SimpleQueue.prototype.push = function (fn) { var self = this; self.fifo.push(fn); self.maybeNext(); }; SimpleQueue.prototype.maybeNext = function () { var self = this; if (!self.executing) { self.next(); } }; SimpleQueue.prototype.next = function () { var self = this; if (self.fifo.length) { var fn = self.fifo.shift(); self.executing = true; fn(function () { self.executing = false; self.maybeNext(); }); } };
Add next() function to SimpleQueue
Add next() function to SimpleQueue
JavaScript
mit
silveirado/node-ibm_db,jbaxter0810/node-odbc,silveirado/node-ibm_db,gmahomarf/node-odbc,abiliooliveira/node-ibm_db,Akpotohwo/node-ibm_db,jbaxter0810/node-odbc,Papercloud/node-odbc,qpresley/node-ibm_db,Akpotohwo/node-ibm_db,jbaxter0810/node-odbc,gmahomarf/node-odbc,ibmdb/node-ibm_db,strongloop-forks/node-ibm_db,bustta/node-odbc,bustta/node-odbc,wankdanker/node-odbc,bustta/node-odbc,qpresley/node-ibm_db,ibmdb/node-ibm_db,ibmdb/node-ibm_db,qpresley/node-ibm_db,bustta/node-odbc,qpresley/node-ibm_db,ibmdb/node-ibm_db,elkorep/node-ibm_db,Akpotohwo/node-ibm_db,qpresley/node-ibm_db,gmahomarf/node-odbc,Papercloud/node-odbc,bzuillsmith/node-odbc,dfbaskin/node-odbc,bzuillsmith/node-odbc,gmahomarf/node-odbc,silveirado/node-ibm_db,elkorep/node-ibm_db,dfbaskin/node-odbc,ibmdb/node-ibm_db,wankdanker/node-odbc,Akpotohwo/node-ibm_db,bzuillsmith/node-odbc,wankdanker/node-odbc,abiliooliveira/node-ibm_db,Akpotohwo/node-ibm_db,Papercloud/node-odbc,ibmdb/node-ibm_db,Akpotohwo/node-ibm_db,elkorep/node-ibm_db,dfbaskin/node-odbc,strongloop-forks/node-ibm_db,jbaxter0810/node-odbc,abiliooliveira/node-ibm_db,elkorep/node-ibm_db,qpresley/node-ibm_db,strongloop-forks/node-ibm_db,dfbaskin/node-odbc,bzuillsmith/node-odbc,silveirado/node-ibm_db,wankdanker/node-odbc,Papercloud/node-odbc,strongloop-forks/node-ibm_db,abiliooliveira/node-ibm_db
javascript
## Code Before: module.exports = SimpleQueue; function SimpleQueue() { var self = this; self.fifo = []; self.executing = false; } SimpleQueue.prototype.push = function (fn) { var self = this; self.fifo.push(fn); self.maybeNext(); }; SimpleQueue.prototype.maybeNext = function () { var self = this; if (!self.executing && self.fifo.length) { var fn = self.fifo.shift(); self.executing = true; fn(function () { self.executing = false; self.maybeNext(); }); } }; ## Instruction: Add next() function to SimpleQueue ## Code After: module.exports = SimpleQueue; function SimpleQueue() { var self = this; self.fifo = []; self.executing = false; } SimpleQueue.prototype.push = function (fn) { var self = this; self.fifo.push(fn); self.maybeNext(); }; SimpleQueue.prototype.maybeNext = function () { var self = this; if (!self.executing) { self.next(); } }; SimpleQueue.prototype.next = function () { var self = this; if (self.fifo.length) { var fn = self.fifo.shift(); self.executing = true; fn(function () { self.executing = false; self.maybeNext(); }); } };
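The refactor separates "run the next job only if idle" (maybeNext) from "pop and run" (next), which makes the single-flight behaviour easier to follow. The same one-at-a-time callback queue translates almost line for line to Python; this is a behavioural sketch, not part of node-odbc:

```python
class SimpleQueue:
    """Runs queued jobs one at a time; each job calls done() when finished."""

    def __init__(self):
        self.fifo = []
        self.executing = False

    def push(self, fn):
        self.fifo.append(fn)
        self.maybe_next()

    def maybe_next(self):
        if not self.executing:
            self.next()

    def next(self):
        if self.fifo:
            fn = self.fifo.pop(0)
            self.executing = True

            def done():
                self.executing = False
                self.maybe_next()

            fn(done)

queue = SimpleQueue()
order = []
queue.push(lambda done: (order.append("first"), done()))
queue.push(lambda done: (order.append("second"), done()))
assert order == ["first", "second"]
```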
07688d877058ed228ffb776b927138fc2ad1ed8d
addon/index.js
addon/index.js
import Ember from 'ember'; const { RSVP } = Ember; function preloadRecord(record, toPreload) { return preloadAll([record], toPreload).then(() => { return record; }); } function preloadAll(records, toPreload) { switch(Ember.typeOf(toPreload)) { case 'object': const properties = Object.keys(toPreload); return RSVP.all(properties.map((p) => { return RSVP.all(records.map((record) => { return record.get(p); })).then((data) => { const subRecords = data.reduce((prev, cur) => prev.concat(cur.toArray()), []); return preloadAll(subRecords, toPreload[p]); }); })).then(() => records); case 'string': return RSVP.all(records.map((record) => record.get(toPreload))) .then(() => records); default: throw 'Illegal Argument'; } } function preload(thing, toPreload) { if (thing.then) { return thing.then(() => { return Ember.isArray(thing) ? preloadAll(thing, toPreload) : preloadRecord(thing, toPreload); }); } else { return Ember.isArray(thing) ? preloadAll(thing, toPreload) : preloadRecord(thing, toPreload); } } export default preload;
import Ember from 'ember'; const { RSVP } = Ember; function getPromise(object, property) { return RSVP.resolve(Ember.get(object, property)); } function preloadRecord(record, toPreload) { if (!record) { return RSVP.resolve(record); } switch(Ember.typeOf(toPreload)) { case 'string': return getPromise(record, toPreload).then(() => record); case 'array': return RSVP.all(toPreload.map((p) => preloadRecord(record, p))).then(() => record); case 'object': return RSVP.all(Object.keys(toPreload).map((p) => getPromise(record, p).then((data) => preload(data, toPreload[p])))).then(() => record); default: throw 'Illegal Argument'; } } function preloadAll(records, toPreload) { return RSVP.all(records.map((record) => preload(record, toPreload))); } function preload(thing, toPreload) { return RSVP.resolve(thing).then(() => { return Ember.isArray(thing) ? preloadAll(thing, toPreload) : preloadRecord(thing, toPreload); }); } export default preload;
Refactor preload to handle more cases
Refactor preload to handle more cases
JavaScript
mit
levanto-financial/ember-data-preload,levanto-financial/ember-data-preload
javascript
## Code Before: import Ember from 'ember'; const { RSVP } = Ember; function preloadRecord(record, toPreload) { return preloadAll([record], toPreload).then(() => { return record; }); } function preloadAll(records, toPreload) { switch(Ember.typeOf(toPreload)) { case 'object': const properties = Object.keys(toPreload); return RSVP.all(properties.map((p) => { return RSVP.all(records.map((record) => { return record.get(p); })).then((data) => { const subRecords = data.reduce((prev, cur) => prev.concat(cur.toArray()), []); return preloadAll(subRecords, toPreload[p]); }); })).then(() => records); case 'string': return RSVP.all(records.map((record) => record.get(toPreload))) .then(() => records); default: throw 'Illegal Argument'; } } function preload(thing, toPreload) { if (thing.then) { return thing.then(() => { return Ember.isArray(thing) ? preloadAll(thing, toPreload) : preloadRecord(thing, toPreload); }); } else { return Ember.isArray(thing) ? preloadAll(thing, toPreload) : preloadRecord(thing, toPreload); } } export default preload; ## Instruction: Refactor preload to handle more cases ## Code After: import Ember from 'ember'; const { RSVP } = Ember; function getPromise(object, property) { return RSVP.resolve(Ember.get(object, property)); } function preloadRecord(record, toPreload) { if (!record) { return RSVP.resolve(record); } switch(Ember.typeOf(toPreload)) { case 'string': return getPromise(record, toPreload).then(() => record); case 'array': return RSVP.all(toPreload.map((p) => preloadRecord(record, p))).then(() => record); case 'object': return RSVP.all(Object.keys(toPreload).map((p) => getPromise(record, p).then((data) => preload(data, toPreload[p])))).then(() => record); default: throw 'Illegal Argument'; } } function preloadAll(records, toPreload) { return RSVP.all(records.map((record) => preload(record, toPreload))); } function preload(thing, toPreload) { return RSVP.resolve(thing).then(() => { return Ember.isArray(thing) ? preloadAll(thing, toPreload) : preloadRecord(thing, toPreload); }); } export default preload;
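After the refactor, preload dispatches on the shape of toPreload, which can be a single property name, an array of names, or a nested object, and it tolerates null records. A rough asyncio rendering of that dispatch, where fetch_related and the sample data are hypothetical rather than the addon's API:

```python
import asyncio

async def fetch_related(record, prop):
    """Hypothetical async relationship fetch; here it just reads a dict key."""
    await asyncio.sleep(0)                 # stand-in for a network round trip
    return record.get(prop)

async def preload(thing, to_preload):
    if thing is None:                      # mirrors the `if (!record)` guard
        return thing
    if isinstance(thing, list):            # preloadAll
        await asyncio.gather(*(preload(r, to_preload) for r in thing))
    elif isinstance(to_preload, str):      # single property name
        await fetch_related(thing, to_preload)
    elif isinstance(to_preload, list):     # several properties
        await asyncio.gather(*(preload(thing, p) for p in to_preload))
    elif isinstance(to_preload, dict):     # nested spec: fetch, then recurse
        for prop, nested in to_preload.items():
            await preload(await fetch_related(thing, prop), nested)
    else:
        raise ValueError("Illegal Argument")
    return thing

post = {"author": {"profile": {"bio": "hi"}}, "comments": [{"author": {"profile": {}}}]}
asyncio.run(preload(post, {"author": "profile", "comments": ["author"]}))
```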
42783c590b5ad9ea0f844ca2709a1e6a61721432
.github/workflows/main.yml
.github/workflows/main.yml
name: main on: push: branches: [master] pull_request: branches: [master] jobs: build: runs-on: ubuntu-latest strategy: matrix: arch: [x64, arm] target: [lib, exe, daemon, deb] steps: - uses: actions/checkout@v2 - name: Build run: make ${{ matrix.arch }}-${{ matrix.target }} - uses: actions/upload-artifact@v2 if: matrix.target == 'deb' with: name: sectora-${{ matrix.arch }} path: ${{ github.workspace }}/target/*/debian/*.deb test: needs: build runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 - uses: actions/download-artifact@v2 with: name: sectora-x64 path: target - name: Run tests run: make test-deb working-directory: test env: COMPOSE_INTERACTIVE_NO_CLI: 1 TERM: xterm-256color
name: main on: push: branches: [master] pull_request: branches: [master] jobs: build: runs-on: ubuntu-latest strategy: matrix: arch: [x64, arm] target: [lib, exe, daemon, deb] steps: - uses: actions/checkout@v2 - uses: actions/cache@v1 with: path: .cargo-${{ matrix.arch }} key: ${{ matrix.arch }}-${{ hashFiles('Cargo.lock') }} - name: Build run: make ${{ matrix.arch }}-${{ matrix.target }} - uses: actions/upload-artifact@v2 if: matrix.target == 'deb' with: name: sectora-${{ matrix.arch }} path: ${{ github.workspace }}/target/*/debian/*.deb test: needs: build runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 - uses: actions/download-artifact@v2 with: name: sectora-x64 path: target - name: Run tests run: make test-deb working-directory: test env: COMPOSE_INTERACTIVE_NO_CLI: 1 TERM: xterm-256color
Use cache on GitHub Actions
Use cache on GitHub Actions
YAML
mit
yasuyuky/ghteam-auth,yasuyuky/ghteam-auth
yaml
## Code Before: name: main on: push: branches: [master] pull_request: branches: [master] jobs: build: runs-on: ubuntu-latest strategy: matrix: arch: [x64, arm] target: [lib, exe, daemon, deb] steps: - uses: actions/checkout@v2 - name: Build run: make ${{ matrix.arch }}-${{ matrix.target }} - uses: actions/upload-artifact@v2 if: matrix.target == 'deb' with: name: sectora-${{ matrix.arch }} path: ${{ github.workspace }}/target/*/debian/*.deb test: needs: build runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 - uses: actions/download-artifact@v2 with: name: sectora-x64 path: target - name: Run tests run: make test-deb working-directory: test env: COMPOSE_INTERACTIVE_NO_CLI: 1 TERM: xterm-256color ## Instruction: Use cache on GitHub Actions ## Code After: name: main on: push: branches: [master] pull_request: branches: [master] jobs: build: runs-on: ubuntu-latest strategy: matrix: arch: [x64, arm] target: [lib, exe, daemon, deb] steps: - uses: actions/checkout@v2 - uses: actions/cache@v1 with: path: .cargo-${{ matrix.arch }} key: ${{ matrix.arch }}-${{ hashFiles('Cargo.lock') }} - name: Build run: make ${{ matrix.arch }}-${{ matrix.target }} - uses: actions/upload-artifact@v2 if: matrix.target == 'deb' with: name: sectora-${{ matrix.arch }} path: ${{ github.workspace }}/target/*/debian/*.deb test: needs: build runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 - uses: actions/download-artifact@v2 with: name: sectora-x64 path: target - name: Run tests run: make test-deb working-directory: test env: COMPOSE_INTERACTIVE_NO_CLI: 1 TERM: xterm-256color
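The added actions/cache step keys the .cargo-<arch> directory on hashFiles('Cargo.lock'), so the cache is reused until the lockfile changes. The snippet below is only a back-of-the-envelope illustration of how a content-addressed key behaves; it is not how actions/cache or hashFiles actually computes keys:

```python
import hashlib

def cache_key(arch, lockfile_bytes):
    """Key in the spirit of ${{ matrix.arch }}-${{ hashFiles('Cargo.lock') }}."""
    return f"{arch}-{hashlib.sha256(lockfile_bytes).hexdigest()}"

lock_v1 = b'[[package]]\nname = "serde"\nversion = "1.0.0"\n'
lock_v2 = b'[[package]]\nname = "serde"\nversion = "1.0.1"\n'

assert cache_key("x64", lock_v1) == cache_key("x64", lock_v1)   # stable -> cache hit
assert cache_key("x64", lock_v1) != cache_key("x64", lock_v2)   # edit -> new key, rebuild
assert cache_key("x64", lock_v1) != cache_key("arm", lock_v1)   # per-arch caches stay separate
```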
73e4f2c333e7b4f02dbb0ec344a3a671ba97cac3
library-examples/read-replace-export-excel.py
library-examples/read-replace-export-excel.py
import shutil from openpyxl import load_workbook, Workbook shutil.copyfile('sample-input-fortest.xlsx','sample-input-fortest-out.xlsx') #point to the file to be read. Intuitive. wb2 = load_workbook('sample-input-fortest.xlsx') #convince your self that sheet names are retireved. sheet_names = wb2.get_sheet_names() print sheet_names #work book is simply a list of sheets sheet = wb2[sheet_names[0]] print sheet print "can iterate sheets, rows and columns intuitively" string_list = list() string_list.append(("sequence_number","original language")) seq_no = 1 for sheet in wb2: for row in sheet.rows: for cell in row: if None!=cell.value: string_list.append((seq_no,cell.value)) seq_no+=1 wb_out = Workbook(write_only=True) ws = wb_out.create_sheet() for string in string_list: ws.append(string) wb_out.save('new_big_file.xlsx')
import shutil from openpyxl import load_workbook, Workbook #point to the file to be read. Intuitive. wb2 = load_workbook('sample-input-fortest.xlsx') #convince your self that sheet names are retireved. sheet_names = wb2.get_sheet_names() print sheet_names #work book is simply a list of sheets sheet = wb2[sheet_names[0]] print sheet #go trhough the excel file, extract strings & replace with number. string_list = list() string_list.append(("sequence_number","original language")) seq_no = 1 for sheet in wb2: for row in sheet.rows: for cell in row: if None!=cell.value: string_list.append((seq_no,cell.value)) cell.value=str(seq_no) seq_no+=1 #save the file containing numbers that replaced the string. wb2.save('sample-input-fortest-out.xlsx') #save the extracted strings wb_out = Workbook(write_only=True) ws = wb_out.create_sheet() for string in string_list: ws.append(string) wb_out.save('new_big_file.xlsx')
Change so original input does not change.
Change so original input does not change.
Python
apache-2.0
iku000888/Excel_Translation_Helper
python
## Code Before: import shutil from openpyxl import load_workbook, Workbook shutil.copyfile('sample-input-fortest.xlsx','sample-input-fortest-out.xlsx') #point to the file to be read. Intuitive. wb2 = load_workbook('sample-input-fortest.xlsx') #convince your self that sheet names are retireved. sheet_names = wb2.get_sheet_names() print sheet_names #work book is simply a list of sheets sheet = wb2[sheet_names[0]] print sheet print "can iterate sheets, rows and columns intuitively" string_list = list() string_list.append(("sequence_number","original language")) seq_no = 1 for sheet in wb2: for row in sheet.rows: for cell in row: if None!=cell.value: string_list.append((seq_no,cell.value)) seq_no+=1 wb_out = Workbook(write_only=True) ws = wb_out.create_sheet() for string in string_list: ws.append(string) wb_out.save('new_big_file.xlsx') ## Instruction: Change so original input does not change. ## Code After: import shutil from openpyxl import load_workbook, Workbook #point to the file to be read. Intuitive. wb2 = load_workbook('sample-input-fortest.xlsx') #convince your self that sheet names are retireved. sheet_names = wb2.get_sheet_names() print sheet_names #work book is simply a list of sheets sheet = wb2[sheet_names[0]] print sheet #go trhough the excel file, extract strings & replace with number. string_list = list() string_list.append(("sequence_number","original language")) seq_no = 1 for sheet in wb2: for row in sheet.rows: for cell in row: if None!=cell.value: string_list.append((seq_no,cell.value)) cell.value=str(seq_no) seq_no+=1 #save the file containing numbers that replaced the string. wb2.save('sample-input-fortest-out.xlsx') #save the extracted strings wb_out = Workbook(write_only=True) ws = wb_out.create_sheet() for string in string_list: ws.append(string) wb_out.save('new_big_file.xlsx')
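The script now produces two files: the workbook with every string replaced by a sequence number, and a second workbook listing the extracted strings. The natural companion step, merging translated strings back in by sequence number, uses the same openpyxl calls; in this minimal sketch the translations dict and the output file name are invented:

```python
from openpyxl import load_workbook

# sequence number -> translated text; in practice read from the translators' sheet.
translations = {1: "translated text 1", 2: "translated text 2"}

wb = load_workbook("sample-input-fortest-out.xlsx")   # the placeholder-number workbook
for sheet in wb:
    for row in sheet.rows:
        for cell in row:
            if cell.value is None:
                continue
            seq_no = int(cell.value)                  # placeholders were saved as strings
            cell.value = translations.get(seq_no, cell.value)
wb.save("sample-translated.xlsx")
```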
aa2009651c2b3b5f5a2da46d80f122141d099ce7
sass/partials/_forms.scss
sass/partials/_forms.scss
form { } form.feature { padding-top: $spacing; padding-bottom: $spacing * 2; } input[type="text"], input[type="password"] { padding: $spacing / 2; font-size: 150%; border: 2px solid $gray; border-radius: $radius; color: $gray; } input[type="submit"], input[type="button"], button, a.button { display: block; width: 100%; margin-bottom: $spacing / 2; padding: $spacing ($spacing / 2) $spacing ($spacing / 2); font-family: $font-body; font-size: 120%; letter-spacing: 1px; text-transform: uppercase; background-color: transparent; color: white; border: 2px solid white; border-radius: $radius; cursor: pointer; span.fa.left { margin-right: $spacing / 2; } span.fa.right { margin-left: $spacing / 2; } &.gray { color: $light-gray; border-color: $light-gray; } }
form { } form.feature { padding-top: $spacing; padding-bottom: $spacing * 2; } input[type="text"], input[type="password"] { padding: $spacing / 2; font: ($font-body-size * 1.75) $font-body; border: 2px solid $gray; border-radius: $radius; color: $gray; } input[type="submit"], input[type="button"], button, a.button { display: block; width: 100%; margin-bottom: $spacing / 2; padding: $spacing ($spacing / 2) $spacing ($spacing / 2); font-family: $font-body; font-size: 120%; letter-spacing: 1px; text-transform: uppercase; background-color: transparent; color: white; border: 2px solid white; border-radius: $radius; cursor: pointer; span.fa.left { margin-right: $spacing / 2; } span.fa.right { margin-left: $spacing / 2; } &.gray { color: $light-gray; border-color: $light-gray; } }
Fix default font on input elements.
Fix default font on input elements.
SCSS
mit
jadaradix/fyp,jadaradix/fyp
scss
## Code Before: form {

}

form.feature {
  padding-top: $spacing;
  padding-bottom: $spacing * 2;
}

input[type="text"], input[type="password"] {
  padding: $spacing / 2;
  font-size: 150%;
  border: 2px solid $gray;
  border-radius: $radius;
  color: $gray;
}

input[type="submit"], input[type="button"], button, a.button {
  display: block;
  width: 100%;
  margin-bottom: $spacing / 2;
  padding: $spacing ($spacing / 2) $spacing ($spacing / 2);
  font-family: $font-body;
  font-size: 120%;
  letter-spacing: 1px;
  text-transform: uppercase;
  background-color: transparent;
  color: white;
  border: 2px solid white;
  border-radius: $radius;
  cursor: pointer;

  span.fa.left {
    margin-right: $spacing / 2;
  }

  span.fa.right {
    margin-left: $spacing / 2;
  }

  &.gray {
    color: $light-gray;
    border-color: $light-gray;
  }
}

## Instruction: Fix default font on input elements.

## Code After: form {

}

form.feature {
  padding-top: $spacing;
  padding-bottom: $spacing * 2;
}

input[type="text"], input[type="password"] {
  padding: $spacing / 2;
  font: ($font-body-size * 1.75) $font-body;
  border: 2px solid $gray;
  border-radius: $radius;
  color: $gray;
}

input[type="submit"], input[type="button"], button, a.button {
  display: block;
  width: 100%;
  margin-bottom: $spacing / 2;
  padding: $spacing ($spacing / 2) $spacing ($spacing / 2);
  font-family: $font-body;
  font-size: 120%;
  letter-spacing: 1px;
  text-transform: uppercase;
  background-color: transparent;
  color: white;
  border: 2px solid white;
  border-radius: $radius;
  cursor: pointer;

  span.fa.left {
    margin-right: $spacing / 2;
  }

  span.fa.right {
    margin-left: $spacing / 2;
  }

  &.gray {
    color: $light-gray;
    border-color: $light-gray;
  }
}
cf9e3e407abaa93b5ea12eaf0100abee2e2eb537
map.asd
map.asd
(defsystem :map
  :serial t
  :description "Octave-like functionality for lisp!"
  :version "0.1.0"
  :author "Matt Novenstern <[email protected]>"
  :components ((:file "package")
               (:file "conditions")
               (:module utilities
                        :components ((:file "printing")
                                     (:file "missing-functions")))
               (:module vectors
                        :components ((:file "vector")
                                     (:file "range")
                                     (:file "vector-operations")))
               (:module matrix
                        :components ((:file "matrices")
                                     (:file "matrix-predicates")
                                     (:file "matrix-operations")
                                     (:file "matrix-slices")
                                     (:file "matrix-macros")
                                     ))
               (:module calculus
                        :components ((:file "integration")
                                     (:file "rkf45")))
               (:module plotting
                        :components ((:file "plot")))
               (:module images
                        :components ((:file "image"))))
  :depends-on (#:cl-jpeg
               #:external-program))
(defsystem :map
  :serial t
  :description "Octave-like functionality for lisp!"
  :version "0.1.0"
  :author "Matt Novenstern <[email protected]>"
  :components ((:file "package")
               (:file "conditions")
               (:module utilities
                        :components ((:file "printing")
                                     (:file "missing-functions")))
               (:module vectors
                        :components ((:file "vector")
                                     (:file "range")
                                     (:file "vector-operations")))
               (:module matrix
                        :components ((:file "matrices")
                                     (:file "matrix-macros")
                                     (:file "matrix-predicates")
                                     (:file "matrix-operations")
                                     (:file "matrix-slices")
                                     ))
               (:module calculus
                        :components ((:file "integration")
                                     (:file "rkf45")))
               (:module plotting
                        :components ((:file "plot")))
               (:module images
                        :components ((:file "image"))))
  :depends-on (#:cl-jpeg
               #:external-program))
Load macros before the functions that use them
Load macros before the functions that use them
Common Lisp
agpl-3.0
fisxoj/map
common-lisp
## Code Before: (defsystem :map
  :serial t
  :description "Octave-like functionality for lisp!"
  :version "0.1.0"
  :author "Matt Novenstern <[email protected]>"
  :components ((:file "package")
               (:file "conditions")
               (:module utilities
                        :components ((:file "printing")
                                     (:file "missing-functions")))
               (:module vectors
                        :components ((:file "vector")
                                     (:file "range")
                                     (:file "vector-operations")))
               (:module matrix
                        :components ((:file "matrices")
                                     (:file "matrix-predicates")
                                     (:file "matrix-operations")
                                     (:file "matrix-slices")
                                     (:file "matrix-macros")
                                     ))
               (:module calculus
                        :components ((:file "integration")
                                     (:file "rkf45")))
               (:module plotting
                        :components ((:file "plot")))
               (:module images
                        :components ((:file "image"))))
  :depends-on (#:cl-jpeg
               #:external-program))

## Instruction: Load macros before the functions that use them

## Code After: (defsystem :map
  :serial t
  :description "Octave-like functionality for lisp!"
  :version "0.1.0"
  :author "Matt Novenstern <[email protected]>"
  :components ((:file "package")
               (:file "conditions")
               (:module utilities
                        :components ((:file "printing")
                                     (:file "missing-functions")))
               (:module vectors
                        :components ((:file "vector")
                                     (:file "range")
                                     (:file "vector-operations")))
               (:module matrix
                        :components ((:file "matrices")
                                     (:file "matrix-macros")
                                     (:file "matrix-predicates")
                                     (:file "matrix-operations")
                                     (:file "matrix-slices")
                                     ))
               (:module calculus
                        :components ((:file "integration")
                                     (:file "rkf45")))
               (:module plotting
                        :components ((:file "plot")))
               (:module images
                        :components ((:file "image"))))
  :depends-on (#:cl-jpeg
               #:external-program))
104109d502e3276d559c99cd7e4c3ec0f5741fa1
lib/oriented/core/transaction.rb
lib/oriented/core/transaction.rb
module Oriented
  module Core

    #
    # Class to encapsulate the transaction for OrientDB
    #
    # Currently, if anything fails, it will rollback the connection
    # and then CLOSE it.  This means any existing objects in your
    # context will be hosed and you'll have to go get them from the db
    # with a new connection.
    #
    # An Identity map would help this, methinks.
    class Transaction

      def self.run connection = Oriented.connection, &block
        begin
          ensure_connection(connection)
          ret = yield
          connection.commit
          ret
        rescue => ex
          connection.rollback
          connection.close
          raise
        ensure
        end
      end

      private
      def self.ensure_connection(conn)
        unless conn.transaction_active?
          conn.connect
        end
      end
    end

    module TransactionWrapper
      def wrap_in_transaction(*methods)
        methods.each do |method|
          tx_method = "#{method}_no_tx"
          send(:alias_method, tx_method, method)
          send(:define_method, method) do |*args|
            Oriented::Core::Transaction.run { send(tx_method, *args) }
          end
        end
      end
    end
  end
end
module Oriented
  module Core

    #
    # Class to encapsulate the transaction for OrientDB
    #
    # Currently, if anything fails, it will rollback the connection
    # and then CLOSE it.  This means any existing objects in your
    # context will be hosed and you'll have to go get them from the db
    # with a new connection.
    #
    # An Identity map would help this, methinks.
    class Transaction

      def self.run connection = Oriented.connection, &block
        ensure_connection(connection)
        ret = yield
        connection.commit
        ret
      rescue => ex
        connection.rollback
        connection.close
        raise
      ensure
      end

      private
      def self.ensure_connection(conn)
        unless conn.transaction_active?
          conn.connect
        end
      end
    end

    module TransactionWrapper
      def wrap_in_transaction(*methods)
        methods.each do |method|
          tx_method = "#{method}_no_tx"
          send(:alias_method, tx_method, method)
          send(:define_method, method) do |*args|
            Oriented::Core::Transaction.run { send(tx_method, *args) }
          end
        end
      end
    end
  end
end
Remove unnecessary begin/end from Transaction.run
Remove unnecessary begin/end from Transaction.run
Ruby
mit
ruprict/oriented
ruby
## Code Before: module Oriented
  module Core

    #
    # Class to encapsulate the transaction for OrientDB
    #
    # Currently, if anything fails, it will rollback the connection
    # and then CLOSE it.  This means any existing objects in your
    # context will be hosed and you'll have to go get them from the db
    # with a new connection.
    #
    # An Identity map would help this, methinks.
    class Transaction

      def self.run connection = Oriented.connection, &block
        begin
          ensure_connection(connection)
          ret = yield
          connection.commit
          ret
        rescue => ex
          connection.rollback
          connection.close
          raise
        ensure
        end
      end

      private
      def self.ensure_connection(conn)
        unless conn.transaction_active?
          conn.connect
        end
      end
    end

    module TransactionWrapper
      def wrap_in_transaction(*methods)
        methods.each do |method|
          tx_method = "#{method}_no_tx"
          send(:alias_method, tx_method, method)
          send(:define_method, method) do |*args|
            Oriented::Core::Transaction.run { send(tx_method, *args) }
          end
        end
      end
    end
  end
end

## Instruction: Remove unnecessary begin/end from Transaction.run

## Code After: module Oriented
  module Core

    #
    # Class to encapsulate the transaction for OrientDB
    #
    # Currently, if anything fails, it will rollback the connection
    # and then CLOSE it.  This means any existing objects in your
    # context will be hosed and you'll have to go get them from the db
    # with a new connection.
    #
    # An Identity map would help this, methinks.
    class Transaction

      def self.run connection = Oriented.connection, &block
        ensure_connection(connection)
        ret = yield
        connection.commit
        ret
      rescue => ex
        connection.rollback
        connection.close
        raise
      ensure
      end

      private
      def self.ensure_connection(conn)
        unless conn.transaction_active?
          conn.connect
        end
      end
    end

    module TransactionWrapper
      def wrap_in_transaction(*methods)
        methods.each do |method|
          tx_method = "#{method}_no_tx"
          send(:alias_method, tx_method, method)
          send(:define_method, method) do |*args|
            Oriented::Core::Transaction.run { send(tx_method, *args) }
          end
        end
      end
    end
  end
end
119415a7a4e6db2848b6e582a192ef309cff660f
README.md
README.md
This is a weather station based on node.js that creates a webpage and graphs from the sensor data.

By default, it records data once per minute, and supports CSV exports to excel date/time format.

### Requirements:
- Raspberry Pi
- node.js
- npm

Node package requirements are listed in package.json.

### Supports:
- TSL2591 Lux sensors
- BME280 Temperature/Humidity/Pressure sensors

### Future intentions
- Better output options
- Better mobile and widescreen support
- Improved database
This is a weather station based on node.js that creates a webpage and graphs from the sensor data.

Currently, data is recorded once per minute to display in the live data section, and an average is taken every 5 minutes for display on graphs (powered by chart.js)

Raw data can be output as CSV (for excel), HTML (for viewing) and JSON (for advanced applications)

### PHP version
Some parts of the code (the web app) are avialable ported to PHP, in our case for use on Google cloud. Instructions and code can be found at https://github.com/comp500/sensor-website

### Requirements:
- Raspberry Pi
- node.js
- npm

Node package requirements are listed in package.json.

### Supports:
- TSL2591 Lux sensors
- BME280 Temperature/Humidity/Pressure sensors

### Future intentions
- Improved database
Update readme with references to PHP and outputs
Update readme with references to PHP and outputs
Markdown
mit
comp500/sensor-reporter,comp500/sensor-reporter
markdown
## Code Before: This is a weather station based on node.js that creates a webpage and graphs from the sensor data.

By default, it records data once per minute, and supports CSV exports to excel date/time format.

### Requirements:
- Raspberry Pi
- node.js
- npm

Node package requirements are listed in package.json.

### Supports:
- TSL2591 Lux sensors
- BME280 Temperature/Humidity/Pressure sensors

### Future intentions
- Better output options
- Better mobile and widescreen support
- Improved database

## Instruction: Update readme with references to PHP and outputs

## Code After: This is a weather station based on node.js that creates a webpage and graphs from the sensor data.

Currently, data is recorded once per minute to display in the live data section, and an average is taken every 5 minutes for display on graphs (powered by chart.js)

Raw data can be output as CSV (for excel), HTML (for viewing) and JSON (for advanced applications)

### PHP version
Some parts of the code (the web app) are avialable ported to PHP, in our case for use on Google cloud. Instructions and code can be found at https://github.com/comp500/sensor-website

### Requirements:
- Raspberry Pi
- node.js
- npm

Node package requirements are listed in package.json.

### Supports:
- TSL2591 Lux sensors
- BME280 Temperature/Humidity/Pressure sensors

### Future intentions
- Improved database
2e6cfe56806a7fb07396293568c6c4449dccf188
gifer.js
gifer.js
'use strict';

function init() {
  // Disable drag + drop event for document.
  document.addEventListener('dragover', function(event) {
    event.preventDefault();
    return false;
  }, false);

  document.addEventListener('drop', function(event) {
    event.preventDefault();
    return false;
  }, false);

  // Drag and Drop holder.
  const holder = document.getElementById('holder');
  // Placehold text in holder.
  const dragText = document.getElementById('drag-text');

  holder.ondragover = function() {
    return false;
  };

  holder.ondragleave = holder.ondragend = function() {
    return false;
  };

  holder.ondrop = function(e) {
    e.preventDefault();

    const file = e.dataTransfer.files[0];
    console.log('File you dragged here is', file.path);

    // Remove exitng video.
    const existingVideo = holder.getElementsByTagName('video')[0];
    if (existingVideo) {
      existingVideo.remove();
    };

    dragText.className += ' hidden';

    const video = document.createElement("video");
    video.setAttribute('controls', '');
    video.setAttribute("width", '100%');
    video.setAttribute('height', '100%');

    const source = document.createElement("source");
    source.setAttribute('src', file.path);

    video.appendChild(source);
    holder.appendChild(video);

    return false;
  };
};

window.onload = init;
'use strict';

const videoClip = require('./movie.js');

function bindUI() {
  // Bind options to videoClip file.
}

function reset() {
  // Reset options to values of video file.
}

function init() {
  // Disable drag + drop event for document.
  document.addEventListener('dragover', function(event) {
    event.preventDefault();
    return false;
  }, false);

  document.addEventListener('drop', function(event) {
    event.preventDefault();
    return false;
  }, false);

  // Drag and Drop holder.
  const holder = document.getElementById('holder');
  // Placehold text in holder.
  const dragText = document.getElementById('drag-text');

  holder.ondragover = function() {
    return false;
  };

  holder.ondragleave = holder.ondragend = function() {
    return false;
  };

  holder.ondrop = function(e) {
    e.preventDefault();

    const file = e.dataTransfer.files[0];
    console.log('File you dragged here is', file.path);

    videoClip.video = file.path;

    // Remove exitng video.
    const existingVideo = holder.getElementsByTagName('video')[0];
    if (existingVideo) {
      existingVideo.remove();
    };

    dragText.className += ' hidden';

    const video = document.createElement("video");
    video.setAttribute('controls', '');
    video.setAttribute("width", '100%');
    video.setAttribute('height', '100%');

    const source = document.createElement("source");
    source.setAttribute('src', file.path);

    video.appendChild(source);
    holder.appendChild(video);

    return true;
  };
}

window.onload = init;
Add functions to be filled later
Add functions to be filled later
JavaScript
mit
mikkkee/gifer-electron,mikkkee/gifer-electron,mikkkee/gifer-electron
javascript
## Code Before: 'use strict';

function init() {
  // Disable drag + drop event for document.
  document.addEventListener('dragover', function(event) {
    event.preventDefault();
    return false;
  }, false);

  document.addEventListener('drop', function(event) {
    event.preventDefault();
    return false;
  }, false);

  // Drag and Drop holder.
  const holder = document.getElementById('holder');
  // Placehold text in holder.
  const dragText = document.getElementById('drag-text');

  holder.ondragover = function() {
    return false;
  };

  holder.ondragleave = holder.ondragend = function() {
    return false;
  };

  holder.ondrop = function(e) {
    e.preventDefault();

    const file = e.dataTransfer.files[0];
    console.log('File you dragged here is', file.path);

    // Remove exitng video.
    const existingVideo = holder.getElementsByTagName('video')[0];
    if (existingVideo) {
      existingVideo.remove();
    };

    dragText.className += ' hidden';

    const video = document.createElement("video");
    video.setAttribute('controls', '');
    video.setAttribute("width", '100%');
    video.setAttribute('height', '100%');

    const source = document.createElement("source");
    source.setAttribute('src', file.path);

    video.appendChild(source);
    holder.appendChild(video);

    return false;
  };
};

window.onload = init;

## Instruction: Add functions to be filled later

## Code After: 'use strict';

const videoClip = require('./movie.js');

function bindUI() {
  // Bind options to videoClip file.
}

function reset() {
  // Reset options to values of video file.
}

function init() {
  // Disable drag + drop event for document.
  document.addEventListener('dragover', function(event) {
    event.preventDefault();
    return false;
  }, false);

  document.addEventListener('drop', function(event) {
    event.preventDefault();
    return false;
  }, false);

  // Drag and Drop holder.
  const holder = document.getElementById('holder');
  // Placehold text in holder.
  const dragText = document.getElementById('drag-text');

  holder.ondragover = function() {
    return false;
  };

  holder.ondragleave = holder.ondragend = function() {
    return false;
  };

  holder.ondrop = function(e) {
    e.preventDefault();

    const file = e.dataTransfer.files[0];
    console.log('File you dragged here is', file.path);

    videoClip.video = file.path;

    // Remove exitng video.
    const existingVideo = holder.getElementsByTagName('video')[0];
    if (existingVideo) {
      existingVideo.remove();
    };

    dragText.className += ' hidden';

    const video = document.createElement("video");
    video.setAttribute('controls', '');
    video.setAttribute("width", '100%');
    video.setAttribute('height', '100%');

    const source = document.createElement("source");
    source.setAttribute('src', file.path);

    video.appendChild(source);
    holder.appendChild(video);

    return true;
  };
}

window.onload = init;
c3d094074a6c4224efb39489110fe99b491d1108
utils/swift_build_support/swift_build_support/compiler_stage.py
utils/swift_build_support/swift_build_support/compiler_stage.py
class StageArgs(object):
    def __init__(self, stage, args):
        self.stage = stage
        self.args = args

    def __getattr__(self, key):
        real_key = '{}{}'.format(key, self.stage.postfix)
        if not hasattr(self.args, real_key):
            return None
        return getattr(self.args, real_key)


class Stage(object):
    def __init__(self, identifier, postfix=""):
        self.identifier = identifier
        self.postfix = postfix


STAGE_1 = Stage(1, "")
STAGE_2 = Stage(2, "_stage2")
class StageArgs(object):
    def __init__(self, stage, args):
        self.stage = stage
        self.args = args
        assert(not isinstance(self.args, StageArgs))

    def __getattr__(self, key):
        real_key = '{}{}'.format(key, self.stage.postfix)
        if not hasattr(self.args, real_key):
            return None
        return getattr(self.args, real_key)


class Stage(object):
    def __init__(self, identifier, postfix=""):
        self.identifier = identifier
        self.postfix = postfix


STAGE_1 = Stage(1, "")
STAGE_2 = Stage(2, "_stage2")
Make sure that StageArgs are never passed a StageArgs as their args.
[build-script] Make sure that StageArgs are never passed a StageArgs as their args. No good reason to do this and simplifies the state space.
Python
apache-2.0
hooman/swift,ahoppen/swift,gregomni/swift,benlangmuir/swift,gregomni/swift,glessard/swift,ahoppen/swift,glessard/swift,hooman/swift,roambotics/swift,apple/swift,rudkx/swift,hooman/swift,ahoppen/swift,xwu/swift,xwu/swift,xwu/swift,JGiola/swift,rudkx/swift,apple/swift,hooman/swift,roambotics/swift,atrick/swift,benlangmuir/swift,hooman/swift,atrick/swift,benlangmuir/swift,rudkx/swift,JGiola/swift,gregomni/swift,gregomni/swift,apple/swift,atrick/swift,roambotics/swift,rudkx/swift,apple/swift,apple/swift,ahoppen/swift,xwu/swift,JGiola/swift,hooman/swift,atrick/swift,roambotics/swift,glessard/swift,benlangmuir/swift,xwu/swift,atrick/swift,JGiola/swift,xwu/swift,JGiola/swift,hooman/swift,gregomni/swift,ahoppen/swift,rudkx/swift,atrick/swift,roambotics/swift,benlangmuir/swift,roambotics/swift,JGiola/swift,apple/swift,rudkx/swift,glessard/swift,glessard/swift,glessard/swift,gregomni/swift,benlangmuir/swift,ahoppen/swift,xwu/swift
python
## Code Before: class StageArgs(object):
    def __init__(self, stage, args):
        self.stage = stage
        self.args = args

    def __getattr__(self, key):
        real_key = '{}{}'.format(key, self.stage.postfix)
        if not hasattr(self.args, real_key):
            return None
        return getattr(self.args, real_key)


class Stage(object):
    def __init__(self, identifier, postfix=""):
        self.identifier = identifier
        self.postfix = postfix


STAGE_1 = Stage(1, "")
STAGE_2 = Stage(2, "_stage2")

## Instruction: [build-script] Make sure that StageArgs are never passed a StageArgs as their args. No good reason to do this and simplifies the state space.

## Code After: class StageArgs(object):
    def __init__(self, stage, args):
        self.stage = stage
        self.args = args
        assert(not isinstance(self.args, StageArgs))

    def __getattr__(self, key):
        real_key = '{}{}'.format(key, self.stage.postfix)
        if not hasattr(self.args, real_key):
            return None
        return getattr(self.args, real_key)


class Stage(object):
    def __init__(self, identifier, postfix=""):
        self.identifier = identifier
        self.postfix = postfix


STAGE_1 = Stage(1, "")
STAGE_2 = Stage(2, "_stage2")
4ae3b77847eeefd07d83f863c6ec71d7fdf750cb
turbustat/tests/test_rfft_to_fft.py
turbustat/tests/test_rfft_to_fft.py
from turbustat.statistics.rfft_to_fft import rfft_to_fft
from ._testing_data import dataset1

import numpy as np
import numpy.testing as npt

from unittest import TestCase


class testRFFT(TestCase):
    """docstring for testRFFT"""
    def __init__(self):
        self.dataset1 = dataset1

        self.comp_rfft = rfft_to_fft(self.dataset1)

    def rfft_to_rfft(self):
        test_rfft = np.abs(np.fft.rfftn(self.dataset1))

        shape2 = test_rfft.shape[-1]

        npt.assert_allclose(test_rfft, self.comp_rfft[:, :, :shape2+1])

    def fft_to_rfft(self):
        test_fft = np.abs(np.fft.fftn(self.dataset1))

        npt.assert_allclose(test_fft, self.comp_rfft)
import pytest

from ..statistics.rfft_to_fft import rfft_to_fft
from ._testing_data import dataset1

import numpy as np
import numpy.testing as npt


def test_rfft_to_rfft():
    comp_rfft = rfft_to_fft(dataset1['moment0'][0])
    test_rfft = np.abs(np.fft.rfftn(dataset1['moment0'][0]))

    shape2 = test_rfft.shape[-1]

    npt.assert_allclose(test_rfft, comp_rfft[:, :shape2])


def test_fft_to_rfft():
    comp_rfft = rfft_to_fft(dataset1['moment0'][0])
    test_fft = np.abs(np.fft.fftn(dataset1['moment0'][0]))

    npt.assert_allclose(test_fft, comp_rfft)
Fix and update the rfft tests
Fix and update the rfft tests
Python
mit
e-koch/TurbuStat,Astroua/TurbuStat
python
## Code Before: from turbustat.statistics.rfft_to_fft import rfft_to_fft
from ._testing_data import dataset1

import numpy as np
import numpy.testing as npt

from unittest import TestCase


class testRFFT(TestCase):
    """docstring for testRFFT"""
    def __init__(self):
        self.dataset1 = dataset1

        self.comp_rfft = rfft_to_fft(self.dataset1)

    def rfft_to_rfft(self):
        test_rfft = np.abs(np.fft.rfftn(self.dataset1))

        shape2 = test_rfft.shape[-1]

        npt.assert_allclose(test_rfft, self.comp_rfft[:, :, :shape2+1])

    def fft_to_rfft(self):
        test_fft = np.abs(np.fft.fftn(self.dataset1))

        npt.assert_allclose(test_fft, self.comp_rfft)

## Instruction: Fix and update the rfft tests

## Code After: import pytest

from ..statistics.rfft_to_fft import rfft_to_fft
from ._testing_data import dataset1

import numpy as np
import numpy.testing as npt


def test_rfft_to_rfft():
    comp_rfft = rfft_to_fft(dataset1['moment0'][0])
    test_rfft = np.abs(np.fft.rfftn(dataset1['moment0'][0]))

    shape2 = test_rfft.shape[-1]

    npt.assert_allclose(test_rfft, comp_rfft[:, :shape2])


def test_fft_to_rfft():
    comp_rfft = rfft_to_fft(dataset1['moment0'][0])
    test_fft = np.abs(np.fft.fftn(dataset1['moment0'][0]))

    npt.assert_allclose(test_fft, comp_rfft)
77e10cc4e9f1dca34b4fc9970cd11406ff2e9fbc
src/Faker/Provider/Computer.php
src/Faker/Provider/Computer.php
<?php

namespace Faker\Provider;

class Computer extends \Faker\Provider\Base
{
    private static $macAddressDigits = array(
        "0", "1", "2", "3", "4", "5", "6", "7", "8", "9", "A", "B", "C", "D", "E", "F"
    );

    /**
     * @example '32:F1:39:2F:D6:18'
     */
    public static function macAddress()
    {
        $digits = self::$macAddressDigits;
        for ($i=0; $i<6; $i++) {
            shuffle($digits);
            $mac[] = $digits[0] . $digits[1];
        }
        $mac = implode(':', $mac);

        return $mac;
    }

    /**
     * @example '10.1.1.17'
     */
    public static function localIp()
    {
        $start = ['10','192'];
        $ip = $start[rand(0, 1)];
        if ($ip === '192') {
            $ip .= '.168';
        } else {
            $ip .= '.' . rand(0, 255);
        }
        $ip .= sprintf('.%s.%s', rand(0, 255), rand(0, 255));

        return $ip;
    }
}
<?php

namespace Faker\Provider;

class Computer extends \Faker\Provider\Base
{
    private static $macAddressDigits = array(
        "0", "1", "2", "3", "4", "5", "6", "7", "8", "9", "A", "B", "C", "D", "E", "F"
    );

    /**
     * @example '32:F1:39:2F:D6:18'
     */
    public static function macAddress()
    {
        for ($i=0; $i<6; $i++) {
            $mac[] = static::randomElement(self::$macAddressDigits) . static::randomElement(self::$macAddressDigits);
        }
        $mac = implode(':', $mac);

        return $mac;
    }

    /**
     * @example '10.1.1.17'
     */
    public static function localIp()
    {
        $start = array('10','192');
        $ip = $start[static::randomNumber(0, 1)];
        if ($ip === '192') {
            $ip .= '.168';
        } else {
            $ip .= '.' . static::randomNumber(0, 255);
        }
        $ip .= sprintf('.%s.%s', static::randomNumber(0, 255), static::randomNumber(0, 255));

        return $ip;
    }
}
Use randomNumber instead of rand for better randomness. Use randomElement for picking elements from the allowed digits to make it possible for both digits of a MAC address block to be the same. Fixed for test failing on PHP 5.3.
Use randomNumber instead of rand for better randomness. Use randomElement for picking elements from the allowed digits to make it possible for both digits of a MAC address block to be the same. Fixed for test failing on PHP 5.3.
PHP
mit
kkiernan/Faker,nikmauro/Faker,guillaumewf3/Faker,d3trax/Faker,brainrepo/Faker,syj610226/Faker,luisbrito/Faker,muya/Faker,pathirana/Faker,xfxf/Faker-PHP,jadb/Faker,mseshachalam/Faker,cenxun/Faker,nalekberov/Faker,cjaoude/Faker,antonsofyan/Faker,drakakisgeo/Faker,selmonal/Faker,vlakoff/Faker,stof/Faker,chrismoulton/Faker,datagit/Faker,duchaiweb/Faker-1,mousavian/Faker,ravage84/Faker,CodeYellowBV/Faker,shunsuke-takahashi/Faker,xfxf/Faker,zeropool/Faker,oshancsedu/Faker,bmitch/Faker,davidyell/Faker,oswaldderiemaecker/Faker,jeffaustin81/Faker,igorsantos07/Faker,BePsvPT/Faker,kevinrodbe/Faker,Beanhunter/Faker,huang53798584/Faker,kuldipem/Faker,matriphe/Faker,splp/Faker,simonfork/Faker,localheinz/Faker,dongnhut/Faker,Balamir/Faker,ivyhjk/Faker,lasselehtinen/Faker,alexlondon07/Faker
php
## Code Before: <?php

namespace Faker\Provider;

class Computer extends \Faker\Provider\Base
{
    private static $macAddressDigits = array(
        "0", "1", "2", "3", "4", "5", "6", "7", "8", "9", "A", "B", "C", "D", "E", "F"
    );

    /**
     * @example '32:F1:39:2F:D6:18'
     */
    public static function macAddress()
    {
        $digits = self::$macAddressDigits;
        for ($i=0; $i<6; $i++) {
            shuffle($digits);
            $mac[] = $digits[0] . $digits[1];
        }
        $mac = implode(':', $mac);

        return $mac;
    }

    /**
     * @example '10.1.1.17'
     */
    public static function localIp()
    {
        $start = ['10','192'];
        $ip = $start[rand(0, 1)];
        if ($ip === '192') {
            $ip .= '.168';
        } else {
            $ip .= '.' . rand(0, 255);
        }
        $ip .= sprintf('.%s.%s', rand(0, 255), rand(0, 255));

        return $ip;
    }
}

## Instruction: Use randomNumber instead of rand for better randomness. Use randomElement for picking elements from the allowed digits to make it possible for both digits of a MAC address block to be the same. Fixed for test failing on PHP 5.3.

## Code After: <?php

namespace Faker\Provider;

class Computer extends \Faker\Provider\Base
{
    private static $macAddressDigits = array(
        "0", "1", "2", "3", "4", "5", "6", "7", "8", "9", "A", "B", "C", "D", "E", "F"
    );

    /**
     * @example '32:F1:39:2F:D6:18'
     */
    public static function macAddress()
    {
        for ($i=0; $i<6; $i++) {
            $mac[] = static::randomElement(self::$macAddressDigits) . static::randomElement(self::$macAddressDigits);
        }
        $mac = implode(':', $mac);

        return $mac;
    }

    /**
     * @example '10.1.1.17'
     */
    public static function localIp()
    {
        $start = array('10','192');
        $ip = $start[static::randomNumber(0, 1)];
        if ($ip === '192') {
            $ip .= '.168';
        } else {
            $ip .= '.' . static::randomNumber(0, 255);
        }
        $ip .= sprintf('.%s.%s', static::randomNumber(0, 255), static::randomNumber(0, 255));

        return $ip;
    }
}
8da03723bf34150301f5256547fa1b9420135578
addon/templates/components/md-checkbox.hbs
addon/templates/components/md-checkbox.hbs
{{input type="checkbox" class="materialize-selectable-item-input" checked=isSelected disabled=disabled}}
<label class="materialize-selectable-item-label">{{name}}{{yield}}</label>
disabled: {{disabled}}
{{input type="checkbox" class="materialize-selectable-item-input" checked=isSelected disabled=disabled}}
<label class="materialize-selectable-item-label">{{name}}{{yield}}</label>
Fix leftover "disabled" debug legend on checkbox
Fix leftover "disabled" debug legend on checkbox
Handlebars
mit
mgood/ember-cli-materialize,mdehoog/ember-cli-materialize,seanpdoyle/ember-cli-materialize,hankthetank/ember-cli-materialize,rynam0/ember-cli-materialize,dortort/ember-cli-materialize,sgasser/ember-cli-materialize,tmock12/ember-cli-materialize,mike-north/ember-cli-materialize,bashmach/ember-cli-materialize,sgasser/ember-cli-materialize,unmanbearpig/ember-cli-materialize,alexdiliberto/ember-cli-materialize,MattMSumner/ember-cli-materialize,mgood/ember-cli-materialize,floqqi/ember-cli-materialize,mdehoog/ember-cli-materialize,foxnewsnetwork/ember-cli-materialize,rynam0/ember-cli-materialize,dortort/ember-cli-materialize,unmanbearpig/ember-cli-materialize,tmock12/ember-cli-materialize,seanpdoyle/ember-cli-materialize,truenorth/ember-cli-materialize,mike-north/ember-cli-materialize,mike-north/ember-cli-materialize,alexdiliberto/ember-cli-materialize,alexdiliberto/ember-cli-materialize,MattMSumner/ember-cli-materialize,tmock12/ember-cli-materialize,floqqi/ember-cli-materialize,floqqi/ember-cli-materialize,greyhwndz/ember-cli-materialize,bashmach/ember-cli-materialize,truenorth/ember-cli-materialize,greyhwndz/ember-cli-materialize,foxnewsnetwork/ember-cli-materialize,foxnewsnetwork/ember-cli-materialize,bashmach/ember-cli-materialize,sgasser/ember-cli-materialize,MattMSumner/ember-cli-materialize,hankthetank/ember-cli-materialize,rynam0/ember-cli-materialize,mgood/ember-cli-materialize,seanpdoyle/ember-cli-materialize,truenorth/ember-cli-materialize,hankthetank/ember-cli-materialize,dortort/ember-cli-materialize,mdehoog/ember-cli-materialize,unmanbearpig/ember-cli-materialize,greyhwndz/ember-cli-materialize
handlebars
## Code Before: {{input type="checkbox" class="materialize-selectable-item-input" checked=isSelected disabled=disabled}}
<label class="materialize-selectable-item-label">{{name}}{{yield}}</label>
disabled: {{disabled}}

## Instruction: Fix leftover "disabled" debug legend on checkbox

## Code After: {{input type="checkbox" class="materialize-selectable-item-input" checked=isSelected disabled=disabled}}
<label class="materialize-selectable-item-label">{{name}}{{yield}}</label>
5f24202607afeaa92e1e2145568607f89939ae4a
roles/initrd/provision_initrd/tasks/main.yml
roles/initrd/provision_initrd/tasks/main.yml
---
- name: Update apt cache
  apt: update_cache=yes

- name: Install initrd kernel dependencies
  apt: name=linux-image-{{ initrd_kernel_version }}

- name: Install required packages
  apt: name="{{ item.package }}={{ item.version }}"
  with_items: initrd_package_manifest

- name: Add hook scripts to copy package binaries into the initramfs
  template: src="{{ playbook_dir }}/templates/initramfs_hook.j2" dest=/etc/initramfs-tools/hooks/{{ item }} mode=0755
  with_items: initrd_include_binaries

- name: Add 1_network script to local-top
  copy: src=1_network dest=/etc/initramfs-tools/scripts/local-top/ mode=0755

- name: Replace local script
  copy: src=local dest=/etc/initramfs-tools/scripts/local mode=0755

- name: Enable extra modules
  shell: "echo {{ item }} >> /etc/initramfs-tools/modules"
  with_items:
    - squashfs
    - overlayfs
---
- name: Update apt cache
  apt: update_cache=yes

- name: Install initrd kernel dependencies
  apt: name=linux-image-{{ initrd_kernel_version }}

- name: Install linux image extras for physical machine support
  apt: name=linux-image-extra-{{ initrd_kernel_version }}

- name: Install required packages
  apt: name="{{ item.package }}={{ item.version }}"
  with_items: initrd_package_manifest

- name: Add hook scripts to copy package binaries into the initramfs
  template: src="{{ playbook_dir }}/templates/initramfs_hook.j2" dest=/etc/initramfs-tools/hooks/{{ item }} mode=0755
  with_items: initrd_include_binaries

- name: Add 1_network script to local-top
  copy: src=1_network dest=/etc/initramfs-tools/scripts/local-top/ mode=0755

- name: Replace local script
  copy: src=local dest=/etc/initramfs-tools/scripts/local mode=0755

- name: Enable extra modules
  shell: "echo {{ item }} >> /etc/initramfs-tools/modules"
  with_items:
    - squashfs
    - overlayfs
Add linux-image-extras package to initrd build environment
Add linux-image-extras package to initrd build environment
YAML
apache-2.0
DavidjohnBlodgett/on-imagebuilder,DavidjohnBlodgett/on-imagebuilder
yaml
## Code Before: ---
- name: Update apt cache
  apt: update_cache=yes

- name: Install initrd kernel dependencies
  apt: name=linux-image-{{ initrd_kernel_version }}

- name: Install required packages
  apt: name="{{ item.package }}={{ item.version }}"
  with_items: initrd_package_manifest

- name: Add hook scripts to copy package binaries into the initramfs
  template: src="{{ playbook_dir }}/templates/initramfs_hook.j2" dest=/etc/initramfs-tools/hooks/{{ item }} mode=0755
  with_items: initrd_include_binaries

- name: Add 1_network script to local-top
  copy: src=1_network dest=/etc/initramfs-tools/scripts/local-top/ mode=0755

- name: Replace local script
  copy: src=local dest=/etc/initramfs-tools/scripts/local mode=0755

- name: Enable extra modules
  shell: "echo {{ item }} >> /etc/initramfs-tools/modules"
  with_items:
    - squashfs
    - overlayfs

## Instruction: Add linux-image-extras package to initrd build environment

## Code After: ---
- name: Update apt cache
  apt: update_cache=yes

- name: Install initrd kernel dependencies
  apt: name=linux-image-{{ initrd_kernel_version }}

- name: Install linux image extras for physical machine support
  apt: name=linux-image-extra-{{ initrd_kernel_version }}

- name: Install required packages
  apt: name="{{ item.package }}={{ item.version }}"
  with_items: initrd_package_manifest

- name: Add hook scripts to copy package binaries into the initramfs
  template: src="{{ playbook_dir }}/templates/initramfs_hook.j2" dest=/etc/initramfs-tools/hooks/{{ item }} mode=0755
  with_items: initrd_include_binaries

- name: Add 1_network script to local-top
  copy: src=1_network dest=/etc/initramfs-tools/scripts/local-top/ mode=0755

- name: Replace local script
  copy: src=local dest=/etc/initramfs-tools/scripts/local mode=0755

- name: Enable extra modules
  shell: "echo {{ item }} >> /etc/initramfs-tools/modules"
  with_items:
    - squashfs
    - overlayfs
5882c3475afa44e2ad0923b8af00fe1d73ea3a7e
lib/mongoid-ancestry.rb
lib/mongoid-ancestry.rb
module Mongoid
  module Ancestry
    extend ActiveSupport::Concern

    autoload :ClassMethods, 'mongoid-ancestry/class_methods'
    autoload :InstanceMethods, 'mongoid-ancestry/instance_methods'
    autoload :Error, 'mongoid-ancestry/exceptions'

    included do
      cattr_accessor :base_class
      self.base_class = self
    end
  end
end
module Mongoid
  module Ancestry
    extend ActiveSupport::Concern

    autoload :ClassMethods, 'mongoid-ancestry/class_methods'
    autoload :Error, 'mongoid-ancestry/exceptions'

    included do
      cattr_accessor :base_class
      self.base_class = self

      require 'mongoid-ancestry/instance_methods'
      include InstanceMethods
    end
  end
end
Include instance methods manually to avoid ActiveSupport::Concern InstanceMethods deprecation notice
Include instance methods manually to avoid ActiveSupport::Concern InstanceMethods deprecation notice
Ruby
mit
WebEnrich/mongoid-ancestry,skyeagle/mongoid-ancestry
ruby
## Code Before: module Mongoid
  module Ancestry
    extend ActiveSupport::Concern

    autoload :ClassMethods, 'mongoid-ancestry/class_methods'
    autoload :InstanceMethods, 'mongoid-ancestry/instance_methods'
    autoload :Error, 'mongoid-ancestry/exceptions'

    included do
      cattr_accessor :base_class
      self.base_class = self
    end
  end
end

## Instruction: Include instance methods manually to avoid ActiveSupport::Concern InstanceMethods deprecation notice

## Code After: module Mongoid
  module Ancestry
    extend ActiveSupport::Concern

    autoload :ClassMethods, 'mongoid-ancestry/class_methods'
    autoload :Error, 'mongoid-ancestry/exceptions'

    included do
      cattr_accessor :base_class
      self.base_class = self

      require 'mongoid-ancestry/instance_methods'
      include InstanceMethods
    end
  end
end
4835e7991779ace70fa57fbfaa85813843ea76ba
README.md
README.md
Temperature logger for RPi.

## Usage

1. Install requirements:

   ```
   pip install -r requirements.pip
   ```

2. Add this to /boot/config.txt:

   ```
   # Enable General Purpose I/O
   dtoverlay=w1-gpio
   ```

3. And something like this to crontab (crontab -e):

   ```
   # Log palju temperatures once a minute
   */1 * * * * /usr/bin/python /home/pi/paljumittari/palju.py
   ```
Temperature logger for RPi.

## Usage

1. Install requirements:

   ```
   pip install -r requirements.pip
   ```

2. Add this to /boot/config.txt:

   ```
   # Enable General Purpose I/O
   dtoverlay=w1-gpio
   ```

3. And something like this to crontab (crontab -e):

   ```
   # Log palju temperatures once a minute
   */1 * * * * /usr/bin/python /home/pi/paljumittari/palju.py
   ```

## Optional and notes-to-self

1. Disable starting desktop after boot. Desktop can be launched again with 'startx'.

   ```
   sudo raspi-config
   ```

   Boot Options > Desktop / CLI > Console Autologin

2. Connect to wifi automatically

   ```
   sudo nano /etc/wpa_supplicant/wpa_supplicant.conf
   ```

   ```
   network={
       ssid="ssid"
       psk="password"
   }
   ```
Add instructions for wifi connection and disabling desktop on boot.
Add instructions for wifi connection and disabling desktop on boot.
Markdown
mit
jarrrgh/paljumittari,jarrrgh/paljumittari
markdown
## Code Before: Temperature logger for RPi.

## Usage

1. Install requirements:

   ```
   pip install -r requirements.pip
   ```

2. Add this to /boot/config.txt:

   ```
   # Enable General Purpose I/O
   dtoverlay=w1-gpio
   ```

3. And something like this to crontab (crontab -e):

   ```
   # Log palju temperatures once a minute
   */1 * * * * /usr/bin/python /home/pi/paljumittari/palju.py
   ```

## Instruction: Add instructions for wifi connection and disabling desktop on boot.

## Code After: Temperature logger for RPi.

## Usage

1. Install requirements:

   ```
   pip install -r requirements.pip
   ```

2. Add this to /boot/config.txt:

   ```
   # Enable General Purpose I/O
   dtoverlay=w1-gpio
   ```

3. And something like this to crontab (crontab -e):

   ```
   # Log palju temperatures once a minute
   */1 * * * * /usr/bin/python /home/pi/paljumittari/palju.py
   ```

## Optional and notes-to-self

1. Disable starting desktop after boot. Desktop can be launched again with 'startx'.

   ```
   sudo raspi-config
   ```

   Boot Options > Desktop / CLI > Console Autologin

2. Connect to wifi automatically

   ```
   sudo nano /etc/wpa_supplicant/wpa_supplicant.conf
   ```

   ```
   network={
       ssid="ssid"
       psk="password"
   }
   ```
04ca8d97bf792ee48b263984cc09e915daa4aace
jsk_apc2015_common/README.rst
jsk_apc2015_common/README.rst
===================
jsk_2015_apc_common
===================

Train classifier with dataset
=============================

Get dataset
-----------

To get dataset at `here <http://rll.berkeley.edu/amazon_picking_challenge/>`_, and run::

  python scripts/download_dataset.py -O berkeley_dataset

Process dataset
---------------

Firstly, you need to create mask applied image dataset::

  python scripts/create_mask_applied_dataset.py berkeley_dataset -O berkeley_dataset_mask_applied
===================
jsk_2015_apc_common
===================

Train classifier with dataset
=============================

Get dataset
-----------

To get dataset at `here <http://rll.berkeley.edu/amazon_picking_challenge/>`_, and run::

  python scripts/download_dataset.py -O berkeley_dataset

Process dataset
---------------

Firstly, you need to create mask applied image dataset::

  python scripts/create_mask_applied_dataset.py berkeley_dataset -O berkeley_dataset_mask_applied

Gazebo models
=============

The mesh files under ``models/`` are originally created by
Arjun Singh, Karthik Narayan, Ben Kehoe, Sachin Patil, Ken Goldberg, Pieter Abbeel
in Robot Learning Lab, UC Berkeley.
Their website is `here <http://rll.berkeley.edu/amazon_picking_challenge>`_.
Add credit for gazebo models
[jsk_2015_apc_common] Add credit for gazebo models
reStructuredText
bsd-3-clause
pazeshun/jsk_apc,pazeshun/jsk_apc,pazeshun/jsk_apc,pazeshun/jsk_apc,pazeshun/jsk_apc
restructuredtext
## Code Before: ===================
jsk_2015_apc_common
===================

Train classifier with dataset
=============================

Get dataset
-----------

To get dataset at `here <http://rll.berkeley.edu/amazon_picking_challenge/>`_, and run::

  python scripts/download_dataset.py -O berkeley_dataset

Process dataset
---------------

Firstly, you need to create mask applied image dataset::

  python scripts/create_mask_applied_dataset.py berkeley_dataset -O berkeley_dataset_mask_applied

## Instruction: [jsk_2015_apc_common] Add credit for gazebo models

## Code After: ===================
jsk_2015_apc_common
===================

Train classifier with dataset
=============================

Get dataset
-----------

To get dataset at `here <http://rll.berkeley.edu/amazon_picking_challenge/>`_, and run::

  python scripts/download_dataset.py -O berkeley_dataset

Process dataset
---------------

Firstly, you need to create mask applied image dataset::

  python scripts/create_mask_applied_dataset.py berkeley_dataset -O berkeley_dataset_mask_applied

Gazebo models
=============

The mesh files under ``models/`` are originally created by
Arjun Singh, Karthik Narayan, Ben Kehoe, Sachin Patil, Ken Goldberg, Pieter Abbeel
in Robot Learning Lab, UC Berkeley.
Their website is `here <http://rll.berkeley.edu/amazon_picking_challenge>`_.
72d33ea47458cace13dac920ce2a82e55f83caba
statsmodels/stats/tests/test_outliers_influence.py
statsmodels/stats/tests/test_outliers_influence.py
from numpy.testing import assert_almost_equal

from statsmodels.datasets import statecrime, get_rdataset
from statsmodels.regression.linear_model import OLS
from statsmodels.stats.outliers_influence import reset_ramsey
from statsmodels.stats.outliers_influence import variance_inflation_factor
from statsmodels.tools import add_constant
import numpy as np

data = statecrime.load_pandas().data


def test_reset_stata():
    mod = OLS(data.violent, add_constant(data[['murder', 'hs_grad']]))
    res = mod.fit()
    stat = reset_ramsey(res, degree=4)
    assert_almost_equal(stat.fvalue[0, 0], 1.52, decimal=2)
    assert_almost_equal(stat.pvalue, 0.2221, decimal=4)

    exog_idx = list(data.columns).index('urban')
    X_arr = np.asarray(data)
    vif = variance_inflation_factor(X_arr, exog_idx)
    assert_almost_equal(vif, 16.4394, decimal=4)
from numpy.testing import assert_almost_equal

from statsmodels.datasets import statecrime
from statsmodels.regression.linear_model import OLS
from statsmodels.stats.outliers_influence import reset_ramsey
from statsmodels.stats.outliers_influence import variance_inflation_factor
from statsmodels.tools import add_constant
import numpy as np

data = statecrime.load_pandas().data


def test_reset_stata():
    mod = OLS(data.violent, add_constant(data[['murder', 'hs_grad']]))
    res = mod.fit()
    stat = reset_ramsey(res, degree=4)
    assert_almost_equal(stat.fvalue[0, 0], 1.52, decimal=2)
    assert_almost_equal(stat.pvalue, 0.2221, decimal=4)

    exog_idx = list(data.columns).index('urban')
    X_arr = np.asarray(data)
    vif = variance_inflation_factor(X_arr, exog_idx)
    assert_almost_equal(vif, 16.4394, decimal=4)
Add pandas dataframe capability in variance_inflation_factor
ENH: Add pandas dataframe capability in variance_inflation_factor
Python
bsd-3-clause
bashtage/statsmodels,josef-pkt/statsmodels,josef-pkt/statsmodels,bashtage/statsmodels,bashtage/statsmodels,statsmodels/statsmodels,josef-pkt/statsmodels,josef-pkt/statsmodels,statsmodels/statsmodels,statsmodels/statsmodels,statsmodels/statsmodels,josef-pkt/statsmodels,josef-pkt/statsmodels,statsmodels/statsmodels,bashtage/statsmodels,bashtage/statsmodels,bashtage/statsmodels,statsmodels/statsmodels
python
## Code Before: from numpy.testing import assert_almost_equal

from statsmodels.datasets import statecrime, get_rdataset
from statsmodels.regression.linear_model import OLS
from statsmodels.stats.outliers_influence import reset_ramsey
from statsmodels.stats.outliers_influence import variance_inflation_factor
from statsmodels.tools import add_constant
import numpy as np

data = statecrime.load_pandas().data


def test_reset_stata():
    mod = OLS(data.violent, add_constant(data[['murder', 'hs_grad']]))
    res = mod.fit()
    stat = reset_ramsey(res, degree=4)
    assert_almost_equal(stat.fvalue[0, 0], 1.52, decimal=2)
    assert_almost_equal(stat.pvalue, 0.2221, decimal=4)

    exog_idx = list(data.columns).index('urban')
    X_arr = np.asarray(data)
    vif = variance_inflation_factor(X_arr, exog_idx)
    assert_almost_equal(vif, 16.4394, decimal=4)

## Instruction: ENH: Add pandas dataframe capability in variance_inflation_factor

## Code After: from numpy.testing import assert_almost_equal

from statsmodels.datasets import statecrime
from statsmodels.regression.linear_model import OLS
from statsmodels.stats.outliers_influence import reset_ramsey
from statsmodels.stats.outliers_influence import variance_inflation_factor
from statsmodels.tools import add_constant
import numpy as np

data = statecrime.load_pandas().data


def test_reset_stata():
    mod = OLS(data.violent, add_constant(data[['murder', 'hs_grad']]))
    res = mod.fit()
    stat = reset_ramsey(res, degree=4)
    assert_almost_equal(stat.fvalue[0, 0], 1.52, decimal=2)
    assert_almost_equal(stat.pvalue, 0.2221, decimal=4)

    exog_idx = list(data.columns).index('urban')
    X_arr = np.asarray(data)
    vif = variance_inflation_factor(X_arr, exog_idx)
    assert_almost_equal(vif, 16.4394, decimal=4)
1eba4c3d6cbff506ed61c17b93db45bbf196b8d8
platforms/ios/framework/TangramMap.h
platforms/ios/framework/TangramMap.h
//
//  TangramMap.h
//  TangramMap
//
//  Created by Matt Smollinger on 7/8/16.
//
//

#import <UIKit/UIKit.h>

/// Project version number for TangramMap.
FOUNDATION_EXPORT double TangramMapVersionNumber;

/// Project version string for TangramMap.
FOUNDATION_EXPORT const unsigned char TangramMapVersionString[];

// In this header, you should import all the public headers of your framework using statements like #import <TangramMap/PublicHeader.h>

#import <TangramMap/TGMapViewController.h>
//
//  TangramMap.h
//  TangramMap
//
//  Created by Matt Smollinger on 7/8/16.
//  Updated by Karim Naaji on 2/28/17.
//  Copyright (c) 2017 Mapzen. All rights reserved.
//

#import <UIKit/UIKit.h>

/// Project version number for TangramMap.
FOUNDATION_EXPORT double TangramMapVersionNumber;

/// Project version string for TangramMap.
FOUNDATION_EXPORT const unsigned char TangramMapVersionString[];

#import <TangramMap/TGMapViewController.h>
#import <TangramMap/TGMapData.h>
#import <TangramMap/TGGeoPoint.h>
#import <TangramMap/TGGeoPolygon.h>
#import <TangramMap/TGGeoPolyline.h>
#import <TangramMap/TGEaseType.h>
#import <TangramMap/TGHttpHandler.h>
#import <TangramMap/TGMarker.h>
#import <TangramMap/TGMapData.h>
#import <TangramMap/TGSceneUpdate.h>
#import <TangramMap/TGMarkerPickResult.h>
#import <TangramMap/TGLabelPickResult.h>
Update umbrella header with public interface
Update umbrella header with public interface
C
mit
quitejonny/tangram-es,cleeus/tangram-es,quitejonny/tangram-es,quitejonny/tangram-es,cleeus/tangram-es,cleeus/tangram-es,cleeus/tangram-es,quitejonny/tangram-es,tangrams/tangram-es,quitejonny/tangram-es,cleeus/tangram-es,tangrams/tangram-es,tangrams/tangram-es,tangrams/tangram-es,tangrams/tangram-es,tangrams/tangram-es,quitejonny/tangram-es,tangrams/tangram-es,cleeus/tangram-es
c
## Code Before: //
//  TangramMap.h
//  TangramMap
//
//  Created by Matt Smollinger on 7/8/16.
//
//

#import <UIKit/UIKit.h>

/// Project version number for TangramMap.
FOUNDATION_EXPORT double TangramMapVersionNumber;

/// Project version string for TangramMap.
FOUNDATION_EXPORT const unsigned char TangramMapVersionString[];

// In this header, you should import all the public headers of your framework using statements like #import <TangramMap/PublicHeader.h>

#import <TangramMap/TGMapViewController.h>

## Instruction: Update umbrella header with public interface

## Code After: //
//  TangramMap.h
//  TangramMap
//
//  Created by Matt Smollinger on 7/8/16.
//  Updated by Karim Naaji on 2/28/17.
//  Copyright (c) 2017 Mapzen. All rights reserved.
//

#import <UIKit/UIKit.h>

/// Project version number for TangramMap.
FOUNDATION_EXPORT double TangramMapVersionNumber;

/// Project version string for TangramMap.
FOUNDATION_EXPORT const unsigned char TangramMapVersionString[];

#import <TangramMap/TGMapViewController.h>
#import <TangramMap/TGMapData.h>
#import <TangramMap/TGGeoPoint.h>
#import <TangramMap/TGGeoPolygon.h>
#import <TangramMap/TGGeoPolyline.h>
#import <TangramMap/TGEaseType.h>
#import <TangramMap/TGHttpHandler.h>
#import <TangramMap/TGMarker.h>
#import <TangramMap/TGMapData.h>
#import <TangramMap/TGSceneUpdate.h>
#import <TangramMap/TGMarkerPickResult.h>
#import <TangramMap/TGLabelPickResult.h>
604101ec58d8dd6e6af4aa61c0b2f0d382f89931
doc/source/devref/linuxbridge_agent.rst
doc/source/devref/linuxbridge_agent.rst
L2 Networking with Linux Bridge
-------------------------------
===============================
L2 Networking with Linux Bridge
===============================

This Agent uses the `Linux Bridge
<http://www.linuxfoundation.org/collaborate/workgroups/networking/bridge>`_ to
provide L2 connectivity for VM instances running on the compute node to the
public network. A graphical illustration of the deployment can be found in
`OpenStack Admin Guide Linux Bridge
<http://docs.openstack.org/admin-guide-cloud/content/under_the_hood_linuxbridge.html>`_

In most common deployments, there is a compute and a network node. On both the
compute and the network node, the Linux Bridge Agent will manage virtual
switches, connectivity among them, and interaction via virtual ports with other
network components such as namespaces and underlying interfaces. Additionally,
on the compute node, the Linux Bridge Agent will manage security groups.

Three use cases and their packet flow are documented as follows:

1. `Legacy implementation with Linux Bridge
   <http://docs.openstack.org/networking-guide/deploy_scenario1b.html>`_

2. `High Availability using L3HA with Linux Bridge
   <http://docs.openstack.org/networking-guide/deploy_scenario3b.html>`_

3. `Provider networks with Linux Bridge
   <http://docs.openstack.org/networking-guide/deploy_scenario4b.html>`_
Add documentation for Linux Bridge (previously missing)
Add documentation for Linux Bridge (previously missing) Change-Id: I092b609f43b37ed85d08bc80d1d048b945abe222 Closes-Bug: #1455979
reStructuredText
apache-2.0
huntxu/neutron,noironetworks/neutron,SamYaple/neutron,wolverineav/neutron,mahak/neutron,paninetworks/neutron,silenci/neutron,cisco-openstack/neutron,swdream/neutron,apporc/neutron,javaos74/neutron,mahak/neutron,takeshineshiro/neutron,JianyuWang/neutron,eonpatapon/neutron,watonyweng/neutron,dhanunjaya/neutron,vivekanand1101/neutron,asgard-lab/neutron,suneeth51/neutron,klmitch/neutron,suneeth51/neutron,noironetworks/neutron,skyddv/neutron,mmnelemane/neutron,skyddv/neutron,adelina-t/neutron,vivekanand1101/neutron,javaos74/neutron,swdream/neutron,MaximNevrov/neutron,chitr/neutron,mahak/neutron,eayunstack/neutron,yanheven/neutron,igor-toga/local-snat,glove747/liberty-neutron,jacknjzhou/neutron,openstack/neutron,sasukeh/neutron,barnsnake351/neutron,mattt416/neutron,eayunstack/neutron,bigswitch/neutron,takeshineshiro/neutron,cisco-openstack/neutron,yanheven/neutron,jumpojoy/neutron,jumpojoy/neutron,JianyuWang/neutron,barnsnake351/neutron,dhanunjaya/neutron,paninetworks/neutron,glove747/liberty-neutron,neoareslinux/neutron,sasukeh/neutron,openstack/neutron,jacknjzhou/neutron,apporc/neutron,cloudbase/neutron,watonyweng/neutron,bigswitch/neutron,wenhuizhang/neutron,dims/neutron,huntxu/neutron,dims/neutron,openstack/neutron,igor-toga/local-snat,cloudbase/neutron,mandeepdhami/neutron,sebrandon1/neutron,mmnelemane/neutron,adelina-t/neutron,wolverineav/neutron,klmitch/neutron,asgard-lab/neutron,MaximNevrov/neutron,mandeepdhami/neutron,shahbazn/neutron,eonpatapon/neutron,shahbazn/neutron,chitr/neutron,silenci/neutron,wenhuizhang/neutron,sebrandon1/neutron,SamYaple/neutron,neoareslinux/neutron,mattt416/neutron
restructuredtext
## Code Before: L2 Networking with Linux Bridge
-------------------------------

## Instruction: Add documentation for Linux Bridge (previously missing) Change-Id: I092b609f43b37ed85d08bc80d1d048b945abe222 Closes-Bug: #1455979

## Code After: ===============================
L2 Networking with Linux Bridge
===============================

This Agent uses the `Linux Bridge
<http://www.linuxfoundation.org/collaborate/workgroups/networking/bridge>`_ to
provide L2 connectivity for VM instances running on the compute node to the
public network. A graphical illustration of the deployment can be found in
`OpenStack Admin Guide Linux Bridge
<http://docs.openstack.org/admin-guide-cloud/content/under_the_hood_linuxbridge.html>`_

In most common deployments, there is a compute and a network node. On both the
compute and the network node, the Linux Bridge Agent will manage virtual
switches, connectivity among them, and interaction via virtual ports with other
network components such as namespaces and underlying interfaces. Additionally,
on the compute node, the Linux Bridge Agent will manage security groups.

Three use cases and their packet flow are documented as follows:

1. `Legacy implementation with Linux Bridge
   <http://docs.openstack.org/networking-guide/deploy_scenario1b.html>`_

2. `High Availability using L3HA with Linux Bridge
   <http://docs.openstack.org/networking-guide/deploy_scenario3b.html>`_

3. `Provider networks with Linux Bridge
   <http://docs.openstack.org/networking-guide/deploy_scenario4b.html>`_
83831a3434cdaf0a5ca214dfc4bd7fec65d4ffac
fastai/vision/models/tvm.py
fastai/vision/models/tvm.py
from torchvision.models import ResNet,resnet18,resnet34,resnet50,resnet101,resnet152
from torchvision.models import SqueezeNet,squeezenet1_0,squeezenet1_1
from torchvision.models import densenet121,densenet169,densenet201,densenet161
from torchvision.models import vgg11_bn,vgg13_bn,vgg16_bn,vgg19_bn,alexnet
from torchvision.models import *
import types as _t

_g = globals()
for _k, _v in list(_g.items()):
    if (
        isinstance(_v, _t.ModuleType) and _v.__name__.startswith("torchvision.models")
    ) or (callable(_v) and _v.__module__ == "torchvision.models._api"):
        del _g[_k]

del _k, _v, _g, _t
Add latest TorchVision models on fastai
Add latest TorchVision models on fastai
Python
apache-2.0
fastai/fastai
python
## Code Before: from torchvision.models import ResNet,resnet18,resnet34,resnet50,resnet101,resnet152
from torchvision.models import SqueezeNet,squeezenet1_0,squeezenet1_1
from torchvision.models import densenet121,densenet169,densenet201,densenet161
from torchvision.models import vgg11_bn,vgg13_bn,vgg16_bn,vgg19_bn,alexnet

## Instruction: Add latest TorchVision models on fastai

## Code After: from torchvision.models import *
import types as _t

_g = globals()
for _k, _v in list(_g.items()):
    if (
        isinstance(_v, _t.ModuleType) and _v.__name__.startswith("torchvision.models")
    ) or (callable(_v) and _v.__module__ == "torchvision.models._api"):
        del _g[_k]

del _k, _v, _g, _t
d8c9ede46ce6e865f68bf56c97e6fc62b3166ef1
spec/client/preprocessors/spec.visualisation_fallback.js
spec/client/preprocessors/spec.visualisation_fallback.js
define([
  'client/preprocessors/visualisation_fallback'
],
function (visualisationFallback) {
  describe('Module actions', function () {

    var originalModernizr = visualisationFallback.Modernizr;
    var el;
    beforeEach(function () {
      visualisationFallback.Modernizr = {};
      el = $('<div class="visualisation-inner" data-src="spec/client/preprocessors/transparent.gif">original content</div>');
      $('body').append(el);
    });

    afterEach(function () {
      visualisationFallback.Modernizr = originalModernizr;
      $('.visualisation-inner').remove();
    });

    it('does nothing when the browser supports SVG', function () {
      visualisationFallback.Modernizr.inlinesvg = true;
      visualisationFallback();
      expect(el.html()).toEqual('original content');
    });

    it('replaces the fallback container content with a fallback image', function () {
      visualisationFallback.Modernizr.inlinesvg = false;
      visualisationFallback();
      waitsFor(function () {
        return (el.html() !== 'original content');
      });
      runs(function () {
        expect(el.html()).toEqual('<img src="spec/client/preprocessors/transparent.gif">');
      });
    });

  });
});
define([
  'client/preprocessors/visualisation_fallback',
  'modernizr'
],
function (visualisationFallback, Modernizr) {
  describe('Module actions', function () {

    var el;
    beforeEach(function () {
      el = $('<div class="visualisation-inner" data-src="spec/client/preprocessors/transparent.gif">original content</div>');
      $('body').append(el);
    });

    afterEach(function () {
      $('.visualisation-inner').remove();
    });

    it('does nothing when the browser supports SVG', function () {
      Modernizr.inlinesvg = true;
      visualisationFallback();
      expect(el.html()).toEqual('original content');
    });

    it('replaces the fallback container content with a fallback image', function () {
      Modernizr.inlinesvg = false;
      visualisationFallback();
      waitsFor(function () {
        return (el.html() !== 'original content');
      });
      runs(function () {
        expect(el.html()).toEqual('<img src="spec/client/preprocessors/transparent.gif">');
      });
    });

  });
});
Fix tests for image fallbacks
Fix tests for image fallbacks
JavaScript
mit
alphagov/spotlight,keithiopia/spotlight,tijmenb/spotlight,tijmenb/spotlight,alphagov/spotlight,keithiopia/spotlight,keithiopia/spotlight,tijmenb/spotlight,alphagov/spotlight
javascript
## Code Before:
define([
  'client/preprocessors/visualisation_fallback'
],
function (visualisationFallback) {
  describe('Module actions', function () {
    var originalModernizr = visualisationFallback.Modernizr;
    var el;

    beforeEach(function () {
      visualisationFallback.Modernizr = {};
      el = $('<div class="visualisation-inner" data-src="spec/client/preprocessors/transparent.gif">original content</div>');
      $('body').append(el);
    });

    afterEach(function () {
      visualisationFallback.Modernizr = originalModernizr;
      $('.visualisation-inner').remove();
    });

    it('does nothing when the browser supports SVG', function () {
      visualisationFallback.Modernizr.inlinesvg = true;
      visualisationFallback();
      expect(el.html()).toEqual('original content');
    });

    it('replaces the fallback container content with a fallback image', function () {
      visualisationFallback.Modernizr.inlinesvg = false;
      visualisationFallback();

      waitsFor(function () {
        return (el.html() !== 'original content');
      });

      runs(function () {
        expect(el.html()).toEqual('<img src="spec/client/preprocessors/transparent.gif">');
      });
    });
  });
});

## Instruction:
Fix tests for image fallbacks

## Code After:
define([
  'client/preprocessors/visualisation_fallback',
  'modernizr'
],
function (visualisationFallback, Modernizr) {
  describe('Module actions', function () {
    var el;

    beforeEach(function () {
      el = $('<div class="visualisation-inner" data-src="spec/client/preprocessors/transparent.gif">original content</div>');
      $('body').append(el);
    });

    afterEach(function () {
      $('.visualisation-inner').remove();
    });

    it('does nothing when the browser supports SVG', function () {
      Modernizr.inlinesvg = true;
      visualisationFallback();
      expect(el.html()).toEqual('original content');
    });

    it('replaces the fallback container content with a fallback image', function () {
      Modernizr.inlinesvg = false;
      visualisationFallback();

      waitsFor(function () {
        return (el.html() !== 'original content');
      });

      runs(function () {
        expect(el.html()).toEqual('<img src="spec/client/preprocessors/transparent.gif">');
      });
    });
  });
});
4d04e8726007974e3f1cc5981f579ea6ba2213a7
spec/integration/mince_interface_spec.rb
spec/integration/mince_interface_spec.rb
require_relative '../../lib/hashy_db'
require 'mince/shared_examples/interface_example'

describe 'Mince Interface with HashyDb' do
  before do
    Mince::Config.interface = Mince::HashyDb::Interface
  end

  it_behaves_like 'a mince interface'
end
require_relative '../../lib/hashy_db'
require 'mince/shared_examples/interface_example'

describe 'Mince Interface with HashyDb' do
  before do
    Mince::Config.interface = Mince::HashyDb::Interface
  end

  after do
    Mince::HashyDb::Interface.clear
  end

  it_behaves_like 'a mince interface'
end
Clear data store when integration tests finish so that we can run ints and units together
Clear data store when integration tests finish so that we can run ints and units together
Ruby
mit
coffeencoke/hashy_db
ruby
## Code Before:
require_relative '../../lib/hashy_db'
require 'mince/shared_examples/interface_example'

describe 'Mince Interface with HashyDb' do
  before do
    Mince::Config.interface = Mince::HashyDb::Interface
  end

  it_behaves_like 'a mince interface'
end

## Instruction:
Clear data store when integration tests finish so that we can run ints and units together

## Code After:
require_relative '../../lib/hashy_db'
require 'mince/shared_examples/interface_example'

describe 'Mince Interface with HashyDb' do
  before do
    Mince::Config.interface = Mince::HashyDb::Interface
  end

  after do
    Mince::HashyDb::Interface.clear
  end

  it_behaves_like 'a mince interface'
end
c676bcede639977ebbf23983d90c64ac579e8c8b
_posts/prods/2022-02-13-master-mode7.md
_posts/prods/2022-02-13-master-mode7.md
---
layout: prods_post
category: [posts, prods]
tags: lovebyte2022
rank: 21st in Oldschool 256b intro
title: Master / Mode7
img: mmode7-screenshot.png
alt: image-alt
authors: VectorEyes
team: Bitshifters
year: 2022
type: 256 Byte Demo
platform: BBC Master
download: mmode7.ssd
video: https://www.youtube.com/watch?v=-dXdqsF7o70
---

**Master / Mode7**

A 256b (well, 254b!) intro for the BBC Master. By VectorEyes of Bitshifters.

This started out as an experiment to see how faithfully I could reproduce the *astounding* C64 256b demo “A Mind is Born”. The answer turned out to be: not very well at all, so I took things in a different direction and hacked together something that uses the BBC Master’s Teletext capabilities (Mode 7) to display a few different patterns. I hope you enjoy it!

The music is semi-random. I was incredibly lucky to capture a video where the last few bars sound like a proper ‘ending’. At some point soon I'll make a version that seeds the RNG properly and produces fully consistent music.

VectorEyes, 22:15pm GMT, 10th Feb 2022 (45 minutes before submission deadline!)
---
layout: prods_post
category: [posts, prods]
tags: lovebyte2022
rank: 21st in Oldschool 256b intro
title: Master / Mode7
img: mmode7-screenshot.png
alt: image-alt
authors: VectorEyes
team: Bitshifters
year: 2022
type: 256 Byte Demo
platform: BBC Master
download: mmode7.ssd
pouet: https://www.pouet.net/prod.php?which=91097
video: https://www.youtube.com/watch?v=-dXdqsF7o70
---

**Master / Mode7**

A 256b (well, 254b!) intro for the BBC Master. By VectorEyes of Bitshifters.

This started out as an experiment to see how faithfully I could reproduce the *astounding* C64 256b demo “A Mind is Born”. The answer turned out to be: not very well at all, so I took things in a different direction and hacked together something that uses the BBC Master’s Teletext capabilities (Mode 7) to display a few different patterns. I hope you enjoy it!

The music is semi-random. I was incredibly lucky to capture a video where the last few bars sound like a proper ‘ending’. At some point soon I'll make a version that seeds the RNG properly and produces fully consistent music.

VectorEyes, 22:15pm GMT, 10th Feb 2022 (45 minutes before submission deadline!)
Add pouet link for Master / Mode7
Add pouet link for Master / Mode7
Markdown
apache-2.0
bitshifters/bitshifters.github.io,bitshifters/bitshifters.github.io
markdown
## Code Before:
---
layout: prods_post
category: [posts, prods]
tags: lovebyte2022
rank: 21st in Oldschool 256b intro
title: Master / Mode7
img: mmode7-screenshot.png
alt: image-alt
authors: VectorEyes
team: Bitshifters
year: 2022
type: 256 Byte Demo
platform: BBC Master
download: mmode7.ssd
video: https://www.youtube.com/watch?v=-dXdqsF7o70
---

**Master / Mode7**

A 256b (well, 254b!) intro for the BBC Master. By VectorEyes of Bitshifters.

This started out as an experiment to see how faithfully I could reproduce the *astounding* C64 256b demo “A Mind is Born”. The answer turned out to be: not very well at all, so I took things in a different direction and hacked together something that uses the BBC Master’s Teletext capabilities (Mode 7) to display a few different patterns. I hope you enjoy it!

The music is semi-random. I was incredibly lucky to capture a video where the last few bars sound like a proper ‘ending’. At some point soon I'll make a version that seeds the RNG properly and produces fully consistent music.

VectorEyes, 22:15pm GMT, 10th Feb 2022 (45 minutes before submission deadline!)

## Instruction:
Add pouet link for Master / Mode7

## Code After:
---
layout: prods_post
category: [posts, prods]
tags: lovebyte2022
rank: 21st in Oldschool 256b intro
title: Master / Mode7
img: mmode7-screenshot.png
alt: image-alt
authors: VectorEyes
team: Bitshifters
year: 2022
type: 256 Byte Demo
platform: BBC Master
download: mmode7.ssd
pouet: https://www.pouet.net/prod.php?which=91097
video: https://www.youtube.com/watch?v=-dXdqsF7o70
---

**Master / Mode7**

A 256b (well, 254b!) intro for the BBC Master. By VectorEyes of Bitshifters.

This started out as an experiment to see how faithfully I could reproduce the *astounding* C64 256b demo “A Mind is Born”. The answer turned out to be: not very well at all, so I took things in a different direction and hacked together something that uses the BBC Master’s Teletext capabilities (Mode 7) to display a few different patterns. I hope you enjoy it!

The music is semi-random. I was incredibly lucky to capture a video where the last few bars sound like a proper ‘ending’. At some point soon I'll make a version that seeds the RNG properly and produces fully consistent music.

VectorEyes, 22:15pm GMT, 10th Feb 2022 (45 minutes before submission deadline!)
3a4969adc7433f2c459f9d516e30cf64783765e0
wmcore_base/ContainerScripts/pyfutureTest.sh
wmcore_base/ContainerScripts/pyfutureTest.sh
if [ -z "$ghprbPullId" -o -z "$ghprbTargetBranch" ]; then echo "Not all necessary environment variables set: ghprbPullId, ghprbTargetBranch" exit 1 fi source ./env_unittest.sh pushd wmcore_unittest/WMCore export PYTHONPATH=`pwd`/test/python:`pwd`/src/python:${PYTHONPATH} git config remote.origin.url https://github.com/dmwm/WMCore.git git fetch origin pull/${ghprbPullId}/merge:PR_MERGE export COMMIT=`git rev-parse "PR_MERGE^{commit}"` git checkout -f ${COMMIT} futurize -1 src/ test/ > test.patch # Get changed files and analyze for idioms git diff --name-only ${ghprbTargetBranch}..${COMMIT} > changedFiles.txt git diff-tree --name-status -r ${ghprbTargetBranch}..${COMMIT} | egrep "^A" | cut -f 2 > addedFiles.txt while read name; do futurize -f idioms $name >> idioms.patch || true done <changedFiles.txt ${HOME}/ContainerScripts/AnalyzePyFuture.py > added.message cp test.patch idioms.patch added.message ${HOME}/artifacts/ popd
if [ -z "$ghprbPullId" -o -z "$ghprbTargetBranch" ]; then echo "Not all necessary environment variables set: ghprbPullId, ghprbTargetBranch" exit 1 fi source ./env_unittest.sh pushd wmcore_unittest/WMCore export PYTHONPATH=`pwd`/test/python:`pwd`/src/python:${PYTHONPATH} git config remote.origin.url https://github.com/dmwm/WMCore.git git fetch origin pull/${ghprbPullId}/merge:PR_MERGE export COMMIT=`git rev-parse "PR_MERGE^{commit}"` git checkout -f ${COMMIT} futurize -1 src/ test/ > test.patch # Get changed files and analyze for idioms git diff --name-only ${ghprbTargetBranch}..${COMMIT} > changedFiles.txt git diff-tree --name-status -r ${ghprbTargetBranch}..${COMMIT} | egrep "^A" | cut -f 2 > addedFiles.txt while read name; do futurize -f execfile -f filter -f raw_input >> test.patch || true futurize -f idioms $name >> idioms.patch || true done <changedFiles.txt ${HOME}/ContainerScripts/AnalyzePyFuture.py > added.message cp test.patch idioms.patch added.message ${HOME}/artifacts/ popd
Add a few stage2 fixers
Add a few stage2 fixers
Shell
apache-2.0
dmwm/Docker,dmwm/Docker
shell
## Code Before:
if [ -z "$ghprbPullId" -o -z "$ghprbTargetBranch" ]; then
    echo "Not all necessary environment variables set: ghprbPullId, ghprbTargetBranch"
    exit 1
fi

source ./env_unittest.sh
pushd wmcore_unittest/WMCore
export PYTHONPATH=`pwd`/test/python:`pwd`/src/python:${PYTHONPATH}

git config remote.origin.url https://github.com/dmwm/WMCore.git
git fetch origin pull/${ghprbPullId}/merge:PR_MERGE
export COMMIT=`git rev-parse "PR_MERGE^{commit}"`
git checkout -f ${COMMIT}

futurize -1 src/ test/ > test.patch

# Get changed files and analyze for idioms
git diff --name-only ${ghprbTargetBranch}..${COMMIT} > changedFiles.txt
git diff-tree --name-status -r ${ghprbTargetBranch}..${COMMIT} | egrep "^A" | cut -f 2 > addedFiles.txt
while read name; do
    futurize -f idioms $name >> idioms.patch || true
done <changedFiles.txt

${HOME}/ContainerScripts/AnalyzePyFuture.py > added.message

cp test.patch idioms.patch added.message ${HOME}/artifacts/
popd

## Instruction:
Add a few stage2 fixers

## Code After:
if [ -z "$ghprbPullId" -o -z "$ghprbTargetBranch" ]; then
    echo "Not all necessary environment variables set: ghprbPullId, ghprbTargetBranch"
    exit 1
fi

source ./env_unittest.sh
pushd wmcore_unittest/WMCore
export PYTHONPATH=`pwd`/test/python:`pwd`/src/python:${PYTHONPATH}

git config remote.origin.url https://github.com/dmwm/WMCore.git
git fetch origin pull/${ghprbPullId}/merge:PR_MERGE
export COMMIT=`git rev-parse "PR_MERGE^{commit}"`
git checkout -f ${COMMIT}

futurize -1 src/ test/ > test.patch

# Get changed files and analyze for idioms
git diff --name-only ${ghprbTargetBranch}..${COMMIT} > changedFiles.txt
git diff-tree --name-status -r ${ghprbTargetBranch}..${COMMIT} | egrep "^A" | cut -f 2 > addedFiles.txt
while read name; do
    futurize -f execfile -f filter -f raw_input >> test.patch || true
    futurize -f idioms $name >> idioms.patch || true
done <changedFiles.txt

${HOME}/ContainerScripts/AnalyzePyFuture.py > added.message

cp test.patch idioms.patch added.message ${HOME}/artifacts/
popd
c12db036af5e67d0436dd83011cd9f944fdd6603
metadata/com.icecondor.nest.txt
metadata/com.icecondor.nest.txt
Categories:Navigation
License:Apache2
Web Site:https://icecondor.com
Source Code:https://github.com/icecondor/android
Issue Tracker:https://github.com/icecondor/android/issues

Auto Name:IceCondor
Summary:Location tracking and sharing
Description:
Track your location all-day and share the data.
.

Repo Type:git
Repo:https://github.com/icecondor/android.git

Build:20141218,20141218
    commit=5780278fe6b47ac59296dc106bcd405d7e1b758a
    srclibs=1:appcompat@v7,2:AndroidAsync@42eabc6ca99c4011ed1ce30e4a7e77608f8d96fa,[email protected]
    prebuild=pushd $$JodaTime$$ && \
        $$MVN3$$ package && \
        popd && \
        mkdir -p libs && \
        cp $$JodaTime$$/target/*.jar libs/

Auto Update Mode:None
Update Check Mode:RepoManifest
Current Version:20141218
Current Version Code:20141218
Categories:Navigation
License:Apache2
Web Site:https://icecondor.com
Source Code:https://github.com/icecondor/android
Issue Tracker:https://github.com/icecondor/android/issues

Auto Name:IceCondor
Summary:Location tracking and sharing
Description:
Track your location all-day and share the data.
.

Repo Type:git
Repo:https://github.com/icecondor/android.git

Build:20141218,20141218
    commit=5780278fe6b47ac59296dc106bcd405d7e1b758a
    srclibs=1:appcompat@v7,2:AndroidAsync@42eabc6ca99c4011ed1ce30e4a7e77608f8d96fa,[email protected]
    prebuild=pushd $$JodaTime$$ && \
        $$MVN3$$ package && \
        popd && \
        mkdir -p libs && \
        cp $$JodaTime$$/target/*.jar libs/

Auto Update Mode:None
Update Check Mode:RepoManifest
Current Version:20150301
Current Version Code:20150301
Update CV of IceCondor to 20150301 (20150301)
Update CV of IceCondor to 20150301 (20150301)
Text
agpl-3.0
f-droid/fdroid-data,f-droid/fdroiddata,f-droid/fdroiddata
text
## Code Before:
Categories:Navigation
License:Apache2
Web Site:https://icecondor.com
Source Code:https://github.com/icecondor/android
Issue Tracker:https://github.com/icecondor/android/issues

Auto Name:IceCondor
Summary:Location tracking and sharing
Description:
Track your location all-day and share the data.
.

Repo Type:git
Repo:https://github.com/icecondor/android.git

Build:20141218,20141218
    commit=5780278fe6b47ac59296dc106bcd405d7e1b758a
    srclibs=1:appcompat@v7,2:AndroidAsync@42eabc6ca99c4011ed1ce30e4a7e77608f8d96fa,[email protected]
    prebuild=pushd $$JodaTime$$ && \
        $$MVN3$$ package && \
        popd && \
        mkdir -p libs && \
        cp $$JodaTime$$/target/*.jar libs/

Auto Update Mode:None
Update Check Mode:RepoManifest
Current Version:20141218
Current Version Code:20141218

## Instruction:
Update CV of IceCondor to 20150301 (20150301)

## Code After:
Categories:Navigation
License:Apache2
Web Site:https://icecondor.com
Source Code:https://github.com/icecondor/android
Issue Tracker:https://github.com/icecondor/android/issues

Auto Name:IceCondor
Summary:Location tracking and sharing
Description:
Track your location all-day and share the data.
.

Repo Type:git
Repo:https://github.com/icecondor/android.git

Build:20141218,20141218
    commit=5780278fe6b47ac59296dc106bcd405d7e1b758a
    srclibs=1:appcompat@v7,2:AndroidAsync@42eabc6ca99c4011ed1ce30e4a7e77608f8d96fa,[email protected]
    prebuild=pushd $$JodaTime$$ && \
        $$MVN3$$ package && \
        popd && \
        mkdir -p libs && \
        cp $$JodaTime$$/target/*.jar libs/

Auto Update Mode:None
Update Check Mode:RepoManifest
Current Version:20150301
Current Version Code:20150301
a1315f08bdfc99cb7b32a6156f7c2a33595dcab7
CONTRIBUTING.md
CONTRIBUTING.md
All feature requests and issues should be initially reported in the issues section for this repo. We follow the convention of naming an issue by prefixing its description with the issue type, as in "Feature Request: xxx", "Bug: xxx", or "UI: xxx". Try to use distinct and descriptive subject lines to make issues easier to identify. The description should include as much information as necessary to define the issue so that implementation can begin.

# Creating a new library

If you are creating a new client library, please adhere to the guidelines found here: https://github.com/attm2x/m2x-service/blob/master/CLIENT-CONTRIBUTIONS.md
If you would like to submit an issue or contribute to any M2X Client Library, please adhere to the contribution guidelines found here: https://github.com/attm2x/m2x-service/blob/master/CONTRIBUTING.md

# Creating a new library

If you are creating a new client library, please adhere to the guidelines found here: https://github.com/attm2x/m2x-service/blob/master/CLIENT-CONTRIBUTIONS.md
Add link to generic contribution guidelines
Add link to generic contribution guidelines
Markdown
mit
attm2x/m2x-nodejs,skazska/m2x-nodejs
markdown
## Code Before:
All feature requests and issues should be initially reported in the issues section for this repo. We follow the convention of naming an issue by prefixing its description with the issue type, as in "Feature Request: xxx", "Bug: xxx", or "UI: xxx". Try to use distinct and descriptive subject lines to make issues easier to identify. The description should include as much information as necessary to define the issue so that implementation can begin.

# Creating a new library

If you are creating a new client library, please adhere to the guidelines found here: https://github.com/attm2x/m2x-service/blob/master/CLIENT-CONTRIBUTIONS.md

## Instruction:
Add link to generic contribution guidelines

## Code After:
If you would like to submit an issue or contribute to any M2X Client Library, please adhere to the contribution guidelines found here: https://github.com/attm2x/m2x-service/blob/master/CONTRIBUTING.md

# Creating a new library

If you are creating a new client library, please adhere to the guidelines found here: https://github.com/attm2x/m2x-service/blob/master/CLIENT-CONTRIBUTIONS.md
2802475f5ff4170cd8f6c6969dce65dc971aa8a6
README.asciidoc
README.asciidoc
= PHP Reflect

**PHP Reflect** is a library that adds the ability to reverse-engineer classes, interfaces, functions, constants, namespaces, traits and more.

Both were distributed as source code (install via composer) and a PHAR version that bundles all dependencies in a single file.

== Install

You can either :

* download the phar version http://bartlett.laurent-laville.org/get/phpreflect-4.2.1.phar[4.2.1]
* install via https://packagist.org/packages/bartlett/php-reflect/[packagist] the current source dev-master

== Documentation

The documentation for PHP Reflect 4.2 is available in http://php5.laurent-laville.org/reflect/manual/4.2/en/[English] to read it online or download to read it later (multiple formats).

AsciiDoc source code are available on `docs` folder of the repository.

== Authors

* Laurent Laville

== License

This handler is licensed under the BSD-3-clauses License - see the `LICENSE` file for details
= PHP Reflect

**PHP Reflect** is a library that adds the ability to reverse-engineer classes, interfaces, functions, constants, namespaces, traits and more.

Both were distributed as source code (install via composer) and a PHAR version that bundles all dependencies in a single file.

== Requirements

PHP 5.5.0 or above.

== Install

You can either :

* download the phar version http://bartlett.laurent-laville.org/get/phpreflect-4.2.1.phar[4.2.1]
* install via https://packagist.org/packages/bartlett/php-reflect/[packagist] the current source dev-master

== Documentation

The documentation for PHP Reflect 4.2 is available in http://php5.laurent-laville.org/reflect/manual/4.2/en/[English] to read it online or download to read it later (multiple formats).

AsciiDoc source code are available on `docs` folder of the repository.

== Authors

* Laurent Laville

== License

This handler is licensed under the BSD-3-clauses License - see the `LICENSE` file for details
Add a note about minimum requirements to install this package
Add a note about minimum requirements to install this package
AsciiDoc
bsd-3-clause
llaville/php-reflect
asciidoc
## Code Before:
= PHP Reflect

**PHP Reflect** is a library that adds the ability to reverse-engineer classes, interfaces, functions, constants, namespaces, traits and more.

Both were distributed as source code (install via composer) and a PHAR version that bundles all dependencies in a single file.

== Install

You can either :

* download the phar version http://bartlett.laurent-laville.org/get/phpreflect-4.2.1.phar[4.2.1]
* install via https://packagist.org/packages/bartlett/php-reflect/[packagist] the current source dev-master

== Documentation

The documentation for PHP Reflect 4.2 is available in http://php5.laurent-laville.org/reflect/manual/4.2/en/[English] to read it online or download to read it later (multiple formats).

AsciiDoc source code are available on `docs` folder of the repository.

== Authors

* Laurent Laville

== License

This handler is licensed under the BSD-3-clauses License - see the `LICENSE` file for details

## Instruction:
Add a note about minimum requirements to install this package

## Code After:
= PHP Reflect

**PHP Reflect** is a library that adds the ability to reverse-engineer classes, interfaces, functions, constants, namespaces, traits and more.

Both were distributed as source code (install via composer) and a PHAR version that bundles all dependencies in a single file.

== Requirements

PHP 5.5.0 or above.

== Install

You can either :

* download the phar version http://bartlett.laurent-laville.org/get/phpreflect-4.2.1.phar[4.2.1]
* install via https://packagist.org/packages/bartlett/php-reflect/[packagist] the current source dev-master

== Documentation

The documentation for PHP Reflect 4.2 is available in http://php5.laurent-laville.org/reflect/manual/4.2/en/[English] to read it online or download to read it later (multiple formats).

AsciiDoc source code are available on `docs` folder of the repository.

== Authors

* Laurent Laville

== License

This handler is licensed under the BSD-3-clauses License - see the `LICENSE` file for details
f8fc2fe9ab91cb029a369c23f644d18b4e15815f
.rubocop.yml
.rubocop.yml
AllCops:
  TargetRubyVersion: 2.2
AllCops:
  TargetRubyVersion: 2.2

Layout/IndentHeredoc:
  Enabled: false

Metrics/AbcSize:
  Max: 30

Metrics/BlockLength:
  Exclude:
    - 'test/**/*.rb'

Metrics/ClassLength:
  Enabled: false

Metrics/MethodLength:
  Max: 20
Copy the Dimples config for Rubocop.
Copy the Dimples config for Rubocop.
YAML
mit
ivuk/usesthis,waferbaby/usesthis,ivuk/usesthis,waferbaby/usesthis
yaml
## Code Before:
AllCops:
  TargetRubyVersion: 2.2

## Instruction:
Copy the Dimples config for Rubocop.

## Code After:
AllCops:
  TargetRubyVersion: 2.2

Layout/IndentHeredoc:
  Enabled: false

Metrics/AbcSize:
  Max: 30

Metrics/BlockLength:
  Exclude:
    - 'test/**/*.rb'

Metrics/ClassLength:
  Enabled: false

Metrics/MethodLength:
  Max: 20
2f70cc67f581cd40b2617b27cde46730fea79c9c
maven/bnd-indexer-maven-plugin/src/test/resources/integration-test/test/setup.groovy
maven/bnd-indexer-maven-plugin/src/test/resources/integration-test/test/setup.groovy
import java.nio.file.*;

Path source = Paths.get("${pluginBasedir}/src/test/resources/integration-test/local-repo/org/objenesis");
Path target = Paths.get("${pluginBuildDirectory}/integration-test/repo/org/objenesis");

target.deleteDir();

source.toFile().eachFileRecurse({ f ->
    Path s = Paths.get(f.toURI());
    Path t = target.resolve(source.relativize(s));
    t.toFile().getParentFile().mkdirs();
    Files.copy(s, t);
});

// We run this check to be sure that the metadata needed by the
// bad-remote-metadata test is correctly installed in the local repo
String expected = "#NOTE: This is an Aether internal implementation file, its format can be changed without prior notice.\n" +
    "#Thu Jul 27 10:28:27 BST 2017\n" +
    "objenesis-2.2.pom>central=\n" +
    "objenesis-2.2.jar>central=\n" +
    "objenesis-2.2.pom>=\n" +
    "objenesis-2.2.jar>=\n";

Path remote = Paths.get(target.toString(), "objenesis/2.2/_remote.repositories");

assert expected == remote.toFile().getText();

return;
import java.nio.file.*;

Path source = Paths.get("${pluginBasedir}/src/test/resources/integration-test/local-repo/org/objenesis");
Path target = Paths.get("${pluginBuildDirectory}/integration-test/repo/org/objenesis");

target.deleteDir();

source.toFile().eachFileRecurse({ f ->
    Path s = Paths.get(f.toURI());
    Path t = target.resolve(source.relativize(s));
    t.toFile().getParentFile().mkdirs();
    Files.copy(s, t);
});

// We run this check to be sure that the metadata needed by the
// bad-remote-metadata test is correctly installed in the local repo
String expected = """\
#NOTE: This is an Aether internal implementation file, its format can be changed without prior notice.
#Thu Jul 27 10:28:27 BST 2017
objenesis-2.2.pom>central=
objenesis-2.2.jar>central=
objenesis-2.2.pom>=
objenesis-2.2.jar>=
"""

Path remote = target.resolve("objenesis/2.2/_remote.repositories");

assert remote.toFile().text.normalize() == expected.normalize()

return;
Fix file content comparison to work on azure pipelines
maven: Fix file content comparison to work on azure pipelines

Signed-off-by: BJ Hargrave <[email protected]>
Groovy
apache-2.0
psoreide/bnd,psoreide/bnd,psoreide/bnd
groovy
## Code Before:
import java.nio.file.*;

Path source = Paths.get("${pluginBasedir}/src/test/resources/integration-test/local-repo/org/objenesis");
Path target = Paths.get("${pluginBuildDirectory}/integration-test/repo/org/objenesis");

target.deleteDir();

source.toFile().eachFileRecurse({ f ->
    Path s = Paths.get(f.toURI());
    Path t = target.resolve(source.relativize(s));
    t.toFile().getParentFile().mkdirs();
    Files.copy(s, t);
});

// We run this check to be sure that the metadata needed by the
// bad-remote-metadata test is correctly installed in the local repo
String expected = "#NOTE: This is an Aether internal implementation file, its format can be changed without prior notice.\n" +
    "#Thu Jul 27 10:28:27 BST 2017\n" +
    "objenesis-2.2.pom>central=\n" +
    "objenesis-2.2.jar>central=\n" +
    "objenesis-2.2.pom>=\n" +
    "objenesis-2.2.jar>=\n";

Path remote = Paths.get(target.toString(), "objenesis/2.2/_remote.repositories");

assert expected == remote.toFile().getText();

return;

## Instruction:
maven: Fix file content comparison to work on azure pipelines

Signed-off-by: BJ Hargrave <[email protected]>

## Code After:
import java.nio.file.*;

Path source = Paths.get("${pluginBasedir}/src/test/resources/integration-test/local-repo/org/objenesis");
Path target = Paths.get("${pluginBuildDirectory}/integration-test/repo/org/objenesis");

target.deleteDir();

source.toFile().eachFileRecurse({ f ->
    Path s = Paths.get(f.toURI());
    Path t = target.resolve(source.relativize(s));
    t.toFile().getParentFile().mkdirs();
    Files.copy(s, t);
});

// We run this check to be sure that the metadata needed by the
// bad-remote-metadata test is correctly installed in the local repo
String expected = """\
#NOTE: This is an Aether internal implementation file, its format can be changed without prior notice.
#Thu Jul 27 10:28:27 BST 2017
objenesis-2.2.pom>central=
objenesis-2.2.jar>central=
objenesis-2.2.pom>=
objenesis-2.2.jar>=
"""

Path remote = target.resolve("objenesis/2.2/_remote.repositories");

assert remote.toFile().text.normalize() == expected.normalize()

return;
06ed9c838e61135eef85387482209a5fc18176e8
app/assets/stylesheets/darkswarm/variables.css.scss
app/assets/stylesheets/darkswarm/variables.css.scss
@import "foundation/functions"; @import "foundation/components/global"; // Brand guide colours: // International: #81c26e // Australia: #f35746 // Africa: #f35e32 // South Africa: #f9a72b // Norway: #4b83cc // Scandanavia: #0c8bbc // UK: #e6373f $brand-colour: #f27052; // Topbar $topbar-height: rem-calc(64); $topbar-link-padding: $topbar-height / 3; $topbar-bg: $white; $topbar-bg-color: $topbar-bg; $topbar-link-color: $black; $topbar-link-color-hover: $brand-colour; $topbar-link-color-active: $black; $topbar-link-color-active-hover: $white; $topbar-link-bg-hover: $white; $topbar-dropdown-link-color: $black; $topbar-dropdown-bg: $white; $topbar-dropdown-link-bg: $white; $topbar-dropdown-link-bg-hover: $white;
@import "foundation/functions"; @import "foundation/components/global"; // Brand guide colours: // International: #81c26e // Australia: #f35746 // Africa: #f35e32 // South Africa: #f9a72b // Norway: #4b83cc // Scandanavia: #0c8bbc // UK: #e6373f $brand-colour: #f27052; // Topbar $topbar-height: rem-calc(64); $topbar-link-padding: $topbar-height / 3; $topbar-arrows: false; $topbar-bg: $white; $topbar-bg-color: $topbar-bg; $topbar-link-color: $black; $topbar-link-color-hover: $brand-colour; $topbar-link-color-active: $black; $topbar-link-color-active-hover: $white; $topbar-link-bg-hover: $white; $topbar-dropdown-link-color: $black; $topbar-dropdown-bg: $white; $topbar-dropdown-link-bg: $white; $topbar-dropdown-link-bg-hover: $white;
Remove extra left padding from top bar menu items
Remove extra left padding from top bar menu items
SCSS
agpl-3.0
openfoodfoundation/openfoodnetwork,mkllnk/openfoodnetwork,mkllnk/openfoodnetwork,Matt-Yorkley/openfoodnetwork,mkllnk/openfoodnetwork,lin-d-hop/openfoodnetwork,Matt-Yorkley/openfoodnetwork,Matt-Yorkley/openfoodnetwork,openfoodfoundation/openfoodnetwork,lin-d-hop/openfoodnetwork,lin-d-hop/openfoodnetwork,lin-d-hop/openfoodnetwork,Matt-Yorkley/openfoodnetwork,openfoodfoundation/openfoodnetwork,mkllnk/openfoodnetwork,openfoodfoundation/openfoodnetwork
scss
## Code Before:
@import "foundation/functions";
@import "foundation/components/global";

// Brand guide colours:
// International: #81c26e
// Australia: #f35746
// Africa: #f35e32
// South Africa: #f9a72b
// Norway: #4b83cc
// Scandanavia: #0c8bbc
// UK: #e6373f

$brand-colour: #f27052;

// Topbar
$topbar-height: rem-calc(64);
$topbar-link-padding: $topbar-height / 3;
$topbar-bg: $white;
$topbar-bg-color: $topbar-bg;
$topbar-link-color: $black;
$topbar-link-color-hover: $brand-colour;
$topbar-link-color-active: $black;
$topbar-link-color-active-hover: $white;
$topbar-link-bg-hover: $white;
$topbar-dropdown-link-color: $black;
$topbar-dropdown-bg: $white;
$topbar-dropdown-link-bg: $white;
$topbar-dropdown-link-bg-hover: $white;

## Instruction:
Remove extra left padding from top bar menu items

## Code After:
@import "foundation/functions";
@import "foundation/components/global";

// Brand guide colours:
// International: #81c26e
// Australia: #f35746
// Africa: #f35e32
// South Africa: #f9a72b
// Norway: #4b83cc
// Scandanavia: #0c8bbc
// UK: #e6373f

$brand-colour: #f27052;

// Topbar
$topbar-height: rem-calc(64);
$topbar-link-padding: $topbar-height / 3;
$topbar-arrows: false;
$topbar-bg: $white;
$topbar-bg-color: $topbar-bg;
$topbar-link-color: $black;
$topbar-link-color-hover: $brand-colour;
$topbar-link-color-active: $black;
$topbar-link-color-active-hover: $white;
$topbar-link-bg-hover: $white;
$topbar-dropdown-link-color: $black;
$topbar-dropdown-bg: $white;
$topbar-dropdown-link-bg: $white;
$topbar-dropdown-link-bg-hover: $white;
15650cb2b64e3cf42bc0c5339b8491c868e6567f
snake/countries.nim
snake/countries.nim
import unicode

when not defined(js):
  import httpclient, logging, asyncdispatch, json

  proc getCountryForIP*(ip: string): Future[string] {.async.} =
    ## Returns a two-letter ISO code specifying the country that the
    ## IP address belongs to, if the country cannot be determined "" is returned.
    var client = newAsyncHttpClient()
    let responseFut = client.getContent("http://freegeoip.net/json/" & ip)
    yield responseFut
    if responseFut.failed:
      warn("Error retrieving country by IP: " & responseFut.error.msg)
      return ""

    let obj = parseJson(responseFut.read())
    return obj["country_code"].getStr()

proc getUnicodeForCountry*(iso: string): string =
  ## Retrieves a country flag unicode character for the specified ISO two-letter
  ## country code.
  let base = 127397
  result = " "
  for c in iso:
    result.add($Rune(base + c.ord))

when isMainModule:
  doAssert getUnicodeForCountry("DE") == "🇩🇪"
import unicode

when not defined(js):
  import httpclient, logging, asyncdispatch, json

  proc getCountryForIP*(ip: string): Future[string] {.async.} =
    ## Returns a two-letter ISO code specifying the country that the
    ## IP address belongs to, if the country cannot be determined "" is returned.
    var client = newAsyncHttpClient()
    let responseFut = client.getContent("http://freegeoip.net/json/" & ip)
    yield responseFut
    if responseFut.failed:
      warn("Error retrieving country by IP: " & responseFut.error.msg)
      return ""

    let obj = parseJson(responseFut.read())
    return obj["country_code"].getStr()

proc getUnicodeForCountry*(iso: string): string =
  ## Retrieves a country flag unicode character for the specified ISO two-letter
  ## country code.
  let base = 127397
  result = ""
  for c in iso:
    result.add($Rune(base + c.ord))

  if result.len == 0:
    return " "

when isMainModule:
  doAssert getUnicodeForCountry("DE") == "🇩🇪"
Return " " when country code is invalid.
Return " " when country code is invalid.
Nimrod
mit
dom96/snake
nimrod
## Code Before:
import unicode

when not defined(js):
  import httpclient, logging, asyncdispatch, json

  proc getCountryForIP*(ip: string): Future[string] {.async.} =
    ## Returns a two-letter ISO code specifying the country that the
    ## IP address belongs to, if the country cannot be determined "" is returned.
    var client = newAsyncHttpClient()
    let responseFut = client.getContent("http://freegeoip.net/json/" & ip)
    yield responseFut
    if responseFut.failed:
      warn("Error retrieving country by IP: " & responseFut.error.msg)
      return ""

    let obj = parseJson(responseFut.read())
    return obj["country_code"].getStr()

proc getUnicodeForCountry*(iso: string): string =
  ## Retrieves a country flag unicode character for the specified ISO two-letter
  ## country code.
  let base = 127397
  result = " "
  for c in iso:
    result.add($Rune(base + c.ord))

when isMainModule:
  doAssert getUnicodeForCountry("DE") == "🇩🇪"

## Instruction:
Return " " when country code is invalid.

## Code After:
import unicode

when not defined(js):
  import httpclient, logging, asyncdispatch, json

  proc getCountryForIP*(ip: string): Future[string] {.async.} =
    ## Returns a two-letter ISO code specifying the country that the
    ## IP address belongs to, if the country cannot be determined "" is returned.
    var client = newAsyncHttpClient()
    let responseFut = client.getContent("http://freegeoip.net/json/" & ip)
    yield responseFut
    if responseFut.failed:
      warn("Error retrieving country by IP: " & responseFut.error.msg)
      return ""

    let obj = parseJson(responseFut.read())
    return obj["country_code"].getStr()

proc getUnicodeForCountry*(iso: string): string =
  ## Retrieves a country flag unicode character for the specified ISO two-letter
  ## country code.
  let base = 127397
  result = ""
  for c in iso:
    result.add($Rune(base + c.ord))

  if result.len == 0:
    return " "

when isMainModule:
  doAssert getUnicodeForCountry("DE") == "🇩🇪"
2bc95d90db15160f9c4869c03f9dadb6cd8d56fa
seleniumbase/config/proxy_list.py
seleniumbase/config/proxy_list.py
PROXY_LIST = {
    "example1": "159.122.164.163:8080", # (Example) - set your own proxy here
    "proxy1": None,
    "proxy2": None,
    "proxy3": None,
    "proxy4": None,
    "proxy5": None,
}
PROXY_LIST = {
    "example1": "159.122.164.163:8080", # (Example) - set your own proxy here
    "example2": "158.69.138.8:1080", # (Example) - set your own proxy here
    "proxy1": None,
    "proxy2": None,
    "proxy3": None,
    "proxy4": None,
    "proxy5": None,
}
Add another proxy server example string
Add another proxy server example string
Python
mit
seleniumbase/SeleniumBase,seleniumbase/SeleniumBase,mdmintz/SeleniumBase,seleniumbase/SeleniumBase,seleniumbase/SeleniumBase,mdmintz/SeleniumBase,mdmintz/SeleniumBase,mdmintz/SeleniumBase
python
## Code Before:
PROXY_LIST = {
    "example1": "159.122.164.163:8080", # (Example) - set your own proxy here
    "proxy1": None,
    "proxy2": None,
    "proxy3": None,
    "proxy4": None,
    "proxy5": None,
}

## Instruction:
Add another proxy server example string

## Code After:
PROXY_LIST = {
    "example1": "159.122.164.163:8080", # (Example) - set your own proxy here
    "example2": "158.69.138.8:1080", # (Example) - set your own proxy here
    "proxy1": None,
    "proxy2": None,
    "proxy3": None,
    "proxy4": None,
    "proxy5": None,
}
0c13dcc2658e557f5b21dd57c96a1d7e9f989828
.github/ISSUE_TEMPLATE.md
.github/ISSUE_TEMPLATE.md
- [ ] I checked existing issues (open and closed) for possible duplicates
- [ ] I can reproduce the problem on the latest stable version (not nightly!)
- [ ] The issue is not about adding entries to the default blacklist

### What is your browser?

- [ ] Firefox
- [ ] Chrome

### What is your operating system?

- [ ] Linux
- [ ] Mac
- [ ] Windows

### Description (please include examples/screenshots where applicable)
- [ ] I checked existing issues (open and closed) for possible duplicates
- [ ] I can reproduce the problem on the latest stable version (not nightly!)
- [ ] If you have a problem a specific website: adding the domain to your blacklist does not solve the problem
- [ ] The issue is not about adding entries to the default blacklist

### What is your browser?

- [ ] Firefox
- [ ] Chrome

### What is your operating system?

- [ ] Linux
- [ ] Mac
- [ ] Windows

### Description (please include examples/screenshots where applicable)
Add entry to checklist to avoid trivial tickets about blacklisting
Add entry to checklist to avoid trivial tickets about blacklisting
Markdown
mit
sblask/firefox-skip-redirect,sblask/firefox-skip-redirect
markdown
## Code Before:
- [ ] I checked existing issues (open and closed) for possible duplicates
- [ ] I can reproduce the problem on the latest stable version (not nightly!)
- [ ] The issue is not about adding entries to the default blacklist

### What is your browser?

- [ ] Firefox
- [ ] Chrome

### What is your operating system?

- [ ] Linux
- [ ] Mac
- [ ] Windows

### Description (please include examples/screenshots where applicable)

## Instruction:
Add entry to checklist to avoid trivial tickets about blacklisting

## Code After:
- [ ] I checked existing issues (open and closed) for possible duplicates
- [ ] I can reproduce the problem on the latest stable version (not nightly!)
- [ ] If you have a problem a specific website: adding the domain to your blacklist does not solve the problem
- [ ] The issue is not about adding entries to the default blacklist

### What is your browser?

- [ ] Firefox
- [ ] Chrome

### What is your operating system?

- [ ] Linux
- [ ] Mac
- [ ] Windows

### Description (please include examples/screenshots where applicable)
c2abbec04dd0a0af046dd4912aed8cf8e8a03cd1
setup.cfg
setup.cfg
[metadata]
license_file=COPYING

[sdist]
owner=root
group=root

[bdist_wheel]
universal = 1

[nosetests]
with-doctest=true
doctest-extension=doctest
doctest-options=+IGNORE_EXCEPTION_DETAIL
with-coverage=true
cover-branches=true
cover-package=pskc
cover-inclusive=true
cover-erase=true
cover-html=true
cover-html-dir=coverage
cover-min-percentage=100

[build_sphinx]
all_files = 1
builder = html man

[flake8]
ignore =
    D105 # Missing docstring in magic method
    D107 # Missing docstring in __init__
    Q001 # Use of ''' multiline strings
max-complexity = 14
max-line-length = 78
[metadata]
license_file=COPYING

[sdist]
owner=root
group=root

[bdist_wheel]
universal = 1

[nosetests]
with-doctest=true
doctest-extension=doctest
doctest-options=+IGNORE_EXCEPTION_DETAIL
with-coverage=true
cover-branches=true
cover-package=pskc
cover-inclusive=true
cover-erase=true
cover-html=true
cover-html-dir=coverage
cover-min-percentage=100

[build_sphinx]
all_files = 1
builder = html man

[flake8]
ignore =
    D105 # Missing docstring in magic method
    D107 # Missing docstring in __init__
    Q001 # Use of ''' multiline strings
    W504 # we put the binary operator on the preceding line
max-complexity = 14
max-line-length = 78
Make the multi-line operator place explicit
Make the multi-line operator place explicit

Recent versions of flake8 changed the defaults of the errors to ignore.
INI
lgpl-2.1
arthurdejong/python-pskc
ini
## Code Before:
[metadata]
license_file=COPYING

[sdist]
owner=root
group=root

[bdist_wheel]
universal = 1

[nosetests]
with-doctest=true
doctest-extension=doctest
doctest-options=+IGNORE_EXCEPTION_DETAIL
with-coverage=true
cover-branches=true
cover-package=pskc
cover-inclusive=true
cover-erase=true
cover-html=true
cover-html-dir=coverage
cover-min-percentage=100

[build_sphinx]
all_files = 1
builder = html man

[flake8]
ignore =
    D105 # Missing docstring in magic method
    D107 # Missing docstring in __init__
    Q001 # Use of ''' multiline strings
max-complexity = 14
max-line-length = 78

## Instruction:
Make the multi-line operator place explicit

Recent versions of flake8 changed the defaults of the errors to ignore.

## Code After:
[metadata]
license_file=COPYING

[sdist]
owner=root
group=root

[bdist_wheel]
universal = 1

[nosetests]
with-doctest=true
doctest-extension=doctest
doctest-options=+IGNORE_EXCEPTION_DETAIL
with-coverage=true
cover-branches=true
cover-package=pskc
cover-inclusive=true
cover-erase=true
cover-html=true
cover-html-dir=coverage
cover-min-percentage=100

[build_sphinx]
all_files = 1
builder = html man

[flake8]
ignore =
    D105 # Missing docstring in magic method
    D107 # Missing docstring in __init__
    Q001 # Use of ''' multiline strings
    W504 # we put the binary operator on the preceding line
max-complexity = 14
max-line-length = 78