| column | dtype | min | max |
|---|---|---|---|
| commit | stringlengths | 40 | 40 |
| old_file | stringlengths | 4 | 237 |
| new_file | stringlengths | 4 | 237 |
| old_contents | stringlengths | 1 | 4.24k |
| new_contents | stringlengths | 5 | 4.84k |
| subject | stringlengths | 15 | 778 |
| message | stringlengths | 16 | 6.86k |
| lang | stringlengths | 1 | 30 |
| license | stringclasses | 13 values | |
| repos | stringlengths | 5 | 116k |
| config | stringlengths | 1 | 30 |
| content | stringlengths | 105 | 8.72k |
dd646b7573c1e2bb41f60723e02aa6ddf58d59f6
kobo/apps/help/permissions.py
kobo/apps/help/permissions.py
from rest_framework import permissions class InAppMessagePermissions(permissions.BasePermission): def has_permission(self, request, view): if not request.user.is_authenticated: # Deny access to anonymous users return False if request.user.is_superuser: # Allow superusers to do anything return True if request.method in permissions.SAFE_METHODS: # Allow read-only access to any authenticated user return True elif request.method == 'PATCH': if not request.data: # A `PATCH` with no data is a check to see what's allowed, or # that's what the DRF "Browsable API" does, at least. We'll # wave it through for authenticated users return True elif request.data.keys() == ['interactions']: # Allow any authenticated user to update their own interactions return True # Sorry, buddy. return False
from rest_framework import exceptions, permissions class InAppMessagePermissions(permissions.BasePermission): def has_permission(self, request, view): if not request.user.is_authenticated: # Deny access to anonymous users return False if request.user.is_superuser: # Allow superusers to do anything return True if request.method in permissions.SAFE_METHODS: # Allow read-only access to any authenticated user return True elif request.method == 'PATCH': if not request.data: # A `PATCH` with no data is a check to see what's allowed, or # that's what the DRF "Browsable API" does, at least. We'll # wave it through for authenticated users return True elif list(request.data) == ['interactions']: # Allow any authenticated user to update their own interactions return True else: formatted_fields = ', '.join( [f'`{x}`' for x in request.data.keys()] ) raise exceptions.PermissionDenied( detail=( 'You may update only `interactions`, but your request ' f'contained {formatted_fields}.' ) ) # Sorry, buddy. return False
Fix Python 2-to-3 bug in in-app messages
Fix Python 2-to-3 bug in in-app messages …so that the permission check for `PATCH`ing `interactions` does not always fail. Fixes #2762
Python
agpl-3.0
kobotoolbox/kpi,kobotoolbox/kpi,kobotoolbox/kpi,kobotoolbox/kpi,kobotoolbox/kpi
python
## Code Before: from rest_framework import permissions class InAppMessagePermissions(permissions.BasePermission): def has_permission(self, request, view): if not request.user.is_authenticated: # Deny access to anonymous users return False if request.user.is_superuser: # Allow superusers to do anything return True if request.method in permissions.SAFE_METHODS: # Allow read-only access to any authenticated user return True elif request.method == 'PATCH': if not request.data: # A `PATCH` with no data is a check to see what's allowed, or # that's what the DRF "Browsable API" does, at least. We'll # wave it through for authenticated users return True elif request.data.keys() == ['interactions']: # Allow any authenticated user to update their own interactions return True # Sorry, buddy. return False ## Instruction: Fix Python 2-to-3 bug in in-app messages …so that the permission check for `PATCH`ing `interactions` does not always fail. Fixes #2762 ## Code After: from rest_framework import exceptions, permissions class InAppMessagePermissions(permissions.BasePermission): def has_permission(self, request, view): if not request.user.is_authenticated: # Deny access to anonymous users return False if request.user.is_superuser: # Allow superusers to do anything return True if request.method in permissions.SAFE_METHODS: # Allow read-only access to any authenticated user return True elif request.method == 'PATCH': if not request.data: # A `PATCH` with no data is a check to see what's allowed, or # that's what the DRF "Browsable API" does, at least. We'll # wave it through for authenticated users return True elif list(request.data) == ['interactions']: # Allow any authenticated user to update their own interactions return True else: formatted_fields = ', '.join( [f'`{x}`' for x in request.data.keys()] ) raise exceptions.PermissionDenied( detail=( 'You may update only `interactions`, but your request ' f'contained {formatted_fields}.' ) ) # Sorry, buddy. return False
a30c712278b84c8ccc254cdf23d3ad44043de0d1
framework/src/sbt-plugin/src/main/scala/PlayProject.scala
framework/src/sbt-plugin/src/main/scala/PlayProject.scala
package sbt import Keys._ import jline._ import play.api._ import play.core._ import play.utils.Colors object PlayProject extends Plugin with PlayExceptions with PlayKeys with PlayReloader with PlayCommands with PlaySettings { private def whichLang(name: String) = { if (name == JAVA) { defaultJavaSettings } else if (name == SCALA) { defaultScalaSettings } else { Seq.empty } } // ----- Create a Play project with default settings def apply(name: String, applicationVersion: String = "1.0", dependencies: Seq[ModuleID] = Nil, path: File = file("."), mainLang: String = NONE) = { Project(name, path) .settings(eclipseCommandSettings(mainLang): _*) .settings(PlayProject.defaultSettings: _*) .settings(Seq(testListeners += testListener): _*) .settings(whichLang(mainLang): _*) .settings( scalacOptions ++= Seq("-deprecation", "-unchecked", "-Xcheckinit", "-encoding", "utf8"), javacOptions ++= Seq("-encoding", "utf8"), version := applicationVersion, libraryDependencies ++= dependencies ) } }
package sbt import Keys._ import jline._ import play.api._ import play.core._ import play.utils.Colors object PlayProject extends Plugin with PlayExceptions with PlayKeys with PlayReloader with PlayCommands with PlaySettings { Option(System.getProperty("play.version")).map { case badVersion if badVersion != play.core.PlayVersion.current => { println( Colors.red(""" |This project uses Play %s! |Update the Play sbt-plugin version to %s (usually in project/plugins.sbt) """.stripMargin.format(play.core.PlayVersion.current, badVersion)) ) } case _ => } private def whichLang(name: String) = { if (name == JAVA) { defaultJavaSettings } else if (name == SCALA) { defaultScalaSettings } else { Seq.empty } } // ----- Create a Play project with default settings def apply(name: String, applicationVersion: String = "1.0", dependencies: Seq[ModuleID] = Nil, path: File = file("."), mainLang: String = NONE) = { Project(name, path) .settings(eclipseCommandSettings(mainLang): _*) .settings(PlayProject.defaultSettings: _*) .settings(Seq(testListeners += testListener): _*) .settings(whichLang(mainLang): _*) .settings( scalacOptions ++= Seq("-deprecation", "-unchecked", "-Xcheckinit", "-encoding", "utf8"), javacOptions ++= Seq("-encoding", "utf8"), version := applicationVersion, libraryDependencies ++= dependencies ) } }
Check Play version and display a warning
Check Play version and display a warning
Scala
apache-2.0
Shenker93/playframework,mkurz/playframework,mkurz/playframework,rajeshpg/playframework,hagl/playframework,ktoso/playframework,rajeshpg/playframework,richdougherty/playframework,benmccann/playframework,hagl/playframework,wsargent/playframework,ktoso/playframework,benmccann/playframework,benmccann/playframework,hagl/playframework,aradchykov/playframework,aradchykov/playframework,rajeshpg/playframework,aradchykov/playframework,wegtam/playframework,wegtam/playframework,marcospereira/playframework,wegtam/playframework,wegtam/playframework,marcospereira/playframework,richdougherty/playframework,zaneli/playframework,benmccann/playframework,wsargent/playframework,Shruti9520/playframework,mkurz/playframework,Shruti9520/playframework,richdougherty/playframework,richdougherty/playframework,Shenker93/playframework,wsargent/playframework,playframework/playframework,marcospereira/playframework,mkurz/playframework,ktoso/playframework,Shenker93/playframework,Shenker93/playframework,aradchykov/playframework,playframework/playframework,wsargent/playframework,rajeshpg/playframework,zaneli/playframework,marcospereira/playframework,hagl/playframework,Shruti9520/playframework,playframework/playframework,ktoso/playframework,Shruti9520/playframework,zaneli/playframework,zaneli/playframework
scala
## Code Before: package sbt import Keys._ import jline._ import play.api._ import play.core._ import play.utils.Colors object PlayProject extends Plugin with PlayExceptions with PlayKeys with PlayReloader with PlayCommands with PlaySettings { private def whichLang(name: String) = { if (name == JAVA) { defaultJavaSettings } else if (name == SCALA) { defaultScalaSettings } else { Seq.empty } } // ----- Create a Play project with default settings def apply(name: String, applicationVersion: String = "1.0", dependencies: Seq[ModuleID] = Nil, path: File = file("."), mainLang: String = NONE) = { Project(name, path) .settings(eclipseCommandSettings(mainLang): _*) .settings(PlayProject.defaultSettings: _*) .settings(Seq(testListeners += testListener): _*) .settings(whichLang(mainLang): _*) .settings( scalacOptions ++= Seq("-deprecation", "-unchecked", "-Xcheckinit", "-encoding", "utf8"), javacOptions ++= Seq("-encoding", "utf8"), version := applicationVersion, libraryDependencies ++= dependencies ) } } ## Instruction: Check Play version and display a warning ## Code After: package sbt import Keys._ import jline._ import play.api._ import play.core._ import play.utils.Colors object PlayProject extends Plugin with PlayExceptions with PlayKeys with PlayReloader with PlayCommands with PlaySettings { Option(System.getProperty("play.version")).map { case badVersion if badVersion != play.core.PlayVersion.current => { println( Colors.red(""" |This project uses Play %s! |Update the Play sbt-plugin version to %s (usually in project/plugins.sbt) """.stripMargin.format(play.core.PlayVersion.current, badVersion)) ) } case _ => } private def whichLang(name: String) = { if (name == JAVA) { defaultJavaSettings } else if (name == SCALA) { defaultScalaSettings } else { Seq.empty } } // ----- Create a Play project with default settings def apply(name: String, applicationVersion: String = "1.0", dependencies: Seq[ModuleID] = Nil, path: File = file("."), mainLang: String = NONE) = { Project(name, path) .settings(eclipseCommandSettings(mainLang): _*) .settings(PlayProject.defaultSettings: _*) .settings(Seq(testListeners += testListener): _*) .settings(whichLang(mainLang): _*) .settings( scalacOptions ++= Seq("-deprecation", "-unchecked", "-Xcheckinit", "-encoding", "utf8"), javacOptions ++= Seq("-encoding", "utf8"), version := applicationVersion, libraryDependencies ++= dependencies ) } }
aa1e0f7624b4e33dad438d82acd0c9aa89189a3e
docs/pages/installation/ConstructorOptions.vue
docs/pages/installation/ConstructorOptions.vue
<template> <div> <div class="example is-paddingless"> <CodeView :code="usage | pre" lang="javascript" expanded/> </div> <ApiView :data="api"/> </div> </template> <script> import api from './api/constructor-options' export default { data() { return { api, usage: ` Vue.use(Buefy, { defaultIconPack: 'fas', defaultContainerElement: '#content', // ... })` } } } </script>
<template> <div> <div class="example is-paddingless"> <CodeView title="Full bundle" :code="usageBundle | pre" lang="javascript" expanded/> </div> <div class="example is-paddingless"> <CodeView title="Individual components" :code="usageComponents | pre" lang="javascript" expanded/> </div> <ApiView :data="api"/> </div> </template> <script> import api from './api/constructor-options' export default { data() { return { api, usageBundle: ` Vue.use(Buefy, { defaultIconPack: 'fas', defaultContainerElement: '#content', // ... })`, usageComponents: ` import { BuefyProgrammatic, Table, Input } from 'buefy' Vue.use(BuefyProgrammatic, { defaultIconPack: 'fas', defaultContainerElement: '#content', // ... }) Vue.use(Table) Vue.use(Input)` } } } </script>
Add buefy programmatic to docs
Add buefy programmatic to docs
Vue
mit
rafaelpimpa/buefy,rafaelpimpa/buefy,rafaelpimpa/buefy
vue
## Code Before: <template> <div> <div class="example is-paddingless"> <CodeView :code="usage | pre" lang="javascript" expanded/> </div> <ApiView :data="api"/> </div> </template> <script> import api from './api/constructor-options' export default { data() { return { api, usage: ` Vue.use(Buefy, { defaultIconPack: 'fas', defaultContainerElement: '#content', // ... })` } } } </script> ## Instruction: Add buefy programmatic to docs ## Code After: <template> <div> <div class="example is-paddingless"> <CodeView title="Full bundle" :code="usageBundle | pre" lang="javascript" expanded/> </div> <div class="example is-paddingless"> <CodeView title="Individual components" :code="usageComponents | pre" lang="javascript" expanded/> </div> <ApiView :data="api"/> </div> </template> <script> import api from './api/constructor-options' export default { data() { return { api, usageBundle: ` Vue.use(Buefy, { defaultIconPack: 'fas', defaultContainerElement: '#content', // ... })`, usageComponents: ` import { BuefyProgrammatic, Table, Input } from 'buefy' Vue.use(BuefyProgrammatic, { defaultIconPack: 'fas', defaultContainerElement: '#content', // ... }) Vue.use(Table) Vue.use(Input)` } } } </script>
4dd519ee8697a699b79d3f7dbaae29c2dc4b6b84
tasks/options/autoprefixer.js
tasks/options/autoprefixer.js
module.exports = { options: { // Options we might want to enable in the future. diff: false, map: false }, multiple_files: { // Prefix all CSS files found in `src/static/css` and overwrite. expand: true, src: 'demo/static/css/main.css' }, };
module.exports = { options: { // Options we might want to enable in the future. diff: false, map: false }, main: { // Prefix all properties found in `main.css` and overwrite. expand: true, src: 'demo/static/css/main.css' }, };
Fix Autoprefixer target name and comment
Fix Autoprefixer target name and comment
JavaScript
cc0-1.0
cfpb/cf-grunt-config,ascott1/cf-grunt-config,Scotchester/cf-grunt-config
javascript
## Code Before: module.exports = { options: { // Options we might want to enable in the future. diff: false, map: false }, multiple_files: { // Prefix all CSS files found in `src/static/css` and overwrite. expand: true, src: 'demo/static/css/main.css' }, }; ## Instruction: Fix Autoprefixer target name and comment ## Code After: module.exports = { options: { // Options we might want to enable in the future. diff: false, map: false }, main: { // Prefix all properties found in `main.css` and overwrite. expand: true, src: 'demo/static/css/main.css' }, };
fb6dd16789f7e8069407ee03ca70559218368dc9
pkgs/applications/networking/mailreaders/hasmail/default.nix
pkgs/applications/networking/mailreaders/hasmail/default.nix
{ lib , buildGoModule , fetchFromGitHub , pkgconfig , gobject-introspection , pango , cairo , gtk2 }: buildGoModule rec { pname = "hasmail-unstable"; version = "2019-08-24"; src = fetchFromGitHub { owner = "jonhoo"; repo = "hasmail"; rev = "eb52536d26815383bfe5990cd5ace8bb9d036c8d"; sha256 = "1p6kwa5xk1mb1fkkxz1b5rcyp5kb4zc8nfif1gk6fab6wbdj9ia1"; }; modSha256 = "0z3asz7v1izg81f9xifx9s2sp5hly173hajsn9idi3bkv0h78is2"; buildInputs = [ pkgconfig gobject-introspection pango cairo gtk2 ]; meta = with lib; { description = "Simple tray icon for detecting new email on IMAP servers"; homepage = "https://github.com/jonhoo/hasmail"; license = licenses.unlicense; maintainers = with maintainers; [ doronbehar ]; platforms = platforms.all; }; }
{ lib , buildGoModule , fetchFromGitHub , pkgconfig , gobject-introspection , pango , cairo , gtk2 }: buildGoModule rec { pname = "hasmail-unstable"; version = "2019-08-24"; src = fetchFromGitHub { owner = "jonhoo"; repo = "hasmail"; rev = "eb52536d26815383bfe5990cd5ace8bb9d036c8d"; sha256 = "1p6kwa5xk1mb1fkkxz1b5rcyp5kb4zc8nfif1gk6fab6wbdj9ia1"; }; modSha256 = "0z3asz7v1izg81f9xifx9s2sp5hly173hajsn9idi3bkv0h78is2"; nativeBuildInputs = [ pkgconfig ]; buildInputs = [ pango cairo gtk2 ]; meta = with lib; { description = "Simple tray icon for detecting new email on IMAP servers"; homepage = "https://github.com/jonhoo/hasmail"; license = licenses.unlicense; maintainers = with maintainers; [ doronbehar ]; platforms = platforms.all; }; }
Remove gobject-introspection and put pkgconfig in native
Remove gobject-introspection and put pkgconfig in native
Nix
mit
NixOS/nixpkgs,NixOS/nixpkgs,NixOS/nixpkgs,NixOS/nixpkgs,NixOS/nixpkgs,NixOS/nixpkgs,NixOS/nixpkgs,NixOS/nixpkgs,NixOS/nixpkgs,NixOS/nixpkgs,NixOS/nixpkgs,NixOS/nixpkgs,NixOS/nixpkgs,NixOS/nixpkgs,NixOS/nixpkgs
nix
## Code Before: { lib , buildGoModule , fetchFromGitHub , pkgconfig , gobject-introspection , pango , cairo , gtk2 }: buildGoModule rec { pname = "hasmail-unstable"; version = "2019-08-24"; src = fetchFromGitHub { owner = "jonhoo"; repo = "hasmail"; rev = "eb52536d26815383bfe5990cd5ace8bb9d036c8d"; sha256 = "1p6kwa5xk1mb1fkkxz1b5rcyp5kb4zc8nfif1gk6fab6wbdj9ia1"; }; modSha256 = "0z3asz7v1izg81f9xifx9s2sp5hly173hajsn9idi3bkv0h78is2"; buildInputs = [ pkgconfig gobject-introspection pango cairo gtk2 ]; meta = with lib; { description = "Simple tray icon for detecting new email on IMAP servers"; homepage = "https://github.com/jonhoo/hasmail"; license = licenses.unlicense; maintainers = with maintainers; [ doronbehar ]; platforms = platforms.all; }; } ## Instruction: Remove gobject-introspection and put pkgconfig in native ## Code After: { lib , buildGoModule , fetchFromGitHub , pkgconfig , gobject-introspection , pango , cairo , gtk2 }: buildGoModule rec { pname = "hasmail-unstable"; version = "2019-08-24"; src = fetchFromGitHub { owner = "jonhoo"; repo = "hasmail"; rev = "eb52536d26815383bfe5990cd5ace8bb9d036c8d"; sha256 = "1p6kwa5xk1mb1fkkxz1b5rcyp5kb4zc8nfif1gk6fab6wbdj9ia1"; }; modSha256 = "0z3asz7v1izg81f9xifx9s2sp5hly173hajsn9idi3bkv0h78is2"; nativeBuildInputs = [ pkgconfig ]; buildInputs = [ pango cairo gtk2 ]; meta = with lib; { description = "Simple tray icon for detecting new email on IMAP servers"; homepage = "https://github.com/jonhoo/hasmail"; license = licenses.unlicense; maintainers = with maintainers; [ doronbehar ]; platforms = platforms.all; }; }
7b914f6f7c21b574a8e94d70fccb2090ec2650f7
src/org/jmist/framework/services/WorkerClient.java
src/org/jmist/framework/services/WorkerClient.java
/** * */ package org.jmist.framework.services; import java.rmi.RMISecurityManager; import java.util.concurrent.Executor; import java.util.concurrent.Executors; import javax.swing.JDialog; import org.jmist.framework.Job; import org.jmist.framework.reporting.ProgressPanel; /** * @author brad * */ public final class WorkerClient { /** * @param args */ public static void main(String[] args) { if (System.getSecurityManager() == null) { System.setSecurityManager(new RMISecurityManager()); } String host = args.length > 0 ? args[0] : "localhost"; JDialog dialog = new JDialog(); ProgressPanel monitor = new ProgressPanel(); int numberOfCpus = Runtime.getRuntime().availableProcessors(); Executor threadPool = Executors.newFixedThreadPool(numberOfCpus, new BackgroundThreadFactory()); Job workerJob = new ThreadServiceWorkerJob(host, 10000, numberOfCpus, threadPool); dialog.add(monitor); dialog.setBounds(0, 0, 400, 300); dialog.setVisible(true); workerJob.go(monitor); } }
/** * */ package org.jmist.framework.services; import java.rmi.RMISecurityManager; import java.util.concurrent.Executor; import java.util.concurrent.Executors; import javax.swing.JDialog; import org.jmist.framework.Job; import org.jmist.framework.reporting.ProgressPanel; /** * @author brad * */ public final class WorkerClient { /** * @param args */ public static void main(String[] args) { if (System.getSecurityManager() == null) { System.setSecurityManager(new RMISecurityManager()); } String host = args.length > 0 ? args[0] : "localhost"; JDialog dialog = new JDialog(); ProgressPanel monitor = new ProgressPanel(); int numberOfCpus = Runtime.getRuntime().availableProcessors(); Executor threadPool = Executors.newFixedThreadPool(numberOfCpus, new BackgroundThreadFactory()); Job workerJob = new ThreadServiceWorkerJob(host, 10000, numberOfCpus, threadPool); monitor.setRootVisible(false); dialog.add(monitor); dialog.setBounds(0, 0, 400, 300); dialog.setVisible(true); workerJob.go(monitor); } }
Hide the top level progress node.
Hide the top level progress node.
Java
mit
bwkimmel/jmist
java
## Code Before: /** * */ package org.jmist.framework.services; import java.rmi.RMISecurityManager; import java.util.concurrent.Executor; import java.util.concurrent.Executors; import javax.swing.JDialog; import org.jmist.framework.Job; import org.jmist.framework.reporting.ProgressPanel; /** * @author brad * */ public final class WorkerClient { /** * @param args */ public static void main(String[] args) { if (System.getSecurityManager() == null) { System.setSecurityManager(new RMISecurityManager()); } String host = args.length > 0 ? args[0] : "localhost"; JDialog dialog = new JDialog(); ProgressPanel monitor = new ProgressPanel(); int numberOfCpus = Runtime.getRuntime().availableProcessors(); Executor threadPool = Executors.newFixedThreadPool(numberOfCpus, new BackgroundThreadFactory()); Job workerJob = new ThreadServiceWorkerJob(host, 10000, numberOfCpus, threadPool); dialog.add(monitor); dialog.setBounds(0, 0, 400, 300); dialog.setVisible(true); workerJob.go(monitor); } } ## Instruction: Hide the top level progress node. ## Code After: /** * */ package org.jmist.framework.services; import java.rmi.RMISecurityManager; import java.util.concurrent.Executor; import java.util.concurrent.Executors; import javax.swing.JDialog; import org.jmist.framework.Job; import org.jmist.framework.reporting.ProgressPanel; /** * @author brad * */ public final class WorkerClient { /** * @param args */ public static void main(String[] args) { if (System.getSecurityManager() == null) { System.setSecurityManager(new RMISecurityManager()); } String host = args.length > 0 ? args[0] : "localhost"; JDialog dialog = new JDialog(); ProgressPanel monitor = new ProgressPanel(); int numberOfCpus = Runtime.getRuntime().availableProcessors(); Executor threadPool = Executors.newFixedThreadPool(numberOfCpus, new BackgroundThreadFactory()); Job workerJob = new ThreadServiceWorkerJob(host, 10000, numberOfCpus, threadPool); monitor.setRootVisible(false); dialog.add(monitor); dialog.setBounds(0, 0, 400, 300); dialog.setVisible(true); workerJob.go(monitor); } }
9d752d26aaa01a941d16185fbd2211a953707d9e
app.js
app.js
var app = require('express')(); app.get('/', function(req, res) { res.send('Hello World!'); }); var server = app.listen(3000, function() { console.log('Listening on http://localhost:3000'); });
var app = require('express')(); var db = require('mongoose'); db.connect('mongodb://localhost/caedence_net_development'); var Note = db.model('Note', { title: String, body_html: String, body_text: String, user_id: Number, created_at: { type: Date, default: Date.now }, updated_at: { type: Date, default: Date.now } }); // create some records with mongo cli like this: // $ mongod // > use caedence_net_development // > db.notes.insert({ title: "Hello", body_html: "<p>World</p>", body_text: "World" }) // WriteResult({ "nInserted" : 1 }) // > db.notes.find() // { "_id" : ObjectId("562473c0802fa33ca991217b"), "title" : "Hello", "body_html" : "<p>World</p>", "body_text" : "World" } app.get('/notes/', function(req, res) { Note.find(function(err, notes) { res.json(notes); }); }); app.get('/', function(req, res) { res.send('Hello World!'); }); var server = app.listen(3000, function() { console.log('Listening on http://localhost:3000'); });
Create Mongoose model for Note, and list notes.
Create Mongoose model for Note, and list notes.
JavaScript
mit
unixmonkey/caedence.net-2015,unixmonkey/caedence.net-2015
javascript
## Code Before: var app = require('express')(); app.get('/', function(req, res) { res.send('Hello World!'); }); var server = app.listen(3000, function() { console.log('Listening on http://localhost:3000'); }); ## Instruction: Create Mongoose model for Note, and list notes. ## Code After: var app = require('express')(); var db = require('mongoose'); db.connect('mongodb://localhost/caedence_net_development'); var Note = db.model('Note', { title: String, body_html: String, body_text: String, user_id: Number, created_at: { type: Date, default: Date.now }, updated_at: { type: Date, default: Date.now } }); // create some records with mongo cli like this: // $ mongod // > use caedence_net_development // > db.notes.insert({ title: "Hello", body_html: "<p>World</p>", body_text: "World" }) // WriteResult({ "nInserted" : 1 }) // > db.notes.find() // { "_id" : ObjectId("562473c0802fa33ca991217b"), "title" : "Hello", "body_html" : "<p>World</p>", "body_text" : "World" } app.get('/notes/', function(req, res) { Note.find(function(err, notes) { res.json(notes); }); }); app.get('/', function(req, res) { res.send('Hello World!'); }); var server = app.listen(3000, function() { console.log('Listening on http://localhost:3000'); });
5601f5eea7ebc41510c9f24fdf3f0892e6bd78fc
.codecov.yml
.codecov.yml
codecov: # Use `develop` as the default branch branch: develop ignore: - Tests coverage: status: project: default: # Allow coverage to drop by up to 1% without marking a PR with a failing status. threshold: 1 patch: default: # Allow patch to be at least 50% covered without marking a PR with a failing status. target: 50
codecov: # Use `develop` as the default branch branch: develop ignore: - Tests coverage: status: project: default: false base16: paths: "Sources/Base16/" # Allow coverage to drop by up to 1% without marking a PR with a failing status. threshold: 1 base32: paths: "Sources/Base32/" # Allow coverage to drop by up to 1% without marking a PR with a failing status. threshold: 1 patch: default: # Allow patch to be at least 50% covered without marking a PR with a failing status. target: 50
Split coverage contexts for Base16 and Base32
Split coverage contexts for Base16 and Base32
YAML
mit
mattrubin/Bases
yaml
## Code Before: codecov: # Use `develop` as the default branch branch: develop ignore: - Tests coverage: status: project: default: # Allow coverage to drop by up to 1% without marking a PR with a failing status. threshold: 1 patch: default: # Allow patch to be at least 50% covered without marking a PR with a failing status. target: 50 ## Instruction: Split coverage contexts for Base16 and Base32 ## Code After: codecov: # Use `develop` as the default branch branch: develop ignore: - Tests coverage: status: project: default: false base16: paths: "Sources/Base16/" # Allow coverage to drop by up to 1% without marking a PR with a failing status. threshold: 1 base32: paths: "Sources/Base32/" # Allow coverage to drop by up to 1% without marking a PR with a failing status. threshold: 1 patch: default: # Allow patch to be at least 50% covered without marking a PR with a failing status. target: 50
46b00107e90df8f34a9cce5c4b010fdfb88f5f52
shovel/code.py
shovel/code.py
from __future__ import absolute_import, division, print_function from pathlib import Path from isort import SortImports from shovel import task # isort multi_line_output modes GRID = 0 VERTICAL = 1 HANGING_INDENT = 2 VERTICAL_HANGING_INDENT = 3 HANGING_GRID = 4 HANGING_GRID_GROUPED = 5 @task def format_imports(): """Sort imports into a consistent style.""" astrodynamics_dir = Path('astrodynamics') constants_dir = astrodynamics_dir / 'constants' for initfile in astrodynamics_dir.glob('**/__init__.py'): if constants_dir in initfile.parents: continue SortImports(str(initfile), multi_line_output=VERTICAL_HANGING_INDENT, not_skip=['__init__.py']) # Exclude __init__.py # Exclude generated constants/ python files for pyfile in astrodynamics_dir.glob('**/*.py'): if constants_dir in pyfile.parents and pyfile.stem != 'constant': continue SortImports(str(pyfile), multi_line_output=HANGING_GRID, skip=['__init__.py'])
from __future__ import absolute_import, division, print_function from pathlib import Path from isort import SortImports from shovel import task # isort multi_line_output modes GRID = 0 VERTICAL = 1 HANGING_INDENT = 2 VERTICAL_HANGING_INDENT = 3 HANGING_GRID = 4 HANGING_GRID_GROUPED = 5 @task def format_imports(): """Sort imports into a consistent style.""" astrodynamics_dir = Path('astrodynamics') constants_dir = astrodynamics_dir / 'constants' for initfile in astrodynamics_dir.glob('**/__init__.py'): if constants_dir in initfile.parents: continue SortImports(str(initfile), multi_line_output=VERTICAL_HANGING_INDENT, not_skip=['__init__.py']) # Exclude __init__.py # Exclude generated constants/ python files for pyfile in astrodynamics_dir.glob('**/*.py'): if constants_dir in pyfile.parents and pyfile.stem != 'constant': continue SortImports(str(pyfile), multi_line_output=HANGING_GRID, skip=['__init__.py'], known_third_party=['six'])
Add 'six' to known_third_party for SortImports
Add 'six' to known_third_party for SortImports six was being sorted incorrectly due to being classed as first party.
Python
mit
python-astrodynamics/astrodynamics,python-astrodynamics/astrodynamics
python
## Code Before: from __future__ import absolute_import, division, print_function from pathlib import Path from isort import SortImports from shovel import task # isort multi_line_output modes GRID = 0 VERTICAL = 1 HANGING_INDENT = 2 VERTICAL_HANGING_INDENT = 3 HANGING_GRID = 4 HANGING_GRID_GROUPED = 5 @task def format_imports(): """Sort imports into a consistent style.""" astrodynamics_dir = Path('astrodynamics') constants_dir = astrodynamics_dir / 'constants' for initfile in astrodynamics_dir.glob('**/__init__.py'): if constants_dir in initfile.parents: continue SortImports(str(initfile), multi_line_output=VERTICAL_HANGING_INDENT, not_skip=['__init__.py']) # Exclude __init__.py # Exclude generated constants/ python files for pyfile in astrodynamics_dir.glob('**/*.py'): if constants_dir in pyfile.parents and pyfile.stem != 'constant': continue SortImports(str(pyfile), multi_line_output=HANGING_GRID, skip=['__init__.py']) ## Instruction: Add 'six' to known_third_party for SortImports six was being sorted incorrectly due to being classed as first party. ## Code After: from __future__ import absolute_import, division, print_function from pathlib import Path from isort import SortImports from shovel import task # isort multi_line_output modes GRID = 0 VERTICAL = 1 HANGING_INDENT = 2 VERTICAL_HANGING_INDENT = 3 HANGING_GRID = 4 HANGING_GRID_GROUPED = 5 @task def format_imports(): """Sort imports into a consistent style.""" astrodynamics_dir = Path('astrodynamics') constants_dir = astrodynamics_dir / 'constants' for initfile in astrodynamics_dir.glob('**/__init__.py'): if constants_dir in initfile.parents: continue SortImports(str(initfile), multi_line_output=VERTICAL_HANGING_INDENT, not_skip=['__init__.py']) # Exclude __init__.py # Exclude generated constants/ python files for pyfile in astrodynamics_dir.glob('**/*.py'): if constants_dir in pyfile.parents and pyfile.stem != 'constant': continue SortImports(str(pyfile), multi_line_output=HANGING_GRID, skip=['__init__.py'], known_third_party=['six'])
5ea93cec78fa870d9890dd860e1ce47b76eb274c
spec/features/user_creates_species_variation_observation_spec.rb
spec/features/user_creates_species_variation_observation_spec.rb
require 'rails_helper' RSpec.feature 'User creates a species variation observation' do let(:user) { create(:user) } before do create(:plot) sign_in(user) visit new_species_variation_observation_path end scenario 'with valid attributes' do fill_in('Observation date', with: '09/11/2001') select('Plot #1', from: 'Plot') select('Plant Example', from: 'Plant') fill_in('Average height', with: '10.0') fill_in('Average width', with: '10.0') fill_in('Qualitative observations', with: 'Fake description') click_on('Create Species variation observation') expect(page).to have_content('Species variation observation was successfully created') end scenario 'with invalid sample attributes' do click_on('Create Species variation observation') expect(page).to have_content('The form contains 5 errors') end end
require 'rails_helper' RSpec.feature 'User creates a species variation observation' do let(:user) { create(:user) } before do create(:plot) sign_in(user) visit new_species_variation_observation_path end scenario 'with valid attributes' do fill_in('Observation date', with: '09/11/2001') select('Plot #1', from: 'Plot') select('Plant Example', from: 'Plant') fill_in('Average height', with: '10.0') fill_in('Average width', with: '10.0') fill_in('Qualitative observations', with: 'Fake description') click_on('Create Obs.') expect(page).to have_content('Species variation observation was successfully created') end scenario 'with invalid sample attributes' do click_on('Create Obs.') expect(page).to have_content('The form contains 5 errors') end end
Update test to match UI
Update test to match UI
Ruby
mit
osu-cascades/ecotone-web,osu-cascades/ecotone-web,osu-cascades/ecotone-web,osu-cascades/ecotone-web
ruby
## Code Before: require 'rails_helper' RSpec.feature 'User creates a species variation observation' do let(:user) { create(:user) } before do create(:plot) sign_in(user) visit new_species_variation_observation_path end scenario 'with valid attributes' do fill_in('Observation date', with: '09/11/2001') select('Plot #1', from: 'Plot') select('Plant Example', from: 'Plant') fill_in('Average height', with: '10.0') fill_in('Average width', with: '10.0') fill_in('Qualitative observations', with: 'Fake description') click_on('Create Species variation observation') expect(page).to have_content('Species variation observation was successfully created') end scenario 'with invalid sample attributes' do click_on('Create Species variation observation') expect(page).to have_content('The form contains 5 errors') end end ## Instruction: Update test to match UI ## Code After: require 'rails_helper' RSpec.feature 'User creates a species variation observation' do let(:user) { create(:user) } before do create(:plot) sign_in(user) visit new_species_variation_observation_path end scenario 'with valid attributes' do fill_in('Observation date', with: '09/11/2001') select('Plot #1', from: 'Plot') select('Plant Example', from: 'Plant') fill_in('Average height', with: '10.0') fill_in('Average width', with: '10.0') fill_in('Qualitative observations', with: 'Fake description') click_on('Create Obs.') expect(page).to have_content('Species variation observation was successfully created') end scenario 'with invalid sample attributes' do click_on('Create Obs.') expect(page).to have_content('The form contains 5 errors') end end
2c140d48019e52bc03c4984d4b34c9df369501bf
packages/ff/fficxx.yaml
packages/ff/fficxx.yaml
homepage: '' changelog-type: '' hash: 8aadf5449302ecaca2772f5dfa1e8aedf72fe7a4760fec89a6ed6c17d440f666 test-bench-deps: {} maintainer: Ian-Woo Kim <[email protected]> synopsis: automatic C++ binding generation changelog: '' basic-deps: either: -any bytestring: -any split: -any Cabal: -any base: ==4.* unordered-containers: -any text: -any filepath: ! '>1' process: -any pureMD5: -any data-default: -any containers: -any haskell-src-exts: -any lens: ! '>3' mtl: ! '>2' hashable: -any template: -any transformers: ! '>=0.3' errors: -any template-haskell: -any directory: -any all-versions: - '0.1' - '0.1.0' - '0.2' - '0.2.1' - '0.3' - '0.3.1' author: Ian-Woo Kim latest: '0.3.1' description-type: haddock description: automatic C++ binding generation license-name: BSD3
homepage: '' changelog-type: '' hash: eb8f534a4b793998f391dce6705b08edb8d91f11e18f1dbff7045c41edec6456 test-bench-deps: {} maintainer: Ian-Woo Kim <[email protected]> synopsis: automatic C++ binding generation changelog: '' basic-deps: either: -any bytestring: -any split: -any Cabal: -any base: ==4.* unordered-containers: -any text: -any filepath: ! '>1' process: -any pureMD5: -any data-default: -any containers: -any haskell-src-exts: ! '>=1.18' lens: ! '>3' mtl: ! '>2' hashable: -any template: -any transformers: ! '>=0.3' errors: -any template-haskell: -any directory: -any all-versions: - '0.1' - '0.1.0' - '0.2' - '0.2.1' - '0.3' - '0.3.1' - '0.4' author: Ian-Woo Kim latest: '0.4' description-type: haddock description: automatic C++ binding generation license-name: BSD3
Update from Hackage at 2018-06-24T19:20:56Z
Update from Hackage at 2018-06-24T19:20:56Z
YAML
mit
commercialhaskell/all-cabal-metadata
yaml
## Code Before: homepage: '' changelog-type: '' hash: 8aadf5449302ecaca2772f5dfa1e8aedf72fe7a4760fec89a6ed6c17d440f666 test-bench-deps: {} maintainer: Ian-Woo Kim <[email protected]> synopsis: automatic C++ binding generation changelog: '' basic-deps: either: -any bytestring: -any split: -any Cabal: -any base: ==4.* unordered-containers: -any text: -any filepath: ! '>1' process: -any pureMD5: -any data-default: -any containers: -any haskell-src-exts: -any lens: ! '>3' mtl: ! '>2' hashable: -any template: -any transformers: ! '>=0.3' errors: -any template-haskell: -any directory: -any all-versions: - '0.1' - '0.1.0' - '0.2' - '0.2.1' - '0.3' - '0.3.1' author: Ian-Woo Kim latest: '0.3.1' description-type: haddock description: automatic C++ binding generation license-name: BSD3 ## Instruction: Update from Hackage at 2018-06-24T19:20:56Z ## Code After: homepage: '' changelog-type: '' hash: eb8f534a4b793998f391dce6705b08edb8d91f11e18f1dbff7045c41edec6456 test-bench-deps: {} maintainer: Ian-Woo Kim <[email protected]> synopsis: automatic C++ binding generation changelog: '' basic-deps: either: -any bytestring: -any split: -any Cabal: -any base: ==4.* unordered-containers: -any text: -any filepath: ! '>1' process: -any pureMD5: -any data-default: -any containers: -any haskell-src-exts: ! '>=1.18' lens: ! '>3' mtl: ! '>2' hashable: -any template: -any transformers: ! '>=0.3' errors: -any template-haskell: -any directory: -any all-versions: - '0.1' - '0.1.0' - '0.2' - '0.2.1' - '0.3' - '0.3.1' - '0.4' author: Ian-Woo Kim latest: '0.4' description-type: haddock description: automatic C++ binding generation license-name: BSD3
835b8adfb610cdac0233840497f3a1cf9860f946
cerebro/tests/core/test_usecases.py
cerebro/tests/core/test_usecases.py
import unittest import cerebro.core.entities as en import cerebro.core.usecases as uc class TestUseCases(unittest.TestCase): def setUp(self): self.neurons_path = ["./cerebro/neurons"] self.neuron_test = ("system check") self.neuron_test_response = "All working properly." self.command_args = ("arg1", "arg2") self.test_command = en.Command(self.neuron_test, self.command_args) self.error_test = ("asd asdasd ") self.error_test_response = "Sorry, I could not process that." self.error_command = en.Command(self.error_test, self.command_args) self.total_neurons = 2 uc.get_all_neurons(self.neurons_path) def test_get_all_neurons(self): assert len(uc.NEURONS) == self.total_neurons def test_neuron_execution(self): assert uc.NEURONS[self.neuron_test]() == self.neuron_test_response def test_command_execution(self): response = uc.process_command(self.test_command) assert response == self.neuron_test_response def test_command_execution_faliure(self): response = uc.process_command(self.error_command) assert response == self.error_test_response
import unittest import cerebro.core.entities as en import cerebro.core.usecases as uc class TestUseCases(unittest.TestCase): def setUp(self): self.neurons_path = ["./cerebro/neurons"] self.neuron_test = ("system check") self.neuron_test_response = "All working properly." self.command_args = ("arg1", "arg2") self.test_command = en.Command(self.neuron_test, self.command_args) self.total_neurons = 2 uc.get_all_neurons(self.neurons_path) def test_get_all_neurons(self): assert len(uc.NEURONS) == self.total_neurons def test_neuron_execution(self): assert uc.NEURONS[self.neuron_test]() == self.neuron_test_response def test_command_execution(self): response = uc.process_command(self.test_command) assert response == self.neuron_test_response def test_command_execution_faliure(self): error_test = ("asd asdasd ") error_test_response = "Sorry, I could not process that." error_command = en.Command(error_test, self.command_args) response = uc.process_command(error_command) assert response == error_test_response
Test cases changed and minor optimization
Test cases changed and minor optimization
Python
mit
Le-Bot/cerebro
python
## Code Before: import unittest import cerebro.core.entities as en import cerebro.core.usecases as uc class TestUseCases(unittest.TestCase): def setUp(self): self.neurons_path = ["./cerebro/neurons"] self.neuron_test = ("system check") self.neuron_test_response = "All working properly." self.command_args = ("arg1", "arg2") self.test_command = en.Command(self.neuron_test, self.command_args) self.error_test = ("asd asdasd ") self.error_test_response = "Sorry, I could not process that." self.error_command = en.Command(self.error_test, self.command_args) self.total_neurons = 2 uc.get_all_neurons(self.neurons_path) def test_get_all_neurons(self): assert len(uc.NEURONS) == self.total_neurons def test_neuron_execution(self): assert uc.NEURONS[self.neuron_test]() == self.neuron_test_response def test_command_execution(self): response = uc.process_command(self.test_command) assert response == self.neuron_test_response def test_command_execution_faliure(self): response = uc.process_command(self.error_command) assert response == self.error_test_response ## Instruction: Test cases changed and minor optimization ## Code After: import unittest import cerebro.core.entities as en import cerebro.core.usecases as uc class TestUseCases(unittest.TestCase): def setUp(self): self.neurons_path = ["./cerebro/neurons"] self.neuron_test = ("system check") self.neuron_test_response = "All working properly." self.command_args = ("arg1", "arg2") self.test_command = en.Command(self.neuron_test, self.command_args) self.total_neurons = 2 uc.get_all_neurons(self.neurons_path) def test_get_all_neurons(self): assert len(uc.NEURONS) == self.total_neurons def test_neuron_execution(self): assert uc.NEURONS[self.neuron_test]() == self.neuron_test_response def test_command_execution(self): response = uc.process_command(self.test_command) assert response == self.neuron_test_response def test_command_execution_faliure(self): error_test = ("asd asdasd ") error_test_response = "Sorry, I could not process that." error_command = en.Command(error_test, self.command_args) response = uc.process_command(error_command) assert response == error_test_response
93348dd720c8386e94a876554d594a9a06be761f
rust/test/publisher/wpublisher_tests.rs
rust/test/publisher/wpublisher_tests.rs
use wpublisher as TheModule; mod inc;
use wpublisher as TheModule; #[ cfg( feature = "use_std" ) ] mod inc;
Fix tests of utility `wpublisher`, setup tests under features
Fix tests of utility `wpublisher`, setup tests under features
Rust
mit
Wandalen/wTools,Wandalen/wTools
rust
## Code Before: use wpublisher as TheModule; mod inc; ## Instruction: Fix tests of utility `wpublisher`, setup tests under features ## Code After: use wpublisher as TheModule; #[ cfg( feature = "use_std" ) ] mod inc;
616205d5cb608689dbe1848e385324f31214c17e
README.md
README.md
Run `brew upgrade ruby` to upgrade ruby on Mac M1. Run `gem install jekyll; gem install jekyll-paginate` to install jekyll dependencies Run `jekyll s` to view in local host: http://127.0.0.1:4000/
Run `gem install jekyll; gem install jekyll-paginate` to install jekyll dependencies Run `jekyll s` to view in local host: http://127.0.0.1:4000/ # Fix for issue with Ruby and Jekyll on Mac M1 Run `brew upgrade ruby` to upgrade ruby on Mac M1. If you need to have ruby first in your PATH, run: ``` echo 'export PATH="/opt/homebrew/opt/ruby/bin:$PATH"' >> ~/.zshrc ``` For compilers to find ruby you may need to set: ``` export LDFLAGS="-L/opt/homebrew/opt/ruby/lib" export CPPFLAGS="-I/opt/homebrew/opt/ruby/include" ``` For pkg-config to find ruby you may need to set: ``` export PKG_CONFIG_PATH="/opt/homebrew/opt/ruby/lib/pkgconfig" ``` Run `gem install webrick` if seeing `require': cannot load such file -- webrick (LoadError)`. If an old version of Ruby is used even though `which ruby` and `ruby -v` seem correct. Use the specific binary `/opt/homebrew/lib/ruby/gems/3.0.0/bin/jekyll s` instead.
Fix for Ruby with Apple M1
Fix for Ruby with Apple M1 Signed-off-by: Yuan Tang <[email protected]>
Markdown
apache-2.0
terrytangyuan/terrytangyuan.github.com,terrytangyuan/terrytangyuan.github.com
markdown
## Code Before: Run `brew upgrade ruby` to upgrade ruby on Mac M1. Run `gem install jekyll; gem install jekyll-paginate` to install jekyll dependencies Run `jekyll s` to view in local host: http://127.0.0.1:4000/ ## Instruction: Fix for Ruby with Apple M1 Signed-off-by: Yuan Tang <[email protected]> ## Code After: Run `gem install jekyll; gem install jekyll-paginate` to install jekyll dependencies Run `jekyll s` to view in local host: http://127.0.0.1:4000/ # Fix for issue with Ruby and Jekyll on Mac M1 Run `brew upgrade ruby` to upgrade ruby on Mac M1. If you need to have ruby first in your PATH, run: ``` echo 'export PATH="/opt/homebrew/opt/ruby/bin:$PATH"' >> ~/.zshrc ``` For compilers to find ruby you may need to set: ``` export LDFLAGS="-L/opt/homebrew/opt/ruby/lib" export CPPFLAGS="-I/opt/homebrew/opt/ruby/include" ``` For pkg-config to find ruby you may need to set: ``` export PKG_CONFIG_PATH="/opt/homebrew/opt/ruby/lib/pkgconfig" ``` Run `gem install webrick` if seeing `require': cannot load such file -- webrick (LoadError)`. If an old version of Ruby is used even though `which ruby` and `ruby -v` seem correct. Use the specific binary `/opt/homebrew/lib/ruby/gems/3.0.0/bin/jekyll s` instead.
d03467971de5a3a89e7583fcbb485ff6454adbf0
diesel/src/pg/mod.rs
diesel/src/pg/mod.rs
//! Provides types and functions related to working with PostgreSQL //! //! Much of this module is re-exported from database agnostic locations. //! However, if you are writing code specifically to extend Diesel on //! PostgreSQL, you may need to work with this module directly. pub mod expression; pub mod types; #[doc(hidden)] #[cfg(feature = "with-deprecated")] #[deprecated(since = "2.0.0", note = "Use `diesel::upsert` instead")] pub use crate::upsert; mod backend; mod connection; mod metadata_lookup; pub(crate) mod query_builder; pub(crate) mod serialize; mod transaction; mod value; pub use self::backend::{Pg, PgTypeMetadata}; pub use self::connection::PgConnection; pub use self::metadata_lookup::PgMetadataLookup; pub use self::query_builder::DistinctOnClause; pub use self::query_builder::PgQueryBuilder; pub use self::transaction::TransactionBuilder; pub use self::value::PgValue; /// Data structures for PG types which have no corresponding Rust type /// /// Most of these types are used to implement `ToSql` and `FromSql` for higher /// level types. pub mod data_types { #[doc(inline)] pub use super::types::date_and_time::{PgDate, PgInterval, PgTime, PgTimestamp}; #[doc(inline)] pub use super::types::floats::PgNumeric; #[doc(inline)] pub use super::types::money::PgMoney; pub use super::types::money::PgMoney as Cents; }
//! Provides types and functions related to working with PostgreSQL //! //! Much of this module is re-exported from database agnostic locations. //! However, if you are writing code specifically to extend Diesel on //! PostgreSQL, you may need to work with this module directly. pub mod expression; pub mod types; mod backend; mod connection; mod metadata_lookup; pub(crate) mod query_builder; pub(crate) mod serialize; mod transaction; mod value; pub use self::backend::{Pg, PgTypeMetadata}; pub use self::connection::PgConnection; pub use self::metadata_lookup::PgMetadataLookup; pub use self::query_builder::DistinctOnClause; pub use self::query_builder::PgQueryBuilder; pub use self::transaction::TransactionBuilder; pub use self::value::PgValue; #[doc(hidden)] #[cfg(feature = "with-deprecated")] #[deprecated(since = "2.0.0", note = "Use `diesel::upsert` instead")] pub use crate::upsert; /// Data structures for PG types which have no corresponding Rust type /// /// Most of these types are used to implement `ToSql` and `FromSql` for higher /// level types. pub mod data_types { #[doc(inline)] pub use super::types::date_and_time::{PgDate, PgInterval, PgTime, PgTimestamp}; #[doc(inline)] pub use super::types::floats::PgNumeric; #[doc(inline)] pub use super::types::money::PgMoney; pub use super::types::money::PgMoney as Cents; }
Move reexport to the other reexports
Move reexport to the other reexports
Rust
apache-2.0
diesel-rs/diesel,sgrif/diesel,diesel-rs/diesel,sgrif/diesel
rust
## Code Before: //! Provides types and functions related to working with PostgreSQL //! //! Much of this module is re-exported from database agnostic locations. //! However, if you are writing code specifically to extend Diesel on //! PostgreSQL, you may need to work with this module directly. pub mod expression; pub mod types; #[doc(hidden)] #[cfg(feature = "with-deprecated")] #[deprecated(since = "2.0.0", note = "Use `diesel::upsert` instead")] pub use crate::upsert; mod backend; mod connection; mod metadata_lookup; pub(crate) mod query_builder; pub(crate) mod serialize; mod transaction; mod value; pub use self::backend::{Pg, PgTypeMetadata}; pub use self::connection::PgConnection; pub use self::metadata_lookup::PgMetadataLookup; pub use self::query_builder::DistinctOnClause; pub use self::query_builder::PgQueryBuilder; pub use self::transaction::TransactionBuilder; pub use self::value::PgValue; /// Data structures for PG types which have no corresponding Rust type /// /// Most of these types are used to implement `ToSql` and `FromSql` for higher /// level types. pub mod data_types { #[doc(inline)] pub use super::types::date_and_time::{PgDate, PgInterval, PgTime, PgTimestamp}; #[doc(inline)] pub use super::types::floats::PgNumeric; #[doc(inline)] pub use super::types::money::PgMoney; pub use super::types::money::PgMoney as Cents; } ## Instruction: Move reexport to the other reexports ## Code After: //! Provides types and functions related to working with PostgreSQL //! //! Much of this module is re-exported from database agnostic locations. //! However, if you are writing code specifically to extend Diesel on //! PostgreSQL, you may need to work with this module directly. pub mod expression; pub mod types; mod backend; mod connection; mod metadata_lookup; pub(crate) mod query_builder; pub(crate) mod serialize; mod transaction; mod value; pub use self::backend::{Pg, PgTypeMetadata}; pub use self::connection::PgConnection; pub use self::metadata_lookup::PgMetadataLookup; pub use self::query_builder::DistinctOnClause; pub use self::query_builder::PgQueryBuilder; pub use self::transaction::TransactionBuilder; pub use self::value::PgValue; #[doc(hidden)] #[cfg(feature = "with-deprecated")] #[deprecated(since = "2.0.0", note = "Use `diesel::upsert` instead")] pub use crate::upsert; /// Data structures for PG types which have no corresponding Rust type /// /// Most of these types are used to implement `ToSql` and `FromSql` for higher /// level types. pub mod data_types { #[doc(inline)] pub use super::types::date_and_time::{PgDate, PgInterval, PgTime, PgTimestamp}; #[doc(inline)] pub use super::types::floats::PgNumeric; #[doc(inline)] pub use super::types::money::PgMoney; pub use super::types::money::PgMoney as Cents; }
44e977948e0f69d3c055a2455002d0d4906a156b
README.md
README.md
An implementation of a mach-o parser using nom This implementation essentially ignores the existance of 32bit mach objects.
An implementation of a mach-o parser using nom This implementation essentially ignores the existance of 32bit mach objects. As yet it's extremely incomplete, however if you're on OSX you can try: cargo run --example=reader `which cat` To emit the data structure resulting from parsing your `cat` binary
Add a tiny note to readme
Add a tiny note to readme
Markdown
mit
richo/macho-rs,richo/macho-rs
markdown
## Code Before: An implementation of a mach-o parser using nom This implementation essentially ignores the existance of 32bit mach objects. ## Instruction: Add a tiny note to readme ## Code After: An implementation of a mach-o parser using nom This implementation essentially ignores the existance of 32bit mach objects. As yet it's extremely incomplete, however if you're on OSX you can try: cargo run --example=reader `which cat` To emit the data structure resulting from parsing your `cat` binary
01ab23ce67ebd73a11fc3aa0724405e4c27abd57
deployHelp.ps1
deployHelp.ps1
$deployToolsDir = Split-Path ((Get-Variable MyInvocation -Scope 0).Value.MyCommand.Path) if (Test-Path variable:\OctopusParameters) { foreach($kp in $OctopusParameters.GetEnumerator()) { Set-Content ("env:\" + $kp.Key) ($kp.Value) -Force } } if(!(Test-Path "env:\ConfigOnly")) { Set-Content "env:\ConfigOnly" $false } function ensconce { if (@($input).Count -ne 0) { $input.Reset() $results = $input | & "$deployToolsDir\ensconce.exe" $args 2>&1 } else { $results = & "$deployToolsDir\ensconce.exe" $args 2>&1 } if ($LASTEXITCODE -ne 0) { if (Test-Path env:\TEAMCITY_VERSION) { Write-Host "##teamcity[buildStatus status='FAILURE' text='{build.status.text}; Ensconce failure']" $results exit $LASTEXITCODE } else { throw ( "Ensconce operation failed. $results") } } $results } function EnsurePath([string]$name) { $path = $name | ensconce -i if ((Test-Path $path) -eq $False) { md $path } } function is64bit() { return ([IntPtr]::Size -eq 8) }
$deployToolsDir = Split-Path ((Get-Variable MyInvocation -Scope 0).Value.MyCommand.Path) if (Test-Path variable:\OctopusParameters) { foreach($kp in $OctopusParameters.GetEnumerator()) { Set-Content ("env:\" + $kp.Key) ($kp.Value) -Force } } if(!(Test-Path "env:\ConfigOnly")) { Set-Content "env:\ConfigOnly" $false } function ensconce { if (@($input).Count -ne 0) { $input.Reset() $results = $input | & "$deployToolsDir\ensconce.exe" $args 2>&1 } else { $results = & "$deployToolsDir\ensconce.exe" $args 2>&1 } if ($LASTEXITCODE -ne 0) { if (Test-Path env:\TEAMCITY_VERSION) { Write-Host "##teamcity[buildStatus status='FAILURE' text='{build.status.text}; Ensconce failure']" $results exit $LASTEXITCODE } else { throw ( "Ensconce operation failed. $results") } } $results } function EnsurePath([string]$name) { $path = $name | ensconce -i if ((Test-Path $path) -eq $False) { md $path } } function is64bit() { return ( (Get-WmiObject Win32_OperatingSystem).OSArchitecture -eq "64-bit") }
Update of is64bit so it's not dependent on version of Powershell being used
Update of is64bit so it's not dependent on version of Powershell being used
PowerShell
mit
harrisonmeister/Ensconce,ahmed-bakhdher-15below/Ensconce,richard-green/Ensconce,15below/Ensconce,BlythMeister/Ensconce,robert-berry-15below/Ensconce,Yewridge/Ensconce,harrisonmeister/Ensconce,octoberclub/Ensconce,BlythMeister/Ensconce,15below/Ensconce,ahmed-bakhdher-15below/Ensconce,Yewridge/Ensconce,mavnn/Ensconce,octoberclub/Ensconce,mavnn/Ensconce,robert-berry-15below/Ensconce
powershell
## Code Before: $deployToolsDir = Split-Path ((Get-Variable MyInvocation -Scope 0).Value.MyCommand.Path) if (Test-Path variable:\OctopusParameters) { foreach($kp in $OctopusParameters.GetEnumerator()) { Set-Content ("env:\" + $kp.Key) ($kp.Value) -Force } } if(!(Test-Path "env:\ConfigOnly")) { Set-Content "env:\ConfigOnly" $false } function ensconce { if (@($input).Count -ne 0) { $input.Reset() $results = $input | & "$deployToolsDir\ensconce.exe" $args 2>&1 } else { $results = & "$deployToolsDir\ensconce.exe" $args 2>&1 } if ($LASTEXITCODE -ne 0) { if (Test-Path env:\TEAMCITY_VERSION) { Write-Host "##teamcity[buildStatus status='FAILURE' text='{build.status.text}; Ensconce failure']" $results exit $LASTEXITCODE } else { throw ( "Ensconce operation failed. $results") } } $results } function EnsurePath([string]$name) { $path = $name | ensconce -i if ((Test-Path $path) -eq $False) { md $path } } function is64bit() { return ([IntPtr]::Size -eq 8) } ## Instruction: Update of is64bit so it's not dependent on version of Powershell being used ## Code After: $deployToolsDir = Split-Path ((Get-Variable MyInvocation -Scope 0).Value.MyCommand.Path) if (Test-Path variable:\OctopusParameters) { foreach($kp in $OctopusParameters.GetEnumerator()) { Set-Content ("env:\" + $kp.Key) ($kp.Value) -Force } } if(!(Test-Path "env:\ConfigOnly")) { Set-Content "env:\ConfigOnly" $false } function ensconce { if (@($input).Count -ne 0) { $input.Reset() $results = $input | & "$deployToolsDir\ensconce.exe" $args 2>&1 } else { $results = & "$deployToolsDir\ensconce.exe" $args 2>&1 } if ($LASTEXITCODE -ne 0) { if (Test-Path env:\TEAMCITY_VERSION) { Write-Host "##teamcity[buildStatus status='FAILURE' text='{build.status.text}; Ensconce failure']" $results exit $LASTEXITCODE } else { throw ( "Ensconce operation failed. $results") } } $results } function EnsurePath([string]$name) { $path = $name | ensconce -i if ((Test-Path $path) -eq $False) { md $path } } function is64bit() { return ( (Get-WmiObject Win32_OperatingSystem).OSArchitecture -eq "64-bit") }
26c03b575dad5dbe4720b147279bdadde24f0748
packages/postcss-merge-longhand/src/index.js
packages/postcss-merge-longhand/src/index.js
import postcss from 'postcss'; import margin from './lib/decl/margin'; import padding from './lib/decl/padding'; import borders from './lib/decl/borders'; import columns from './lib/decl/columns'; const processors = [ margin, padding, borders, columns, ]; export default postcss.plugin('postcss-merge-longhand', () => { return css => { let abort = false; css.walkRules(rule => { processors.forEach(p => { const res = p.explode(rule); if (res === false) { abort = true; } }); if (abort) { return; } processors.slice().reverse().forEach(p => p.merge(rule)); }); }; });
import postcss from 'postcss'; import margin from './lib/decl/margin'; import padding from './lib/decl/padding'; import borders from './lib/decl/borders'; import columns from './lib/decl/columns'; const processors = [ margin, padding, borders, columns, ]; export default postcss.plugin('postcss-merge-longhand', () => { return css => { css.walkRules(rule => { let abort = false; processors.forEach(p => { const res = p.explode(rule); if (typeof res === 'boolean') { abort = true; } }); if (abort) { return; } processors.slice().reverse().forEach(p => p.merge(rule)); }); }; });
Resolve issue with running plugin on multiple rules.
Resolve issue with running plugin on multiple rules.
JavaScript
mit
ben-eb/cssnano
javascript
## Code Before: import postcss from 'postcss'; import margin from './lib/decl/margin'; import padding from './lib/decl/padding'; import borders from './lib/decl/borders'; import columns from './lib/decl/columns'; const processors = [ margin, padding, borders, columns, ]; export default postcss.plugin('postcss-merge-longhand', () => { return css => { let abort = false; css.walkRules(rule => { processors.forEach(p => { const res = p.explode(rule); if (res === false) { abort = true; } }); if (abort) { return; } processors.slice().reverse().forEach(p => p.merge(rule)); }); }; }); ## Instruction: Resolve issue with running plugin on multiple rules. ## Code After: import postcss from 'postcss'; import margin from './lib/decl/margin'; import padding from './lib/decl/padding'; import borders from './lib/decl/borders'; import columns from './lib/decl/columns'; const processors = [ margin, padding, borders, columns, ]; export default postcss.plugin('postcss-merge-longhand', () => { return css => { css.walkRules(rule => { let abort = false; processors.forEach(p => { const res = p.explode(rule); if (typeof res === 'boolean') { abort = true; } }); if (abort) { return; } processors.slice().reverse().forEach(p => p.merge(rule)); }); }; });
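The fix in this record does two things: it moves the `abort` flag inside the per-rule callback so a rule that cannot be processed no longer short-circuits every rule that follows, and it widens the check from `=== false` to any boolean result. Below is a minimal Python sketch of that flag-scoping idea only; the `SkipEmpty` processor and the rule dictionaries are invented for the example and have nothing to do with PostCSS internals.

```python
class SkipEmpty:
    """Hypothetical processor: explode() returns a bool to say "skip this rule"."""
    def explode(self, rule):
        return False if not rule["text"] else rule["text"].split()

    def merge(self, rule):
        rule["text"] = rule["text"].upper()


def process_rules(rules, processors):
    for rule in rules:
        abort = False  # reset for every rule; the bug was declaring this outside the loop
        for p in processors:
            if isinstance(p.explode(rule), bool):  # any boolean result means "hands off"
                abort = True
        if abort:
            continue  # only this rule is skipped, later rules are still merged
        for p in reversed(processors):
            p.merge(rule)


rules = [{"text": ""}, {"text": "border: 1px solid red"}]
process_rules(rules, [SkipEmpty()])
print(rules)  # the empty rule is left alone, the second one is still processed
```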
f2b4ee16d23e379e8045b0bd2fb6e663de0bf083
test/sg_mailer_test.rb
test/sg_mailer_test.rb
require 'test_helper' class SGMailerTest < Minitest::Test def test_configuration_setups_a_client_with_api_key api_key = 'xxx' SGMailer.configure(api_key: api_key) assert_equal api_key, SGMailer.client.api_key end def test_configuration_for_testing_client SGMailer.configure(test_client: true) assert SGMailer.client.is_a?(SGMailer::TestClient) end end
require 'test_helper' class SGMailerTest < Minitest::Test def setup SGMailer.client = nil end def test_configuration_setups_a_client_with_api_key api_key = 'xxx' SGMailer.configure(api_key: api_key) assert_equal api_key, SGMailer.client.api_key end def test_configuration_for_testing_client SGMailer.configure(test_client: true) assert SGMailer.client.is_a?(SGMailer::TestClient) end def test_sending_without_configuration assert_raises SGMailer::ConfigurationError do SGMailer.send({}) end end end
Test out unconfigured mail sending
Test out unconfigured mail sending
Ruby
mit
gsamokovarov/sg_mailer
ruby
## Code Before: require 'test_helper' class SGMailerTest < Minitest::Test def test_configuration_setups_a_client_with_api_key api_key = 'xxx' SGMailer.configure(api_key: api_key) assert_equal api_key, SGMailer.client.api_key end def test_configuration_for_testing_client SGMailer.configure(test_client: true) assert SGMailer.client.is_a?(SGMailer::TestClient) end end ## Instruction: Test out unconfigured mail sending ## Code After: require 'test_helper' class SGMailerTest < Minitest::Test def setup SGMailer.client = nil end def test_configuration_setups_a_client_with_api_key api_key = 'xxx' SGMailer.configure(api_key: api_key) assert_equal api_key, SGMailer.client.api_key end def test_configuration_for_testing_client SGMailer.configure(test_client: true) assert SGMailer.client.is_a?(SGMailer::TestClient) end def test_sending_without_configuration assert_raises SGMailer::ConfigurationError do SGMailer.send({}) end end end
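Two ideas sit in this Ruby test change: a `setup` hook clears the module-level client so configuration from one test cannot leak into the next, and a new case asserts that sending before any configuration raises a dedicated error. A rough `unittest` analogue in Python is sketched below; the `mailer` namespace, its API and `ConfigurationError` are made up for the illustration rather than taken from the gem.

```python
import unittest


class ConfigurationError(RuntimeError):
    """Raised when mail is sent before a client has been configured."""


class mailer:
    """Stand-in for a module that keeps one global client object."""
    client = None

    @classmethod
    def configure(cls, api_key):
        cls.client = {"api_key": api_key}

    @classmethod
    def send(cls, message):
        if cls.client is None:
            raise ConfigurationError("configure() must be called first")
        return "sent"


class MailerTest(unittest.TestCase):
    def setUp(self):
        mailer.client = None  # wipe shared state so every test starts unconfigured

    def test_configuration_sets_api_key(self):
        mailer.configure("xxx")
        self.assertEqual("xxx", mailer.client["api_key"])

    def test_sending_without_configuration(self):
        with self.assertRaises(ConfigurationError):
            mailer.send({})


if __name__ == "__main__":
    unittest.main()
```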
250972162f264f6a6cf5f0a3f96b3cd9f0d2791b
README.md
README.md
The MIT License (MIT). Please see [License File](LICENSE.md) for more information. ## Installation 1. Install laravel using Composer (eg: to create a project named connexion: `laravel new connexion`) 2. Change to the project folder created and fix permissions on bootstrap and storage folders: ``` sudo chmod -R 777 storage sudo chmod -R 777 bootstrap ``` 3. Check the Laravel installation is running properly before proceeding. 4. Add the connexion package to composer.json by adding a repositories section and requiring the package as follows (note the minimum-stability setting is just until tymon/jwt-auth reaches v 1.0.0 - at the moment it is a RC: ``` "repositories": [ { "type": "git", "url": "https://github.com/bishopm/connexion" } ], "require": { ... "bishopm/connexion": "dev-master" }, "minimum-stability": "RC", ``` 5. Run *composer update* in the project folder, which will pull in the package and its dependencies 6. Add your database credentials to .env 7. In the project folder, run `php artisan connexion:install` follow the prompts to create your first admin user, and you're good to go!
The MIT License (MIT). Please see [License File](LICENSE.md) for more information. ## Installation 1. Install laravel using Composer (eg: to create a project named connexion: `laravel new connexion`) 2. Change to the project folder created and fix permissions on bootstrap and storage folders: ``` sudo chmod -R 777 storage sudo chmod -R 777 bootstrap ``` 3. Check the Laravel installation is running properly before proceeding. 4. Add the connexion package to composer.json by adding a repositories section and requiring the package as follows (note the minimum-stability setting is just until tymon/jwt-auth reaches v 1.0.0 - at the moment it is a RC: ``` "repositories": [ { "type": "git", "url": "https://github.com/bishopm/connexion" } ], "require": { ... "bishopm/connexion": "dev-master" }, "minimum-stability": "RC", ``` 5. Run *composer update* in the project folder, which will pull in the package and its dependencies 6. Add your database credentials to .env 7. Add Bishopm\Connexion\Providers\ConnexionServiceProvider::class at the bottom of the list of providers in config/app.php (We're not using Laravel's package auto-discovery at the moment because it creates problems with some of the package routes) 8. In the project folder, run `php artisan connexion:install` follow the prompts to create your first admin user, and you're good to go!
Include service provider details in Readme
Include service provider details in Readme
Markdown
mit
bishopm/connexion,bishopm/base
markdown
## Code Before: The MIT License (MIT). Please see [License File](LICENSE.md) for more information. ## Installation 1. Install laravel using Composer (eg: to create a project named connexion: `laravel new connexion`) 2. Change to the project folder created and fix permissions on bootstrap and storage folders: ``` sudo chmod -R 777 storage sudo chmod -R 777 bootstrap ``` 3. Check the Laravel installation is running properly before proceeding. 4. Add the connexion package to composer.json by adding a repositories section and requiring the package as follows (note the minimum-stability setting is just until tymon/jwt-auth reaches v 1.0.0 - at the moment it is a RC: ``` "repositories": [ { "type": "git", "url": "https://github.com/bishopm/connexion" } ], "require": { ... "bishopm/connexion": "dev-master" }, "minimum-stability": "RC", ``` 5. Run *composer update* in the project folder, which will pull in the package and its dependencies 6. Add your database credentials to .env 7. In the project folder, run `php artisan connexion:install` follow the prompts to create your first admin user, and you're good to go! ## Instruction: Include service provider details in Readme ## Code After: The MIT License (MIT). Please see [License File](LICENSE.md) for more information. ## Installation 1. Install laravel using Composer (eg: to create a project named connexion: `laravel new connexion`) 2. Change to the project folder created and fix permissions on bootstrap and storage folders: ``` sudo chmod -R 777 storage sudo chmod -R 777 bootstrap ``` 3. Check the Laravel installation is running properly before proceeding. 4. Add the connexion package to composer.json by adding a repositories section and requiring the package as follows (note the minimum-stability setting is just until tymon/jwt-auth reaches v 1.0.0 - at the moment it is a RC: ``` "repositories": [ { "type": "git", "url": "https://github.com/bishopm/connexion" } ], "require": { ... "bishopm/connexion": "dev-master" }, "minimum-stability": "RC", ``` 5. Run *composer update* in the project folder, which will pull in the package and its dependencies 6. Add your database credentials to .env 7. Add Bishopm\Connexion\Providers\ConnexionServiceProvider::class at the bottom of the list of providers in config/app.php (We're not using Laravel's package auto-discovery at the moment because it creates problems with some of the package routes) 8. In the project folder, run `php artisan connexion:install` follow the prompts to create your first admin user, and you're good to go!
d7d44c73ceb6f7a8f7fe1fbc009e28265ee2d86b
spec/support/custom_matchers/response_matchers.rb
spec/support/custom_matchers/response_matchers.rb
RSpec::Matchers.define :respond_with_redirect_to do |path| match do |actual| response = actual.call response.redirect? && response.location =~ /#{path}$/ end end
RSpec::Matchers.define :respond_with_status do |status| match do |block| block.call if Symbol === status if [:success, :missing, :redirect, :error].include?(status) response.send("#{status}?") else code = Rack::Utils::SYMBOL_TO_STATUS_CODE[status] code == response.response_code end else status == response.response_code end end failure_message_for_should do |actual| "expected a #{status} response, but response was #{response.status}" end description do "respond with status" end end class RespondWithRedirectMatcher def initialize(rspec, response, target_path, &target_path_block) @rspec = rspec @response = response @target_path = target_path @target_path_block = target_path_block end def matches?(block) block.call target_path = @target_path_block.try(:call) || @target_path @response.should @rspec.redirect_to(target_path) end def failure_message_for_should "expected a redirect to #{@target_path}" end def description "respond with redirect" end end define_method :respond_with_redirect_to do |*target_paths, &target_path_block| target_path = target_paths.first RespondWithRedirectMatcher.new(self, response, target_path, &target_path_block) end RSpec::Matchers.define :respond_with_template do |template_name| match do |block| block.call response.should render_template(template_name) true end end RSpec::Matchers.define :assign do |*vars| match do |block| block.call vars.all? { |var| assigns(var) } end end RSpec::Matchers.define :set_flash do |type| chain :to do |message| @message = message end match do |block| block.call flash[type].should_not be_nil (flash[type].match(@message)).should be_true if @message end failure_message_for_should do |actual| message = "Expected flash[#{type}] to " if @message message += "match '#{@message}', but was '#{flash[type]}'" else message += "be set, but it was not" end end end
Add custom controller matchers for status
Add custom controller matchers for status
Ruby
mit
orchardpie/nochat
ruby
## Code Before: RSpec::Matchers.define :respond_with_redirect_to do |path| match do |actual| response = actual.call response.redirect? && response.location =~ /#{path}$/ end end ## Instruction: Add custom controller matchers for status ## Code After: RSpec::Matchers.define :respond_with_status do |status| match do |block| block.call if Symbol === status if [:success, :missing, :redirect, :error].include?(status) response.send("#{status}?") else code = Rack::Utils::SYMBOL_TO_STATUS_CODE[status] code == response.response_code end else status == response.response_code end end failure_message_for_should do |actual| "expected a #{status} response, but response was #{response.status}" end description do "respond with status" end end class RespondWithRedirectMatcher def initialize(rspec, response, target_path, &target_path_block) @rspec = rspec @response = response @target_path = target_path @target_path_block = target_path_block end def matches?(block) block.call target_path = @target_path_block.try(:call) || @target_path @response.should @rspec.redirect_to(target_path) end def failure_message_for_should "expected a redirect to #{@target_path}" end def description "respond with redirect" end end define_method :respond_with_redirect_to do |*target_paths, &target_path_block| target_path = target_paths.first RespondWithRedirectMatcher.new(self, response, target_path, &target_path_block) end RSpec::Matchers.define :respond_with_template do |template_name| match do |block| block.call response.should render_template(template_name) true end end RSpec::Matchers.define :assign do |*vars| match do |block| block.call vars.all? { |var| assigns(var) } end end RSpec::Matchers.define :set_flash do |type| chain :to do |message| @message = message end match do |block| block.call flash[type].should_not be_nil (flash[type].match(@message)).should be_true if @message end failure_message_for_should do |actual| message = "Expected flash[#{type}] to " if @message message += "match '#{@message}', but was '#{flash[type]}'" else message += "be set, but it was not" end end end
7e5e82086fc83df07af4802716301ccf139dab47
scripts/MicrosoftFontInstaller.zsh
scripts/MicrosoftFontInstaller.zsh
echo "Attempting to grab Microsoft fonts..." if [ ! -f /usr/local/bin/cabextract || -f /usr/local/bin/mkfontsdir]; then echo "Dependencies missing! Please run \`\$PACKAGE_MANAGER install cabextract mkfontsdir curl\` and try again." exit fi set -e set -x mkdir temp cd temp curl -O http://download.microsoft.com/download/E/6/7/E675FFFC-2A6D-4AB0-B3EB-27C9F8C8F696/PowerPointViewer.exe cabextract -L -F ppviewer.cab PowerPointViewer.exe cabextract ppviewer.cab if [ ! -d ~/fonts ]; then mkdir ~/.fonts fi mv *.TTF ~/.fonts/ cd ~/.fonts mkfontsdir xset fp+ ~/.fonts xset fp rehash echo "Successfully installed Microsoft Fonts"
echo "Attempting to grab Microsoft fonts..." if [ ! -f /usr/local/bin/cabextract || -f /usr/local/bin/mkfontsdir]; then echo "Dependencies missing! Please run \`\$PACKAGE_MANAGER install cabextract mkfontsdir curl\` and try again." exit fi set -e set -x mkdir temp cd temp curl -O http://download.microsoft.com/download/E/6/7/E675FFFC-2A6D-4AB0-B3EB-27C9F8C8F696/PowerPointViewer.exe cabextract -L -F ppviewer.cab PowerPointViewer.exe cabextract ppviewer.cab if [ ! -d ~/fonts ]; then mkdir ~/.fonts fi mv *.TTF ~/.fonts/ cd ~/.fonts mkfontscale mkfontdir xset fp+ ~/.fonts xset fp rehash echo "Successfully installed Microsoft Fonts"
Fix mkfontdir typo in font installer
Fix mkfontdir typo in font installer
Shell
mit
gregstula/unix-config,gregstula-autoreleasepool/unix-config
shell
## Code Before: echo "Attempting to grab Microsoft fonts..." if [ ! -f /usr/local/bin/cabextract || -f /usr/local/bin/mkfontsdir]; then echo "Dependencies missing! Please run \`\$PACKAGE_MANAGER install cabextract mkfontsdir curl\` and try again." exit fi set -e set -x mkdir temp cd temp curl -O http://download.microsoft.com/download/E/6/7/E675FFFC-2A6D-4AB0-B3EB-27C9F8C8F696/PowerPointViewer.exe cabextract -L -F ppviewer.cab PowerPointViewer.exe cabextract ppviewer.cab if [ ! -d ~/fonts ]; then mkdir ~/.fonts fi mv *.TTF ~/.fonts/ cd ~/.fonts mkfontsdir xset fp+ ~/.fonts xset fp rehash echo "Successfully installed Microsoft Fonts" ## Instruction: Fix mkfontdir typo in font installer ## Code After: echo "Attempting to grab Microsoft fonts..." if [ ! -f /usr/local/bin/cabextract || -f /usr/local/bin/mkfontsdir]; then echo "Dependencies missing! Please run \`\$PACKAGE_MANAGER install cabextract mkfontsdir curl\` and try again." exit fi set -e set -x mkdir temp cd temp curl -O http://download.microsoft.com/download/E/6/7/E675FFFC-2A6D-4AB0-B3EB-27C9F8C8F696/PowerPointViewer.exe cabextract -L -F ppviewer.cab PowerPointViewer.exe cabextract ppviewer.cab if [ ! -d ~/fonts ]; then mkdir ~/.fonts fi mv *.TTF ~/.fonts/ cd ~/.fonts mkfontscale mkfontdir xset fp+ ~/.fonts xset fp rehash echo "Successfully installed Microsoft Fonts"
83042027fe74ffe200d0bdaa79b0529af54ae6dc
addons/website/__openerp__.py
addons/website/__openerp__.py
{ 'name': 'Website Builder', 'category': 'Website', 'sequence': 50, 'summary': 'Build Your Enterprise Website', 'website': 'https://www.odoo.com/page/website-builder', 'version': '1.0', 'description': """ Odoo Website CMS =================== """, 'depends': ['base_setup', 'web', 'web_editor', 'web_planner'], 'installable': True, 'data': [ 'data/website_data.xml', 'data/web_planner_data.xml', 'security/ir.model.access.csv', 'security/website_security.xml', 'views/website_templates.xml', 'views/website_navbar_templates.xml', 'views/snippets.xml', 'views/website_views.xml', 'views/res_config_views.xml', 'views/ir_actions_views.xml', 'wizard/base_language_install_views.xml', ], 'demo': [ 'data/website_demo.xml', ], 'qweb': ['static/src/xml/website.backend.xml'], 'application': True, }
{ 'name': 'Website Builder', 'category': 'Website', 'sequence': 50, 'summary': 'Build Your Enterprise Website', 'website': 'https://www.odoo.com/page/website-builder', 'version': '1.0', 'description': """ Odoo Website CMS =================== """, 'depends': ['web', 'web_editor', 'web_planner'], 'installable': True, 'data': [ 'data/website_data.xml', 'data/web_planner_data.xml', 'security/ir.model.access.csv', 'security/website_security.xml', 'views/website_templates.xml', 'views/website_navbar_templates.xml', 'views/snippets.xml', 'views/website_views.xml', 'views/res_config_views.xml', 'views/ir_actions_views.xml', 'wizard/base_language_install_views.xml', ], 'demo': [ 'data/website_demo.xml', ], 'qweb': ['static/src/xml/website.backend.xml'], 'application': True, }
Revert "[FIX] website: add missing module dependency `base_setup`"
Revert "[FIX] website: add missing module dependency `base_setup`" This reverts commit d269eb0eb62d88e02c4fa33b84178d0e73d82ef1. The issue has been fixed in 61f2c90d507645492e1904c1005e8da6253788ea.
Python
agpl-3.0
ygol/odoo,dfang/odoo,hip-odoo/odoo
python
## Code Before: { 'name': 'Website Builder', 'category': 'Website', 'sequence': 50, 'summary': 'Build Your Enterprise Website', 'website': 'https://www.odoo.com/page/website-builder', 'version': '1.0', 'description': """ Odoo Website CMS =================== """, 'depends': ['base_setup', 'web', 'web_editor', 'web_planner'], 'installable': True, 'data': [ 'data/website_data.xml', 'data/web_planner_data.xml', 'security/ir.model.access.csv', 'security/website_security.xml', 'views/website_templates.xml', 'views/website_navbar_templates.xml', 'views/snippets.xml', 'views/website_views.xml', 'views/res_config_views.xml', 'views/ir_actions_views.xml', 'wizard/base_language_install_views.xml', ], 'demo': [ 'data/website_demo.xml', ], 'qweb': ['static/src/xml/website.backend.xml'], 'application': True, } ## Instruction: Revert "[FIX] website: add missing module dependency `base_setup`" This reverts commit d269eb0eb62d88e02c4fa33b84178d0e73d82ef1. The issue has been fixed in 61f2c90d507645492e1904c1005e8da6253788ea. ## Code After: { 'name': 'Website Builder', 'category': 'Website', 'sequence': 50, 'summary': 'Build Your Enterprise Website', 'website': 'https://www.odoo.com/page/website-builder', 'version': '1.0', 'description': """ Odoo Website CMS =================== """, 'depends': ['web', 'web_editor', 'web_planner'], 'installable': True, 'data': [ 'data/website_data.xml', 'data/web_planner_data.xml', 'security/ir.model.access.csv', 'security/website_security.xml', 'views/website_templates.xml', 'views/website_navbar_templates.xml', 'views/snippets.xml', 'views/website_views.xml', 'views/res_config_views.xml', 'views/ir_actions_views.xml', 'wizard/base_language_install_views.xml', ], 'demo': [ 'data/website_demo.xml', ], 'qweb': ['static/src/xml/website.backend.xml'], 'application': True, }
6c9d685019fdd85a8aeaeee0cbbb82472563aac6
src/portable.rs
src/portable.rs
use std::io; use std::path::Path; #[cfg(windows)] use crate::fs::_remove_dir_contents; #[cfg(not(windows))] use crate::unix::_remove_dir_contents; /// Deletes the contents of `dir_path`, but not the directory iteself. /// /// If `dir_path` is a symlink to a directory, deletes the contents /// of that directory. Fails if `dir_path` does not exist. pub fn remove_dir_contents<P: AsRef<Path>>(path: P) -> io::Result<()> { // This wrapper function exists because the core function // for Windows, in crate::fs, returns a PathBuf, which our // caller shouldn't see. _remove_dir_contents(path)?; Ok(()) }
use std::io; use std::path::Path; #[cfg(windows)] use crate::fs::_remove_dir_contents; #[cfg(not(windows))] use crate::unix::_remove_dir_contents; /// Deletes the contents of `dir_path`, but not the directory iteself. /// /// If `dir_path` is a symlink to a directory, deletes the contents /// of that directory. Fails if `dir_path` does not exist. pub fn remove_dir_contents<P: AsRef<Path>>(path: P) -> io::Result<()> { // This wrapper function exists because the core function // for Windows, in crate::fs, returns a PathBuf, which our // caller shouldn't see. _remove_dir_contents(path)?; Ok(()) } #[cfg(test)] mod test { use tempfile::TempDir; use crate::remove_dir_all; use crate::remove_dir_contents; use std::fs::{self, File}; use std::io; fn expect_failure<T>(k: io::ErrorKind, r: io::Result<T>) -> io::Result<()> { match r { Err(e) if e.kind() == k => Ok(()), Err(e) => Err(e), Ok(_) => Err(io::Error::new( io::ErrorKind::Other, "unexpected success".to_string(), )), } } #[test] fn mkdir_rm() -> Result<(), io::Error> { let tmp = TempDir::new()?; let ours = tmp.path().join("t.mkdir"); let file = ours.join("file"); fs::create_dir(&ours)?; File::create(&file)?; File::open(&file)?; expect_failure(io::ErrorKind::Other, remove_dir_contents(&file))?; remove_dir_contents(&ours)?; expect_failure(io::ErrorKind::NotFound, File::open(&file))?; remove_dir_contents(&ours)?; remove_dir_all(&ours)?; expect_failure(io::ErrorKind::NotFound, remove_dir_contents(&ours))?; Ok(()) } }
Add test case for remove_dir_contents
Add test case for remove_dir_contents We check a few error behaviours too. Signed-off-by: Ian Jackson <[email protected]>
Rust
apache-2.0
XAMPPRocky/remove_dir_all
rust
## Code Before: use std::io; use std::path::Path; #[cfg(windows)] use crate::fs::_remove_dir_contents; #[cfg(not(windows))] use crate::unix::_remove_dir_contents; /// Deletes the contents of `dir_path`, but not the directory iteself. /// /// If `dir_path` is a symlink to a directory, deletes the contents /// of that directory. Fails if `dir_path` does not exist. pub fn remove_dir_contents<P: AsRef<Path>>(path: P) -> io::Result<()> { // This wrapper function exists because the core function // for Windows, in crate::fs, returns a PathBuf, which our // caller shouldn't see. _remove_dir_contents(path)?; Ok(()) } ## Instruction: Add test case for remove_dir_contents We check a few error behaviours too. Signed-off-by: Ian Jackson <[email protected]> ## Code After: use std::io; use std::path::Path; #[cfg(windows)] use crate::fs::_remove_dir_contents; #[cfg(not(windows))] use crate::unix::_remove_dir_contents; /// Deletes the contents of `dir_path`, but not the directory iteself. /// /// If `dir_path` is a symlink to a directory, deletes the contents /// of that directory. Fails if `dir_path` does not exist. pub fn remove_dir_contents<P: AsRef<Path>>(path: P) -> io::Result<()> { // This wrapper function exists because the core function // for Windows, in crate::fs, returns a PathBuf, which our // caller shouldn't see. _remove_dir_contents(path)?; Ok(()) } #[cfg(test)] mod test { use tempfile::TempDir; use crate::remove_dir_all; use crate::remove_dir_contents; use std::fs::{self, File}; use std::io; fn expect_failure<T>(k: io::ErrorKind, r: io::Result<T>) -> io::Result<()> { match r { Err(e) if e.kind() == k => Ok(()), Err(e) => Err(e), Ok(_) => Err(io::Error::new( io::ErrorKind::Other, "unexpected success".to_string(), )), } } #[test] fn mkdir_rm() -> Result<(), io::Error> { let tmp = TempDir::new()?; let ours = tmp.path().join("t.mkdir"); let file = ours.join("file"); fs::create_dir(&ours)?; File::create(&file)?; File::open(&file)?; expect_failure(io::ErrorKind::Other, remove_dir_contents(&file))?; remove_dir_contents(&ours)?; expect_failure(io::ErrorKind::NotFound, File::open(&file))?; remove_dir_contents(&ours)?; remove_dir_all(&ours)?; expect_failure(io::ErrorKind::NotFound, remove_dir_contents(&ours))?; Ok(()) } }
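The new Rust test walks through create, delete-contents, delete-again and delete-after-removal, checking the error kind at each step. For readers more at home in Python, here is a comparable standard-library-only walk-through; `remove_dir_contents` below is a hand-rolled stand-in, not a binding to the crate, and the exact exception types can vary a little by platform.

```python
import shutil
import tempfile
from pathlib import Path


def remove_dir_contents(path):
    """Delete everything inside `path` but keep the directory itself."""
    for child in Path(path).iterdir():  # fails on plain files and missing paths
        if child.is_dir() and not child.is_symlink():
            shutil.rmtree(child)
        else:
            child.unlink()


with tempfile.TemporaryDirectory() as tmp:
    ours = Path(tmp) / "t.mkdir"
    file = ours / "file"
    ours.mkdir()
    file.touch()

    try:
        remove_dir_contents(file)       # contents of a plain file: an error
    except NotADirectoryError:
        pass

    remove_dir_contents(ours)           # the file is gone, the directory remains
    assert not file.exists() and ours.exists()

    remove_dir_contents(ours)           # emptying an already-empty directory is fine
    ours.rmdir()
    try:
        remove_dir_contents(ours)       # the directory itself no longer exists
    except FileNotFoundError:
        pass
```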
022f2cc6d067769a6c8e56601c0238aac69ec9ab
jfr_playoff/settings.py
jfr_playoff/settings.py
import glob, json, os, readline, sys def complete_filename(text, state): return (glob.glob(text+'*')+[None])[state] class PlayoffSettings: def __init__(self): self.interactive = False self.settings_file = None if len(sys.argv) > 1: self.settings_file = sys.argv[1] else: self.interactive = True def load(self): if self.settings_file is None: readline.set_completer_delims(' \t\n;') readline.parse_and_bind("tab: complete") readline.set_completer(complete_filename) self.settings_file = raw_input('JSON settings file: ') self.settings = json.load(open(self.settings_file)) def has_section(self, key): self.load() return key in self.settings def get(self, *keys): self.load() section = self.settings for key in keys: section = section[key] return section
import glob, json, os, readline, sys def complete_filename(text, state): return (glob.glob(text+'*')+[None])[state] class PlayoffSettings: def __init__(self): self.settings = None self.interactive = False self.settings_file = None if len(sys.argv) > 1: self.settings_file = sys.argv[1] else: self.interactive = True def load(self): if self.settings_file is None: readline.set_completer_delims(' \t\n;') readline.parse_and_bind("tab: complete") readline.set_completer(complete_filename) self.settings_file = raw_input('JSON settings file: ') if self.settings is None: self.settings = json.load(open(self.settings_file)) def has_section(self, key): self.load() return key in self.settings def get(self, *keys): self.load() section = self.settings for key in keys: section = section[key] return section
Load config file only once
Load config file only once
Python
bsd-2-clause
emkael/jfrteamy-playoff
python
## Code Before: import glob, json, os, readline, sys def complete_filename(text, state): return (glob.glob(text+'*')+[None])[state] class PlayoffSettings: def __init__(self): self.interactive = False self.settings_file = None if len(sys.argv) > 1: self.settings_file = sys.argv[1] else: self.interactive = True def load(self): if self.settings_file is None: readline.set_completer_delims(' \t\n;') readline.parse_and_bind("tab: complete") readline.set_completer(complete_filename) self.settings_file = raw_input('JSON settings file: ') self.settings = json.load(open(self.settings_file)) def has_section(self, key): self.load() return key in self.settings def get(self, *keys): self.load() section = self.settings for key in keys: section = section[key] return section ## Instruction: Load config file only once ## Code After: import glob, json, os, readline, sys def complete_filename(text, state): return (glob.glob(text+'*')+[None])[state] class PlayoffSettings: def __init__(self): self.settings = None self.interactive = False self.settings_file = None if len(sys.argv) > 1: self.settings_file = sys.argv[1] else: self.interactive = True def load(self): if self.settings_file is None: readline.set_completer_delims(' \t\n;') readline.parse_and_bind("tab: complete") readline.set_completer(complete_filename) self.settings_file = raw_input('JSON settings file: ') if self.settings is None: self.settings = json.load(open(self.settings_file)) def has_section(self, key): self.load() return key in self.settings def get(self, *keys): self.load() section = self.settings for key in keys: section = section[key] return section
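The guard added in this record ("parse the JSON only if it has not been parsed yet") is ordinary lazy initialization. Since the record is already Python, a stripped-down sketch of the same idea follows; the class is trimmed to the caching behaviour and is not a drop-in replacement for `PlayoffSettings`.

```python
import json


class LazySettings:
    def __init__(self, path):
        self.path = path
        self._data = None           # cache of the parsed file, filled on first use

    def load(self):
        if self._data is None:      # the file is opened and parsed at most once
            with open(self.path) as handle:
                self._data = json.load(handle)
        return self._data

    def get(self, *keys):
        section = self.load()       # every accessor funnels through the cache
        for key in keys:
            section = section[key]
        return section
```

Repeated `get()` calls now cost one file read in total instead of one per call, which is exactly what the `if self.settings is None:` check buys in the record above.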
ffb6faee2dbc27d04ad42e68693bb972108e65e1
source/assets/stylesheets/blog.css.scss
source/assets/stylesheets/blog.css.scss
@import "settings"; ul.index { list-style-type:none; margin-left: 0; li { display:block; padding:0.5rem 0; &:first-child { padding-top:0; } a { font-weight: bold; } } li:first-child {padding-top:0;} } .index-post { padding-bottom: 0rem; margin-bottom: 1rem; border: none; .comments { margin-top: 1rem; } } article { @include base-font; header { @include base-font; margin-bottom: 2.25rem; } h3,h4,h5 { margin-top: 3rem; margin-bottom: 1.111rem; } p { color: $mainColor; letter-spacing: 0.15px; line-height: 1.75; } p:last-child { margin-bottom: 0; } .icon-image { width: 10%; &.inline { width: 3%; } } } .profile { padding-bottom: 2rem; text-align: center; img { width: 30%; } .profile-name { font-weight: bold; text-transform: uppercase; } }
@import "settings"; ul.index { list-style-type:none; margin-left: 0; li { display:block; padding:0.5rem 0; &:first-child { padding-top:0; } a { font-weight: bold; } } li:first-child {padding-top:0;} } .index-post { padding-bottom: 0rem; margin-bottom: 1rem; border: none; .comments { margin-top: 1rem; } } article { @include base-font; header { @include base-font; margin-bottom: 2.25rem; } h3,h4,h5 { margin-top: 3rem; margin-bottom: 1.111rem; } &.index-post { h3,h4,h5 { margin-top: 1.111rem; } } p { color: $mainColor; letter-spacing: 0.15px; line-height: 1.75; } p:last-child { margin-bottom: 0; } .icon-image { width: 10%; &.inline { width: 3%; } } } .profile { padding-bottom: 2rem; text-align: center; img { width: 30%; } .profile-name { font-weight: bold; text-transform: uppercase; } }
Adjust blog index header margin
Adjust blog index header margin
SCSS
mit
rossta/rossta.github.com
scss
## Code Before: @import "settings"; ul.index { list-style-type:none; margin-left: 0; li { display:block; padding:0.5rem 0; &:first-child { padding-top:0; } a { font-weight: bold; } } li:first-child {padding-top:0;} } .index-post { padding-bottom: 0rem; margin-bottom: 1rem; border: none; .comments { margin-top: 1rem; } } article { @include base-font; header { @include base-font; margin-bottom: 2.25rem; } h3,h4,h5 { margin-top: 3rem; margin-bottom: 1.111rem; } p { color: $mainColor; letter-spacing: 0.15px; line-height: 1.75; } p:last-child { margin-bottom: 0; } .icon-image { width: 10%; &.inline { width: 3%; } } } .profile { padding-bottom: 2rem; text-align: center; img { width: 30%; } .profile-name { font-weight: bold; text-transform: uppercase; } } ## Instruction: Adjust blog index header margin ## Code After: @import "settings"; ul.index { list-style-type:none; margin-left: 0; li { display:block; padding:0.5rem 0; &:first-child { padding-top:0; } a { font-weight: bold; } } li:first-child {padding-top:0;} } .index-post { padding-bottom: 0rem; margin-bottom: 1rem; border: none; .comments { margin-top: 1rem; } } article { @include base-font; header { @include base-font; margin-bottom: 2.25rem; } h3,h4,h5 { margin-top: 3rem; margin-bottom: 1.111rem; } &.index-post { h3,h4,h5 { margin-top: 1.111rem; } } p { color: $mainColor; letter-spacing: 0.15px; line-height: 1.75; } p:last-child { margin-bottom: 0; } .icon-image { width: 10%; &.inline { width: 3%; } } } .profile { padding-bottom: 2rem; text-align: center; img { width: 30%; } .profile-name { font-weight: bold; text-transform: uppercase; } }
8bfc8b3af551ff1b934d75c006fb47baefa3f57a
.travis.yml
.travis.yml
language: java cache: directories: - $HOME/.gradle/caches/modules-2/ - $HOME/.gradle/wrapper/ - $HOME/.m2/ - plugin-maven/build/localMavenRepository/ before_cache: - rm -f $HOME/.gradle/caches/modules-2/modules-2.lock - rm -fr plugin-maven/build/localMavenRepository/com/diffplug/spotless/ env: - NODE_VERSION="6.10.2" before_install: - nvm install $NODE_VERSION install: true jdk: - openjdk8 - openjdk11 jobs: include: - stage: test jdk: openjdk8 script: ./gradlew build --build-cache && ./gradlew npmTest --build-cache - stage: test jdk: openjdk11 script: ./gradlew build --build-cache && ./gradlew npmTest --build-cache
language: java jdk: - openjdk8 - openjdk11 env: - NODE_VERSION="6.10.2" before_install: - nvm install $NODE_VERSION - bash -c "$(curl -fsSL https://raw.githubusercontent.com/ZacSweers/check-gradle-checksums/c8dc2ae0756a8041e240cdc6fa6c38c256dfeab0/check-gradle-checksums.sh)" install: true script: - ./gradlew build --build-cache && ./gradlew npmTest --build-cache before_cache: - rm -f $HOME/.gradle/caches/modules-2/modules-2.lock - rm -fr plugin-maven/build/localMavenRepository/com/diffplug/spotless/ cache: directories: - $HOME/.gradle/caches/modules-2/ - $HOME/.gradle/wrapper/ - $HOME/.m2/ - plugin-maven/build/localMavenRepository/
Fix extra tests in Travis.
Fix extra tests in Travis.
YAML
apache-2.0
diffplug/spotless
yaml
## Code Before: language: java cache: directories: - $HOME/.gradle/caches/modules-2/ - $HOME/.gradle/wrapper/ - $HOME/.m2/ - plugin-maven/build/localMavenRepository/ before_cache: - rm -f $HOME/.gradle/caches/modules-2/modules-2.lock - rm -fr plugin-maven/build/localMavenRepository/com/diffplug/spotless/ env: - NODE_VERSION="6.10.2" before_install: - nvm install $NODE_VERSION install: true jdk: - openjdk8 - openjdk11 jobs: include: - stage: test jdk: openjdk8 script: ./gradlew build --build-cache && ./gradlew npmTest --build-cache - stage: test jdk: openjdk11 script: ./gradlew build --build-cache && ./gradlew npmTest --build-cache ## Instruction: Fix extra tests in Travis. ## Code After: language: java jdk: - openjdk8 - openjdk11 env: - NODE_VERSION="6.10.2" before_install: - nvm install $NODE_VERSION - bash -c "$(curl -fsSL https://raw.githubusercontent.com/ZacSweers/check-gradle-checksums/c8dc2ae0756a8041e240cdc6fa6c38c256dfeab0/check-gradle-checksums.sh)" install: true script: - ./gradlew build --build-cache && ./gradlew npmTest --build-cache before_cache: - rm -f $HOME/.gradle/caches/modules-2/modules-2.lock - rm -fr plugin-maven/build/localMavenRepository/com/diffplug/spotless/ cache: directories: - $HOME/.gradle/caches/modules-2/ - $HOME/.gradle/wrapper/ - $HOME/.m2/ - plugin-maven/build/localMavenRepository/
709d387db96b8c9891d68085ed958d4a002d0047
includes/Words.php
includes/Words.php
<?php namespace SpellingDictionary; class Words { public function addWord() { $dbw = Database::getConnection( DB_MASTER ); } }
<?php namespace SpellingDictionary; class Words { static function addWord( $formData ) { // $user = $this->getUser(); global $wgSpellingDictionaryDatabase; $dbw = wfGetDB( DB_MASTER, array(), $wgSpellingDictionaryDatabase ); $user = "ABC"; $values = array( 'sd_word' => $formData['word'], 'sd_language' => $formData['language'], 'sd_user' => $user, 'sd_timestamp' => $dbw->timestamp(), ); $dbw->insert( 'spell_dict_word_list', $values, __METHOD__ ); return true; // return 'Try again'; } }
Insert the word submitted into the table
Insert the word submitted into the table -Presently used a dummy variable as user, to be changed to the current session user
PHP
lgpl-2.1
ankitashukla/mediawiki-spelling-dictionary,wikimedia/mediawiki-extensions-SpellingDictionary
php
## Code Before: <?php namespace SpellingDictionary; class Words { public function addWord() { $dbw = Database::getConnection( DB_MASTER ); } } ## Instruction: Insert the word submitted into the table -Presently used a dummy variable as user, to be changed to the current session user ## Code After: <?php namespace SpellingDictionary; class Words { static function addWord( $formData ) { // $user = $this->getUser(); global $wgSpellingDictionaryDatabase; $dbw = wfGetDB( DB_MASTER, array(), $wgSpellingDictionaryDatabase ); $user = "ABC"; $values = array( 'sd_word' => $formData['word'], 'sd_language' => $formData['language'], 'sd_user' => $user, 'sd_timestamp' => $dbw->timestamp(), ); $dbw->insert( 'spell_dict_word_list', $values, __METHOD__ ); return true; // return 'Try again'; } }
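The PHP method above collects the form fields plus user and timestamp into a values array and lets the database layer bind them. The same parameterized-insert shape in Python, using the standard sqlite3 module, is shown below; the table and column names are copied from the record, while the connection and sample data are illustrative only.

```python
import sqlite3
from datetime import datetime, timezone

conn = sqlite3.connect(":memory:")
conn.execute(
    "CREATE TABLE spell_dict_word_list "
    "(sd_word TEXT, sd_language TEXT, sd_user TEXT, sd_timestamp TEXT)"
)


def add_word(form_data, user):
    values = (
        form_data["word"],
        form_data["language"],
        user,
        datetime.now(timezone.utc).isoformat(),
    )
    # Placeholders keep the submitted values out of the SQL string itself.
    conn.execute("INSERT INTO spell_dict_word_list VALUES (?, ?, ?, ?)", values)
    conn.commit()
    return True


add_word({"word": "colour", "language": "en-GB"}, "ABC")
print(conn.execute("SELECT sd_word, sd_user FROM spell_dict_word_list").fetchall())
```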
bb55b287101d82a70446552bb8fea7eb51a6837d
build/filter.sh
build/filter.sh
git clone https://github.com/geramirez/concourse-filter pushd concourse-filter go build exec &> >(./concourse-filter) popd
go install github.com/geramirez/concourse-filter export CREDENTIAL_FILTER_WHITELIST=`env | cut -d '=' -f 1 | grep -v '^_$' | xargs echo | tr ' ' ','` exec &> >($GOPATH/bin/concourse-filter)
Create initial Concourse Filter whitelist from docker bootup env vars
Create initial Concourse Filter whitelist from docker bootup env vars [#120319485] Signed-off-by: James Wen <[email protected]>
Shell
apache-2.0
cloudfoundry/buildpacks-ci,orange-cloudfoundry/buildpacks-ci
shell
## Code Before: git clone https://github.com/geramirez/concourse-filter pushd concourse-filter go build exec &> >(./concourse-filter) popd ## Instruction: Create initial Concourse Filter whitelist from docker bootup env vars [#120319485] Signed-off-by: James Wen <[email protected]> ## Code After: go install github.com/geramirez/concourse-filter export CREDENTIAL_FILTER_WHITELIST=`env | cut -d '=' -f 1 | grep -v '^_$' | xargs echo | tr ' ' ','` exec &> >($GOPATH/bin/concourse-filter)
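The new shell line builds `CREDENTIAL_FILTER_WHITELIST` from the names of every environment variable present at start-up, dropping the shell's `_` placeholder. If the pipeline of `cut`, `grep`, `xargs` and `tr` is hard to read, this is the same transformation in a few lines of Python (the ordering of the names may differ, which should not matter for a whitelist):

```python
import os

# Take the *names* of the variables in the current environment, skip the
# shell's "_" placeholder, and join them into one comma-separated string.
whitelist = ",".join(name for name in os.environ if name != "_")
os.environ["CREDENTIAL_FILTER_WHITELIST"] = whitelist
print(whitelist)
```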
b423e55f7b47a73fcfae11880f75244ef7bf4ae8
content/messenger-overlay.js
content/messenger-overlay.js
/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ (function (aGlobal) { var Cc = Components.classes; var Ci = Components.interfaces; var process = Cc["@mozilla.org/process/util;1"] .createInstance(Ci.nsIProcess); var file = Cc["@mozilla.org/file/local;1"] .createInstance(Ci.nsILocalFile); const kIEPath = "C:\\Program Files\\Internet Explorer\\iexplore.exe"; const kChromePath = "C:\\Program Files (x86)\\Google\\Chrome\\Application\\chrome.exe"; var SwitchLinkExternalHandler = { startIE: function startIE() { this.startExternalProcess(kIEPath); }, startChrome: function startChrome() { this.startExternalProcess(kChromePath); }, startExternalProcess: function startExternalProcess(aPath) { file.initWithPath(aPath); process.init(file); var args = ["https://dev.mozilla.jp"]; process.run(false, args, args.length); }, run: function run() { }, }; aGlobal.SwitchLinkExternalHandler = SwitchLinkExternalHandler; })(this);
/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ (function (aGlobal) { var Cc = Components.classes; var Ci = Components.interfaces; const kIEPath = "C:\\Program Files\\Internet Explorer\\iexplore.exe"; const kChromePath = "C:\\Program Files (x86)\\Google\\Chrome\\Application\\chrome.exe"; var SwitchLinkExternalHandler = { startIE: function startIE() { this.startExternalProcess(kIEPath); }, startChrome: function startChrome() { this.startExternalProcess(kChromePath); }, startExternalProcess: function startExternalProcess(aPath) { var process = Cc["@mozilla.org/process/util;1"] .createInstance(Ci.nsIProcess); var file = Cc["@mozilla.org/file/local;1"] .createInstance(Ci.nsILocalFile); file.initWithPath(aPath); process.init(file); var args = ["https://dev.mozilla.jp"]; process.run(false, args, args.length); }, run: function run() { }, }; aGlobal.SwitchLinkExternalHandler = SwitchLinkExternalHandler; })(this);
Create file and process instance every time
Create file and process instance every time
JavaScript
mpl-2.0
clear-code/tb-switch-link-external-handler
javascript
## Code Before: /* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ (function (aGlobal) { var Cc = Components.classes; var Ci = Components.interfaces; var process = Cc["@mozilla.org/process/util;1"] .createInstance(Ci.nsIProcess); var file = Cc["@mozilla.org/file/local;1"] .createInstance(Ci.nsILocalFile); const kIEPath = "C:\\Program Files\\Internet Explorer\\iexplore.exe"; const kChromePath = "C:\\Program Files (x86)\\Google\\Chrome\\Application\\chrome.exe"; var SwitchLinkExternalHandler = { startIE: function startIE() { this.startExternalProcess(kIEPath); }, startChrome: function startChrome() { this.startExternalProcess(kChromePath); }, startExternalProcess: function startExternalProcess(aPath) { file.initWithPath(aPath); process.init(file); var args = ["https://dev.mozilla.jp"]; process.run(false, args, args.length); }, run: function run() { }, }; aGlobal.SwitchLinkExternalHandler = SwitchLinkExternalHandler; })(this); ## Instruction: Create file and process instance every time ## Code After: /* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ (function (aGlobal) { var Cc = Components.classes; var Ci = Components.interfaces; const kIEPath = "C:\\Program Files\\Internet Explorer\\iexplore.exe"; const kChromePath = "C:\\Program Files (x86)\\Google\\Chrome\\Application\\chrome.exe"; var SwitchLinkExternalHandler = { startIE: function startIE() { this.startExternalProcess(kIEPath); }, startChrome: function startChrome() { this.startExternalProcess(kChromePath); }, startExternalProcess: function startExternalProcess(aPath) { var process = Cc["@mozilla.org/process/util;1"] .createInstance(Ci.nsIProcess); var file = Cc["@mozilla.org/file/local;1"] .createInstance(Ci.nsILocalFile); file.initWithPath(aPath); process.init(file); var args = ["https://dev.mozilla.jp"]; process.run(false, args, args.length); }, run: function run() { }, }; aGlobal.SwitchLinkExternalHandler = SwitchLinkExternalHandler; })(this);
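The Thunderbird overlay change stops caching a single `nsIProcess`/`nsILocalFile` pair at load time and instead creates fresh instances inside `startExternalProcess`, so one launch cannot inherit state from the previous one. A loose Python analogue of that per-call construction is sketched here; the browser paths are copied from the record, while the launcher function itself is hypothetical.

```python
import subprocess

BROWSERS = {
    "ie": r"C:\Program Files\Internet Explorer\iexplore.exe",
    "chrome": r"C:\Program Files (x86)\Google\Chrome\Application\chrome.exe",
}


def start_external_process(browser, url):
    # A fresh process handle per call: nothing (arguments, state, exit status)
    # is carried over from an earlier launch.
    return subprocess.Popen([BROWSERS[browser], url])
```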
0a6819597d794df7c570e41ab348d02bf7b09497
thinkvim.d/plugins.yaml
thinkvim.d/plugins.yaml
- repo: terryma/vim-multiple-cursors lazy: 0 - repo: Konfekt/FastFold lazy: 0 - repo: m42e/arxml.vim lazy: 0 - repo: dkprice/vim-easygrep lazy: 0 - repo: tpope/vim-scriptease lazy: 0 - repo: weirongxu/plantuml-previewer.vim lazy: 0 - repo: tyru/open-browser.vim lazy: 0 - repo: aklt/plantuml-syntax lazy: 0 - repo: gu-fan/riv.vim lazy: 0
- repo: terryma/vim-multiple-cursors lazy: 0 - repo: Konfekt/FastFold lazy: 0 - repo: m42e/arxml.vim lazy: 0 - repo: dkprice/vim-easygrep lazy: 0 - repo: tpope/vim-scriptease lazy: 0 - repo: weirongxu/plantuml-previewer.vim rev: 994689a lazy: 0 - repo: tyru/open-browser.vim lazy: 0 - repo: aklt/plantuml-syntax lazy: 0 - repo: gu-fan/riv.vim lazy: 0
Set specific Git commit for plantuml_previewer
Set specific Git commit for plantuml_previewer File dumping location is broken due to the next commit after HEAD, see https://github.com/weirongxu/plantuml-previewer.vim/issues/24
YAML
mit
ubmarco/dotfiles
yaml
## Code Before: - repo: terryma/vim-multiple-cursors lazy: 0 - repo: Konfekt/FastFold lazy: 0 - repo: m42e/arxml.vim lazy: 0 - repo: dkprice/vim-easygrep lazy: 0 - repo: tpope/vim-scriptease lazy: 0 - repo: weirongxu/plantuml-previewer.vim lazy: 0 - repo: tyru/open-browser.vim lazy: 0 - repo: aklt/plantuml-syntax lazy: 0 - repo: gu-fan/riv.vim lazy: 0 ## Instruction: Set specific Git commit for plantuml_previewer File dumping location is broken due to the next commit after HEAD, see https://github.com/weirongxu/plantuml-previewer.vim/issues/24 ## Code After: - repo: terryma/vim-multiple-cursors lazy: 0 - repo: Konfekt/FastFold lazy: 0 - repo: m42e/arxml.vim lazy: 0 - repo: dkprice/vim-easygrep lazy: 0 - repo: tpope/vim-scriptease lazy: 0 - repo: weirongxu/plantuml-previewer.vim rev: 994689a lazy: 0 - repo: tyru/open-browser.vim lazy: 0 - repo: aklt/plantuml-syntax lazy: 0 - repo: gu-fan/riv.vim lazy: 0
63a8d3a10a55a5668fb16974e1567693be655cfa
.bp-config/options.json
.bp-config/options.json
{ "WEB_SERVER": "nginx", "LIBDIR": ".", "WEBDIR": "web", "NGINX_VERSION": "{NGINX_16_LATEST}", "PHP_MODULES": ["pear"], "PHP_EXTENSIONS": ["bz2", "zlib", "curl", "mcrypt", "mongo"] }
{ "WEB_SERVER": "nginx", "LIBDIR": ".", "WEBDIR": "web", "NGINX_VERSION": "{NGINX_16_LATEST}", "PHP_MODULES": ["pear"], "PHP_EXTENSIONS": ["bz2", "zlib", "curl", "mcrypt", "mongo", "pdo", "pdo_mysql", "mysql", "zip", "fileinfo"] }
Add missing libs needed by new buildpack.
Add missing libs needed by new buildpack.
JSON
apache-2.0
bbreck3/dsp-core,neroxing/dsp-core
json
## Code Before: { "WEB_SERVER": "nginx", "LIBDIR": ".", "WEBDIR": "web", "NGINX_VERSION": "{NGINX_16_LATEST}", "PHP_MODULES": ["pear"], "PHP_EXTENSIONS": ["bz2", "zlib", "curl", "mcrypt", "mongo"] } ## Instruction: Add missing libs needed by new buildpack. ## Code After: { "WEB_SERVER": "nginx", "LIBDIR": ".", "WEBDIR": "web", "NGINX_VERSION": "{NGINX_16_LATEST}", "PHP_MODULES": ["pear"], "PHP_EXTENSIONS": ["bz2", "zlib", "curl", "mcrypt", "mongo", "pdo", "pdo_mysql", "mysql", "zip", "fileinfo"] }
cd27849acae57a0382f66116771491576177a39e
setup.py
setup.py
from distutils.core import setup __VERSION__ = '0.2.4' long_description = "See https://furtive.readthedocs.org" setup(name='Furtive', version=__VERSION__, description='File Integrity Verification System', author='Derrick Bryant', author_email='[email protected]', long_description=long_description, license='MIT', packages=['furtive'], scripts=['scripts/furtive'], url='https://furtive.readthedocs.org', install_requires=[ 'PyYAML==3.11', 'argparse==1.4.0' ] )
from distutils.core import setup __VERSION__ = '0.2.4' long_description = """ Github: https://github.com/dbryant4/furtive """ setup(name='Furtive', version=__VERSION__, description='File Integrity Verification System', author='Derrick Bryant', author_email='[email protected]', long_description=long_description, license='MIT', packages=['furtive'], scripts=['scripts/furtive'], url='https://furtive.readthedocs.org', download_url='https://github.com/dbryant4/furtive', install_requires=[ 'PyYAML==3.11', 'argparse==1.4.0' ] )
Add links to github project page
Add links to github project page
Python
mit
dbryant4/furtive
python
## Code Before: from distutils.core import setup __VERSION__ = '0.2.4' long_description = "See https://furtive.readthedocs.org" setup(name='Furtive', version=__VERSION__, description='File Integrity Verification System', author='Derrick Bryant', author_email='[email protected]', long_description=long_description, license='MIT', packages=['furtive'], scripts=['scripts/furtive'], url='https://furtive.readthedocs.org', install_requires=[ 'PyYAML==3.11', 'argparse==1.4.0' ] ) ## Instruction: Add links to github project page ## Code After: from distutils.core import setup __VERSION__ = '0.2.4' long_description = """ Github: https://github.com/dbryant4/furtive """ setup(name='Furtive', version=__VERSION__, description='File Integrity Verification System', author='Derrick Bryant', author_email='[email protected]', long_description=long_description, license='MIT', packages=['furtive'], scripts=['scripts/furtive'], url='https://furtive.readthedocs.org', download_url='https://github.com/dbryant4/furtive', install_requires=[ 'PyYAML==3.11', 'argparse==1.4.0' ] )
f09b5411d905187f0fb70977cbc53da158095eb9
.travis.yml
.travis.yml
language: php php: - 5.5 - 5.6 script: - composer install - phpunit after_success: - curl -OL https://squizlabs.github.io/PHP_CodeSniffer/phpcs.phar - php phpcs.phar -n --standard=PSR1,PSR2 src/
language: php php: - 5.5 - 5.6 script: - composer install - composer require codeclimate/php-test-reporter - phpunit --coverage-clover build/logs/clover.xml after_success: - curl -OL https://squizlabs.github.io/PHP_CodeSniffer/phpcs.phar - php phpcs.phar -n --standard=PSR1,PSR2 src/
Revert "Removing code climate coverage."
Revert "Removing code climate coverage." This reverts commit 1cfb62ea2a9e6cc0c0d476759c3bf79c67a9a13c.
YAML
mit
tailwindsllc/modus,modusphp/framework
yaml
## Code Before: language: php php: - 5.5 - 5.6 script: - composer install - phpunit after_success: - curl -OL https://squizlabs.github.io/PHP_CodeSniffer/phpcs.phar - php phpcs.phar -n --standard=PSR1,PSR2 src/ ## Instruction: Revert "Removing code climate coverage." This reverts commit 1cfb62ea2a9e6cc0c0d476759c3bf79c67a9a13c. ## Code After: language: php php: - 5.5 - 5.6 script: - composer install - composer require codeclimate/php-test-reporter - phpunit --coverage-clover build/logs/clover.xml after_success: - curl -OL https://squizlabs.github.io/PHP_CodeSniffer/phpcs.phar - php phpcs.phar -n --standard=PSR1,PSR2 src/
c313edcdb9334bf0621a7ce62823c60a5b238d62
src/main/java/it/reply/orchestrator/service/commands/GetSlam.java
src/main/java/it/reply/orchestrator/service/commands/GetSlam.java
package it.reply.orchestrator.service.commands; import it.reply.orchestrator.dto.CloudProvider; import it.reply.orchestrator.dto.RankCloudProvidersMessage; import it.reply.orchestrator.dto.slam.Service; import it.reply.orchestrator.dto.slam.Sla; import it.reply.orchestrator.service.SlamService; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; @Component public class GetSlam extends BaseRankCloudProvidersCommand { @Autowired private SlamService slamService; @Override protected RankCloudProvidersMessage customExecute(RankCloudProvidersMessage rankCloudProvidersMessage) { rankCloudProvidersMessage.setSlamPreferences(slamService.getCustomerPreferences()); // Get VO (customer) preferences and SLAs (infer available Cloud Providers from it) for (Sla sla : rankCloudProvidersMessage.getSlamPreferences().getSla()) { // Create Cloud Provider, add to the list CloudProvider cp = new CloudProvider(sla.getProvider()); rankCloudProvidersMessage.getCloudProviders().put(sla.getProvider(), cp); // Get provider's services for (Service service : sla.getServices()) { cp.getCmdbProviderServices().put(service.getServiceId(), null); } } return rankCloudProvidersMessage; } }
package it.reply.orchestrator.service.commands; import it.reply.orchestrator.dto.CloudProvider; import it.reply.orchestrator.dto.RankCloudProvidersMessage; import it.reply.orchestrator.dto.slam.Service; import it.reply.orchestrator.dto.slam.Sla; import it.reply.orchestrator.service.SlamService; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; @Component public class GetSlam extends BaseRankCloudProvidersCommand { @Autowired private SlamService slamService; @Override protected RankCloudProvidersMessage customExecute( RankCloudProvidersMessage rankCloudProvidersMessage) { rankCloudProvidersMessage.setSlamPreferences(slamService.getCustomerPreferences()); // Get VO (customer) preferences and SLAs (infer available Cloud Providers from it) for (Sla sla : rankCloudProvidersMessage.getSlamPreferences().getSla()) { // Create Cloud Provider, add to the list CloudProvider cp = rankCloudProvidersMessage.getCloudProviders().get(sla.getProvider()); if (cp == null) { cp = new CloudProvider(sla.getProvider()); rankCloudProvidersMessage.getCloudProviders().put(sla.getProvider(), cp); } // Get provider's services for (Service service : sla.getServices()) { cp.getCmdbProviderServices().put(service.getServiceId(), null); } } return rankCloudProvidersMessage; } }
Support multiple SLAs for cloud providers
Support multiple SLAs for cloud providers Fixes #110
Java
apache-2.0
indigo-dc/orchestrator
java
## Code Before: package it.reply.orchestrator.service.commands; import it.reply.orchestrator.dto.CloudProvider; import it.reply.orchestrator.dto.RankCloudProvidersMessage; import it.reply.orchestrator.dto.slam.Service; import it.reply.orchestrator.dto.slam.Sla; import it.reply.orchestrator.service.SlamService; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; @Component public class GetSlam extends BaseRankCloudProvidersCommand { @Autowired private SlamService slamService; @Override protected RankCloudProvidersMessage customExecute(RankCloudProvidersMessage rankCloudProvidersMessage) { rankCloudProvidersMessage.setSlamPreferences(slamService.getCustomerPreferences()); // Get VO (customer) preferences and SLAs (infer available Cloud Providers from it) for (Sla sla : rankCloudProvidersMessage.getSlamPreferences().getSla()) { // Create Cloud Provider, add to the list CloudProvider cp = new CloudProvider(sla.getProvider()); rankCloudProvidersMessage.getCloudProviders().put(sla.getProvider(), cp); // Get provider's services for (Service service : sla.getServices()) { cp.getCmdbProviderServices().put(service.getServiceId(), null); } } return rankCloudProvidersMessage; } } ## Instruction: Support multiple SLAs for cloud providers Fixes #110 ## Code After: package it.reply.orchestrator.service.commands; import it.reply.orchestrator.dto.CloudProvider; import it.reply.orchestrator.dto.RankCloudProvidersMessage; import it.reply.orchestrator.dto.slam.Service; import it.reply.orchestrator.dto.slam.Sla; import it.reply.orchestrator.service.SlamService; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; @Component public class GetSlam extends BaseRankCloudProvidersCommand { @Autowired private SlamService slamService; @Override protected RankCloudProvidersMessage customExecute( RankCloudProvidersMessage rankCloudProvidersMessage) { rankCloudProvidersMessage.setSlamPreferences(slamService.getCustomerPreferences()); // Get VO (customer) preferences and SLAs (infer available Cloud Providers from it) for (Sla sla : rankCloudProvidersMessage.getSlamPreferences().getSla()) { // Create Cloud Provider, add to the list CloudProvider cp = rankCloudProvidersMessage.getCloudProviders().get(sla.getProvider()); if (cp == null) { cp = new CloudProvider(sla.getProvider()); rankCloudProvidersMessage.getCloudProviders().put(sla.getProvider(), cp); } // Get provider's services for (Service service : sla.getServices()) { cp.getCmdbProviderServices().put(service.getServiceId(), null); } } return rankCloudProvidersMessage; } }
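The Java fix looks a provider up in the map before constructing it, so two SLAs that point at the same cloud provider end up sharing one entry instead of the second overwriting the first. In Python that get-or-create step collapses to `dict.setdefault`; the provider and service names in the sketch below are invented.

```python
def collect_providers(slas):
    """Group SLA service ids under a single entry per cloud provider."""
    providers = {}
    for sla in slas:
        # Reuse the provider entry if an earlier SLA already created it.
        services = providers.setdefault(sla["provider"], {})
        for service_id in sla["services"]:
            services[service_id] = None  # details are filled in by a later CMDB lookup
    return providers


slas = [
    {"provider": "provider-A", "services": ["compute-1"]},
    {"provider": "provider-A", "services": ["storage-1"]},  # second SLA, same provider
    {"provider": "provider-B", "services": ["compute-2"]},
]
print(collect_providers(slas))
# {'provider-A': {'compute-1': None, 'storage-1': None}, 'provider-B': {'compute-2': None}}
```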
6625d0312d4772e70677dc296e23f50ee675cd69
settings.rb
settings.rb
DB = Sequel.connect("postgres://antifa:[email protected]/antifa") CACHE_CLIENT = Dalli::Client.new( '127.0.0.1:11211', :value_max_bytes => 5242880 ) SPHINX = Sequel.connect("mysql2://127.0.0.1/sphinx?port=9306") SPHINX_T = "doc1"
DB = Sequel.connect("postgres://antifa:[email protected]/antifa") CACHE_CLIENT = Dalli::Client.new( '127.0.0.1:11211', :value_max_bytes => 5242880, :namespace => "antifa" ) SPHINX = Sequel.connect("mysql2://127.0.0.1/sphinx?port=9306") SPHINX_T = "doc1"
Add configurable namespace for memcached client
Add configurable namespace for memcached client
Ruby
bsd-2-clause
gnwp/zalgo_v3
ruby
## Code Before: DB = Sequel.connect("postgres://antifa:[email protected]/antifa") CACHE_CLIENT = Dalli::Client.new( '127.0.0.1:11211', :value_max_bytes => 5242880 ) SPHINX = Sequel.connect("mysql2://127.0.0.1/sphinx?port=9306") SPHINX_T = "doc1" ## Instruction: Add configurable namespace for memcached client ## Code After: DB = Sequel.connect("postgres://antifa:[email protected]/antifa") CACHE_CLIENT = Dalli::Client.new( '127.0.0.1:11211', :value_max_bytes => 5242880, :namespace => "antifa" ) SPHINX = Sequel.connect("mysql2://127.0.0.1/sphinx?port=9306") SPHINX_T = "doc1"
e0b983b6bde8025884edc7b78d65b0c938ffc4b0
catalog/Active_Record_Plugins/rails_search.yml
catalog/Active_Record_Plugins/rails_search.yml
name: Rails Search description: projects: - acts_as_ferret - acts_as_fulltextable - acts_as_indexed - acts_as_solr - acts_as_solr_reloaded - delsolr - elasticsearch - elasticsearch-rails - elastictastic - ferret - pg_search - redis-search - rsolr - ruby_simple_search - scoped_search - searchkick - searchlight - slingshot-rb - solr_query - sunspot - sunspot_rails - sunspot_solr - texticle - thinking-sphinx - tire - xapian_db - xapit
name: Rails Search description: projects: - acts_as_ferret - acts_as_fulltextable - acts_as_indexed - acts_as_solr - acts_as_solr_reloaded - chewy - delsolr - elasticsearch - elasticsearch-rails - elastictastic - ferret - pg_search - redis-search - rsolr - ruby_simple_search - scoped_search - searchkick - searchlight - slingshot-rb - solr_query - sunspot - sunspot_rails - sunspot_solr - texticle - thinking-sphinx - tire - xapian_db - xapit
Add chewy to Rails search
Add chewy to Rails search
YAML
mit
rubytoolbox/catalog
yaml
## Code Before:
name: Rails Search
description:
projects:
  - acts_as_ferret
  - acts_as_fulltextable
  - acts_as_indexed
  - acts_as_solr
  - acts_as_solr_reloaded
  - delsolr
  - elasticsearch
  - elasticsearch-rails
  - elastictastic
  - ferret
  - pg_search
  - redis-search
  - rsolr
  - ruby_simple_search
  - scoped_search
  - searchkick
  - searchlight
  - slingshot-rb
  - solr_query
  - sunspot
  - sunspot_rails
  - sunspot_solr
  - texticle
  - thinking-sphinx
  - tire
  - xapian_db
  - xapit
## Instruction:
Add chewy to Rails search
## Code After:
name: Rails Search
description:
projects:
  - acts_as_ferret
  - acts_as_fulltextable
  - acts_as_indexed
  - acts_as_solr
  - acts_as_solr_reloaded
  - chewy
  - delsolr
  - elasticsearch
  - elasticsearch-rails
  - elastictastic
  - ferret
  - pg_search
  - redis-search
  - rsolr
  - ruby_simple_search
  - scoped_search
  - searchkick
  - searchlight
  - slingshot-rb
  - solr_query
  - sunspot
  - sunspot_rails
  - sunspot_solr
  - texticle
  - thinking-sphinx
  - tire
  - xapian_db
  - xapit
e9fecf28334d9822dde0b67fcd064c2490ac6c97
src/Sandbox/AdminBundle/Resources/views/CRUD/tree.html.twig
src/Sandbox/AdminBundle/Resources/views/CRUD/tree.html.twig
<script type="text/javascript"> $(document).ready(function() { initTree({ "selector": "#tree", "path": { "expanded": [ "/cms" ], "preloaded": [ "/cms" ], }, "icon": { "document": "{{ asset('bundles/symfonycmftree/images/document.png') }}", "folder": "{{ asset('bundles/symfonycmftree/images/folder.png') }}" }, "route": { "create": 'admin_sandbox_main_editablestaticcontent_create', "edit": 'admin_sandbox_main_editablestaticcontent_edit', "erase": 'admin_sandbox_main_editablestaticcontent_delete' } }); }); </script>
<script type="text/javascript"> $(document).ready(function() { initTree({ "selector": "#tree", "path": { "expanded": [ "/cms" ], "preloaded": [ "/cms" ], }, "icon": { "document": "{{ asset('bundles/symfonycmftree/images/document.png') }}", "folder": "{{ asset('bundles/symfonycmftree/images/folder.png') }}" }, "route": { "edit": 'admin_sandbox_main_editablestaticcontent_edit', "erase": 'admin_sandbox_main_editablestaticcontent_delete' }, "doctypes": { "blogpost": { "label": 'BlogPost', "action": function (node) { window.location = Routing.generate('admin_sandbox_main_editablestaticcontent_create', { "type": "blogpost" }) } }, "static": { "label": 'StaticContent', "action": function (node) { window.location = Routing.generate('admin_sandbox_main_editablestaticcontent_create', { "type": "static" }) } } } }); }); </script>
Document creation actions are now in a submenu and injected.
Document creation actions are now in a submenu and injected.
Twig
mit
jerram/symfony-cmf,Ma27/cmf-sandbox,revollat/cmf-sandbox,h4cc/zacd-cms,Ma27/cmf-sandbox,damz/platform-sandbox,revollat/cmf-sandbox,foopang/cmf-sandbox,maveric/noname,damz/platform-sandbox,foopang/cmf-sandbox,Ma27/cmf-sandbox,damz/platform-sandbox,revollat/cmf-sandbox,ElectricMaxxx/cmf-sandbox,jerram/symfony-cmf,damz/platform-sandbox,jerram/symfony-cmf,maveric/noname,peterkokot/cmf-sandbox,maveric/noname,foopang/cmf-sandbox,revollat/cmf-sandbox,foopang/cmf-sandbox,ElectricMaxxx/cmf-sandbox,jerram/symfony-cmf,h4cc/zacd-cms,peterkokot/cmf-sandbox,maveric/noname,revollat/cmf-sandbox,ElectricMaxxx/cmf-sandbox,h4cc/zacd-cms,Ma27/cmf-sandbox,h4cc/zacd-cms,ElectricMaxxx/cmf-sandbox,ElectricMaxxx/cmf-sandbox,peterkokot/cmf-sandbox,Ma27/cmf-sandbox,foopang/cmf-sandbox,peterkokot/cmf-sandbox
twig
## Code Before:
<script type="text/javascript">
    $(document).ready(function() {
        initTree({
            "selector": "#tree",
            "path": {
                "expanded": [ "/cms" ],
                "preloaded": [ "/cms" ],
            },
            "icon": {
                "document": "{{ asset('bundles/symfonycmftree/images/document.png') }}",
                "folder": "{{ asset('bundles/symfonycmftree/images/folder.png') }}"
            },
            "route": {
                "create": 'admin_sandbox_main_editablestaticcontent_create',
                "edit": 'admin_sandbox_main_editablestaticcontent_edit',
                "erase": 'admin_sandbox_main_editablestaticcontent_delete'
            }
        });
    });
</script>
## Instruction:
Document creation actions are now in a submenu and injected.
## Code After:
<script type="text/javascript">
    $(document).ready(function() {
        initTree({
            "selector": "#tree",
            "path": {
                "expanded": [ "/cms" ],
                "preloaded": [ "/cms" ],
            },
            "icon": {
                "document": "{{ asset('bundles/symfonycmftree/images/document.png') }}",
                "folder": "{{ asset('bundles/symfonycmftree/images/folder.png') }}"
            },
            "route": {
                "edit": 'admin_sandbox_main_editablestaticcontent_edit',
                "erase": 'admin_sandbox_main_editablestaticcontent_delete'
            },
            "doctypes": {
                "blogpost": {
                    "label": 'BlogPost',
                    "action": function (node) {
                        window.location = Routing.generate('admin_sandbox_main_editablestaticcontent_create', { "type": "blogpost" })
                    }
                },
                "static": {
                    "label": 'StaticContent',
                    "action": function (node) {
                        window.location = Routing.generate('admin_sandbox_main_editablestaticcontent_create', { "type": "static" })
                    }
                }
            }
        });
    });
</script>
2f30071dea69368b6ec5c9f7bd80bb8035a79b74
server/models.js
server/models.js
var mongoose = require( 'mongoose' ), Schema = mongoose.Schema; var schemas = { user: { _id: String, password: String, }, page: { _id: Number, title: String, author: String, dates: { created: Date, updated: Date } } } var checkModel = function( name, schema ) { if( mongoose.models[name] ) { return mongoose.model( name ); } else { return mongoose.model( name, schema ); } } var getModel = function( name ) { var fields = schemas[name]; var me = new Schema( fields, { versionKey: false }); return checkModel( name, me ); } module.exports = getModel;
var mongoose = require( 'mongoose' ), Schema = mongoose.Schema; var schemas = { user: { _id: String, password: String, } } var checkModel = function( name, schema ) { if( mongoose.models[name] ) { return mongoose.model( name ); } else { return mongoose.model( name, schema ); } } var getModel = function( name ) { var fields = schemas[name]; var me = new Schema( fields, { versionKey: false }); return checkModel( name, me ); } module.exports = getModel;
Remove API model for pages
Remove API model for pages
JavaScript
mit
muffin/server,kunni80/server
javascript
## Code Before:
var mongoose = require( 'mongoose' ),
    Schema = mongoose.Schema;

var schemas = {
    user: {
        _id: String,
        password: String,
    },
    page: {
        _id: Number,
        title: String,
        author: String,
        dates: {
            created: Date,
            updated: Date
        }
    }
}

var checkModel = function( name, schema ) {
    if( mongoose.models[name] ) {
        return mongoose.model( name );
    } else {
        return mongoose.model( name, schema );
    }
}

var getModel = function( name ) {
    var fields = schemas[name];
    var me = new Schema( fields, { versionKey: false });
    return checkModel( name, me );
}

module.exports = getModel;
## Instruction:
Remove API model for pages
## Code After:
var mongoose = require( 'mongoose' ),
    Schema = mongoose.Schema;

var schemas = {
    user: {
        _id: String,
        password: String,
    }
}

var checkModel = function( name, schema ) {
    if( mongoose.models[name] ) {
        return mongoose.model( name );
    } else {
        return mongoose.model( name, schema );
    }
}

var getModel = function( name ) {
    var fields = schemas[name];
    var me = new Schema( fields, { versionKey: false });
    return checkModel( name, me );
}

module.exports = getModel;
593e379d82d891e046db70bb467050f079ef9fb0
frontend/app/templates/components/task-notifications.hbs
frontend/app/templates/components/task-notifications.hbs
{{#if showForm}} {{#bs-form class="task-notifications" formLayout="horizontal" horizontalLabelGridClass="col-md-2" action=onSave}} {{bs-form-element label=(t 'tasks.notifications.to') value="@responsibles" disabled=true}} {{bs-form-element label=(t 'tasks.notifications.cc') value=task.notificationEmail disabled=showing }} {{bs-form-element label=(t 'tasks.notifications.subject') value=task.notificationSubject disabled=showing}} {{bs-form-element label=(t 'tasks.notifications.body') controlType="textarea" value=task.notificationBody disabled=showing}} {{#if editing }} <div class="btn-toolbar"> {{bs-button defaultText=(t 'tasks.notifications.save.button') type="primary" buttonType="submit" class="pull-right"}} {{bs-button defaultText=(t 'tasks.notifications.cancel.button') type="default" action=onCancel class="pull-right"}} </div> {{/if}} {{/bs-form}} {{/if}} {{#unless showForm }} {{#bs-alert type="info"}} {{ t 'tasks.notifications.empty.message' }} {{/bs-alert}} {{#bs-button type="primary" action=onEdit}} {{ t 'tasks.notifications.empty.button' }} {{/bs-button}} {{/unless}}
{{#if showForm}} {{#bs-form class="task-notifications" formLayout="horizontal" horizontalLabelGridClass="col-md-2" action=onSave}} {{#if task.turns }} {{bs-form-element label=(t 'tasks.notifications.to') value="@responsibles" disabled=true}} {{/if}} {{bs-form-element label=(t (concat 'tasks.notifications.' (if task.turns 'cc' 'to'))) value=task.notificationEmail disabled=showing }} {{bs-form-element label=(t 'tasks.notifications.subject') value=task.notificationSubject disabled=showing}} {{bs-form-element label=(t 'tasks.notifications.body') controlType="textarea" value=task.notificationBody disabled=showing}} {{#if editing }} <div class="btn-toolbar"> {{bs-button defaultText=(t 'tasks.notifications.save.button') type="primary" buttonType="submit" class="pull-right"}} {{bs-button defaultText=(t 'tasks.notifications.cancel.button') type="default" action=onCancel class="pull-right"}} </div> {{/if}} {{/bs-form}} {{/if}} {{#unless showForm }} {{#bs-alert type="info"}} {{ t 'tasks.notifications.empty.message' }} {{/bs-alert}} {{#bs-button type="primary" action=onEdit}} {{ t 'tasks.notifications.empty.button' }} {{/bs-button}} {{/unless}}
Change email_notification label when task has not turns
Change email_notification label when task has not turns Closes #62
Handlebars
agpl-3.0
singularities/circular-works,singularities/circular-work,singularities/circular-works,singularities/circular-works,singularities/circular-work,singularities/circular-work
handlebars
## Code Before:
{{#if showForm}}
  {{#bs-form class="task-notifications" formLayout="horizontal" horizontalLabelGridClass="col-md-2" action=onSave}}

    {{bs-form-element label=(t 'tasks.notifications.to') value="@responsibles" disabled=true}}
    {{bs-form-element label=(t 'tasks.notifications.cc') value=task.notificationEmail disabled=showing }}
    {{bs-form-element label=(t 'tasks.notifications.subject') value=task.notificationSubject disabled=showing}}
    {{bs-form-element label=(t 'tasks.notifications.body') controlType="textarea" value=task.notificationBody disabled=showing}}

    {{#if editing }}
      <div class="btn-toolbar">
        {{bs-button defaultText=(t 'tasks.notifications.save.button') type="primary" buttonType="submit" class="pull-right"}}
        {{bs-button defaultText=(t 'tasks.notifications.cancel.button') type="default" action=onCancel class="pull-right"}}
      </div>
    {{/if}}

  {{/bs-form}}
{{/if}}

{{#unless showForm }}
  {{#bs-alert type="info"}}
    {{ t 'tasks.notifications.empty.message' }}
  {{/bs-alert}}

  {{#bs-button type="primary" action=onEdit}}
    {{ t 'tasks.notifications.empty.button' }}
  {{/bs-button}}
{{/unless}}
## Instruction:
Change email_notification label when task has not turns

Closes #62
## Code After:
{{#if showForm}}
  {{#bs-form class="task-notifications" formLayout="horizontal" horizontalLabelGridClass="col-md-2" action=onSave}}

    {{#if task.turns }}
      {{bs-form-element label=(t 'tasks.notifications.to') value="@responsibles" disabled=true}}
    {{/if}}
    {{bs-form-element label=(t (concat 'tasks.notifications.' (if task.turns 'cc' 'to'))) value=task.notificationEmail disabled=showing }}
    {{bs-form-element label=(t 'tasks.notifications.subject') value=task.notificationSubject disabled=showing}}
    {{bs-form-element label=(t 'tasks.notifications.body') controlType="textarea" value=task.notificationBody disabled=showing}}

    {{#if editing }}
      <div class="btn-toolbar">
        {{bs-button defaultText=(t 'tasks.notifications.save.button') type="primary" buttonType="submit" class="pull-right"}}
        {{bs-button defaultText=(t 'tasks.notifications.cancel.button') type="default" action=onCancel class="pull-right"}}
      </div>
    {{/if}}

  {{/bs-form}}
{{/if}}

{{#unless showForm }}
  {{#bs-alert type="info"}}
    {{ t 'tasks.notifications.empty.message' }}
  {{/bs-alert}}

  {{#bs-button type="primary" action=onEdit}}
    {{ t 'tasks.notifications.empty.button' }}
  {{/bs-button}}
{{/unless}}
c66c264af7203074d083b25271ae038ae65d758e
docs/how_to_use_it.md
docs/how_to_use_it.md
These instructions utilize the tarball created in [how_to_build_it.md](./how_to_build_it.md), copied to the home directory of a user system. 1. Install run-time dependencies: * Ubuntu 14.04 ``` sudo apt-get install libqt5xmlpatterns5 sudo apt-get install libsecret-1-0 ``` * OpenSUSE 13.2 ``` sudo zypper install libQt5WebKitWidgets5 sudo zypper install libQt5Xml5 ``` * CentOS 7 ``` wget http://dl.fedoraproject.org/pub/epel/7/x86_64/e/epel-release-7-5.noarch.rpm sudo rpm -ivh epel-release-7-5.noarch.rpm sudo yum --enablerepo=epel install qt5-qtwebkit ``` 2. Unpack the tarball: ``` cd ~ mkdir ~/sample_apps tar -xzf ~/sample_apps.tar.gz -C ~/sample_apps ``` 3. Run the apps: ``` cd ~/sample_apps export LD_LIBRARY_PATH=`pwd` ./rms_sample # RMS sample ./rmsauth_sample # auth sample ```
NOTE: A setup workaround is needed at this time. Follow these steps before running your application: - Install the Powershell cmdlets from http://www.microsoft.com/en-us/download/details.aspx?id=30339 (see prerequisites here - https://technet.microsoft.com/library/jj585012.aspx) - Open an elevated Powershell window and run the following commands: ``` Import-Module AADRM Connect-AadrmService (type-in tenant admin credentials) Enable-AadrmDevicePlatform –Web ``` These instructions utilize the tarball created in [how_to_build_it.md](./how_to_build_it.md), copied to the home directory of a user system. 1. Install run-time dependencies: * Ubuntu 14.04 ``` sudo apt-get install libqt5xmlpatterns5 sudo apt-get install libsecret-1-0 ``` * OpenSUSE 13.2 ``` sudo zypper install libQt5WebKitWidgets5 sudo zypper install libQt5Xml5 ``` * CentOS 7 ``` wget http://dl.fedoraproject.org/pub/epel/7/x86_64/e/epel-release-7-5.noarch.rpm sudo rpm -ivh epel-release-7-5.noarch.rpm sudo yum --enablerepo=epel install qt5-qtwebkit ``` 2. Unpack the tarball: ``` cd ~ mkdir ~/sample_apps tar -xzf ~/sample_apps.tar.gz -C ~/sample_apps ``` 3. Run the apps: ``` cd ~/sample_apps export LD_LIBRARY_PATH=`pwd` ./rms_sample # RMS sample ./rmsauth_sample # auth sample ```
Add work around steps for connecting to AADRM service
Add work around steps for connecting to AADRM service
Markdown
mit
AzureAD/rms-sdk-for-cpp,AzureAD/rms-sdk-for-cpp,AzureAD/rms-sdk-for-cpp
markdown
## Code Before:
These instructions utilize the tarball created in [how_to_build_it.md](./how_to_build_it.md), copied to the home directory of a user system.

1. Install run-time dependencies:

  * Ubuntu 14.04

    ```
    sudo apt-get install libqt5xmlpatterns5
    sudo apt-get install libsecret-1-0
    ```

  * OpenSUSE 13.2

    ```
    sudo zypper install libQt5WebKitWidgets5
    sudo zypper install libQt5Xml5
    ```

  * CentOS 7

    ```
    wget http://dl.fedoraproject.org/pub/epel/7/x86_64/e/epel-release-7-5.noarch.rpm
    sudo rpm -ivh epel-release-7-5.noarch.rpm
    sudo yum --enablerepo=epel install qt5-qtwebkit
    ```

2. Unpack the tarball:

  ```
  cd ~
  mkdir ~/sample_apps
  tar -xzf ~/sample_apps.tar.gz -C ~/sample_apps
  ```

3. Run the apps:

  ```
  cd ~/sample_apps
  export LD_LIBRARY_PATH=`pwd`
  ./rms_sample # RMS sample
  ./rmsauth_sample # auth sample
  ```
## Instruction:
Add work around steps for connecting to AADRM service
## Code After:
NOTE: A setup workaround is needed at this time. Follow these steps before running your application:

- Install the Powershell cmdlets from http://www.microsoft.com/en-us/download/details.aspx?id=30339 (see prerequisites here - https://technet.microsoft.com/library/jj585012.aspx)
- Open an elevated Powershell window and run the following commands:

  ```
  Import-Module AADRM
  Connect-AadrmService (type-in tenant admin credentials)
  Enable-AadrmDevicePlatform –Web
  ```

These instructions utilize the tarball created in [how_to_build_it.md](./how_to_build_it.md), copied to the home directory of a user system.

1. Install run-time dependencies:

  * Ubuntu 14.04

    ```
    sudo apt-get install libqt5xmlpatterns5
    sudo apt-get install libsecret-1-0
    ```

  * OpenSUSE 13.2

    ```
    sudo zypper install libQt5WebKitWidgets5
    sudo zypper install libQt5Xml5
    ```

  * CentOS 7

    ```
    wget http://dl.fedoraproject.org/pub/epel/7/x86_64/e/epel-release-7-5.noarch.rpm
    sudo rpm -ivh epel-release-7-5.noarch.rpm
    sudo yum --enablerepo=epel install qt5-qtwebkit
    ```

2. Unpack the tarball:

  ```
  cd ~
  mkdir ~/sample_apps
  tar -xzf ~/sample_apps.tar.gz -C ~/sample_apps
  ```

3. Run the apps:

  ```
  cd ~/sample_apps
  export LD_LIBRARY_PATH=`pwd`
  ./rms_sample # RMS sample
  ./rmsauth_sample # auth sample
  ```
d50c13bd4216e0496bcf7d67ae7f77db4b099d0f
CMakeLists.txt
CMakeLists.txt
cmake_minimum_required(VERSION 2.8) project(bootstrap-dht-bot) set(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} ${CMAKE_CURRENT_SOURCE_DIR}/cmake/Modules/) set(CMAKE_CXX_FLAGS "-std=c++11 -Wall -Werror -Wno-deprecated-declarations -DBOOST_ASIO_DYN_LINK ${CMAKE_CXX_FLAGS}") find_package(Boost REQUIRED COMPONENTS system) find_package(LibTorrentRasterbar REQUIRED) find_package(Threads REQUIRED) set(SOURCES main.cpp) include_directories( ${CMAKE_BINARY_DIR}/include ${Boost_INCLUDE_DIRS} ${LibtorrentRasterbar_INCLUDE_DIRS}) add_executable(bootstrap-dht-bot ${SOURCES}) target_link_libraries(bootstrap-dht-bot ${Boost_LIBRARIES} ${CMAKE_THREAD_LIBS_INIT} ${LibtorrentRasterbar_LIBRARIES})
cmake_minimum_required(VERSION 2.8) project(bootstrap-dht-bot) set(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} ${CMAKE_CURRENT_SOURCE_DIR}/cmake/Modules/) set(CMAKE_CXX_FLAGS "-std=c++11 -Wall -Werror -Wno-deprecated-declarations ${CMAKE_CXX_FLAGS}") find_package(Boost REQUIRED COMPONENTS system) find_package(LibTorrentRasterbar REQUIRED) find_package(Threads REQUIRED) set(SOURCES main.cpp) include_directories( ${CMAKE_BINARY_DIR}/include ${Boost_INCLUDE_DIRS} ${LibtorrentRasterbar_INCLUDE_DIRS}) add_executable(bootstrap-dht-bot ${SOURCES}) target_link_libraries(bootstrap-dht-bot ${Boost_LIBRARIES} ${CMAKE_THREAD_LIBS_INIT} ${LibtorrentRasterbar_LIBRARIES})
Fix build to no longer need dynamic boost linking for asio.
Fix build to no longer need dynamic boost linking for asio.
Text
mit
davidchappelle/bootstrap-dht-bot
text
## Code Before:
cmake_minimum_required(VERSION 2.8)

project(bootstrap-dht-bot)

set(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} ${CMAKE_CURRENT_SOURCE_DIR}/cmake/Modules/)
set(CMAKE_CXX_FLAGS "-std=c++11 -Wall -Werror -Wno-deprecated-declarations -DBOOST_ASIO_DYN_LINK ${CMAKE_CXX_FLAGS}")

find_package(Boost REQUIRED COMPONENTS system)
find_package(LibTorrentRasterbar REQUIRED)
find_package(Threads REQUIRED)

set(SOURCES main.cpp)

include_directories(
    ${CMAKE_BINARY_DIR}/include
    ${Boost_INCLUDE_DIRS}
    ${LibtorrentRasterbar_INCLUDE_DIRS})

add_executable(bootstrap-dht-bot ${SOURCES})

target_link_libraries(bootstrap-dht-bot
    ${Boost_LIBRARIES}
    ${CMAKE_THREAD_LIBS_INIT}
    ${LibtorrentRasterbar_LIBRARIES})
## Instruction:
Fix build to no longer need dynamic boost linking for asio.
## Code After:
cmake_minimum_required(VERSION 2.8)

project(bootstrap-dht-bot)

set(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} ${CMAKE_CURRENT_SOURCE_DIR}/cmake/Modules/)
set(CMAKE_CXX_FLAGS "-std=c++11 -Wall -Werror -Wno-deprecated-declarations ${CMAKE_CXX_FLAGS}")

find_package(Boost REQUIRED COMPONENTS system)
find_package(LibTorrentRasterbar REQUIRED)
find_package(Threads REQUIRED)

set(SOURCES main.cpp)

include_directories(
    ${CMAKE_BINARY_DIR}/include
    ${Boost_INCLUDE_DIRS}
    ${LibtorrentRasterbar_INCLUDE_DIRS})

add_executable(bootstrap-dht-bot ${SOURCES})

target_link_libraries(bootstrap-dht-bot
    ${Boost_LIBRARIES}
    ${CMAKE_THREAD_LIBS_INIT}
    ${LibtorrentRasterbar_LIBRARIES})
97f03fe11d5f0a4b9787f2b3465ac67092642278
views/sale_coupon_program_views.xml
views/sale_coupon_program_views.xml
<?xml version="1.0" encoding="utf-8"?> <odoo> <record id="sale_coupon_program_view_form" model="ir.ui.view"> <field name="name">sale.coupon.program.view.form</field> <field name="model">sale.coupon.program</field> <field name="inherit_id" ref="sale_coupon.sale_coupon_program_view_promo_program_form"/> <field name="arch" type="xml"> <field name="rule_partners_domain" position="before"> <field name="is_public_included"/> </field> </field> </record> </odoo>
<?xml version="1.0" encoding="utf-8"?> <odoo> <record id="sale_coupon_program_view_form" model="ir.ui.view"> <field name="name">sale.coupon.program.view.form</field> <field name="model">sale.coupon.program</field> <field name="inherit_id" ref="sale_coupon.sale_coupon_program_view_promo_program_form"/> <field name="arch" type="xml"> <field name="rule_partners_domain" position="before"> <field name="is_public_included"/> </field> </field> </record> <menuitem action="sale_coupon.sale_coupon_program_action_promo_program" id="menu_promotion_type_config" parent="website_sale.menu_catalog" groups="sales_team.group_sale_manager" sequence="50"/> <menuitem id="menu_coupon_type_config" action="sale_coupon.sale_coupon_program_action_coupon_program" parent="website_sale.menu_catalog" groups="sales_team.group_sale_manager" sequence="51"/> </odoo>
Add configuration menuitems under Website Admin/Catalog
[FIX] website_sale_coupon: Add configuration menuitems under Website Admin/Catalog
XML
agpl-3.0
ygol/odoo,ygol/odoo,ygol/odoo,ygol/odoo,ygol/odoo,ygol/odoo,ygol/odoo
xml
## Code Before:
<?xml version="1.0" encoding="utf-8"?>
<odoo>
    <record id="sale_coupon_program_view_form" model="ir.ui.view">
        <field name="name">sale.coupon.program.view.form</field>
        <field name="model">sale.coupon.program</field>
        <field name="inherit_id" ref="sale_coupon.sale_coupon_program_view_promo_program_form"/>
        <field name="arch" type="xml">
            <field name="rule_partners_domain" position="before">
                <field name="is_public_included"/>
            </field>
        </field>
    </record>
</odoo>
## Instruction:
[FIX] website_sale_coupon: Add configuration menuitems under Website Admin/Catalog
## Code After:
<?xml version="1.0" encoding="utf-8"?>
<odoo>
    <record id="sale_coupon_program_view_form" model="ir.ui.view">
        <field name="name">sale.coupon.program.view.form</field>
        <field name="model">sale.coupon.program</field>
        <field name="inherit_id" ref="sale_coupon.sale_coupon_program_view_promo_program_form"/>
        <field name="arch" type="xml">
            <field name="rule_partners_domain" position="before">
                <field name="is_public_included"/>
            </field>
        </field>
    </record>

    <menuitem action="sale_coupon.sale_coupon_program_action_promo_program" id="menu_promotion_type_config" parent="website_sale.menu_catalog" groups="sales_team.group_sale_manager" sequence="50"/>
    <menuitem id="menu_coupon_type_config" action="sale_coupon.sale_coupon_program_action_coupon_program" parent="website_sale.menu_catalog" groups="sales_team.group_sale_manager" sequence="51"/>
</odoo>
589378fecd155dc09bc422a2cf416da0f5a13568
.gitlab-ci.yml
.gitlab-ci.yml
.test_template: &test_definition stage: test script: - bundle install --jobs $(nproc) --without docs --path vendor - bundle exec rake tests - bundle exec rake spec # Test with version present on Debian Stretch test:puppet4.8: image: ruby:2.3 variables: PUPPET_VERSION: "~> 4.8.2" FACTER_VERSION: '~> 2.4.6' HIERA_VERSION: '~> 3.2.0' <<: *test_definition # Test with version present on Debian Buster test:puppet5.5: image: ruby:2.5 variables: PUPPET_VERSION: "~> 5.5.10" FACTER_VERSION: '~> 3.11.0' <<: *test_definition # Build and deploy docs on GitLab Pages pages: image: debian:stable stage: deploy script: - apt-get update && apt-get install -y puppet-strings puppet - puppet strings generate - mv doc public only: - master artifacts: paths: - public
.test_template: &test_definition stage: test script: - bundle install --jobs $(nproc) --without docs --path vendor - bundle exec rake tests - bundle exec rake spec # Test with version present on Debian Buster test:puppet5.5: image: ruby:2.5 variables: PUPPET_VERSION: "~> 5.5.10" FACTER_VERSION: '~> 3.11.0' <<: *test_definition # Test with the latest Puppet release test:puppetlatest: <<: *test_definition # Build and deploy docs on GitLab Pages pages: image: debian:stable stage: deploy script: - apt-get update && apt-get install -y puppet-strings puppet - puppet strings generate - mv doc public only: - master artifacts: paths: - public
Drop puppet 4.8 and test against the latest puppet release in the CI
Drop puppet 4.8 and test against the latest puppet release in the CI
YAML
agpl-3.0
duritong/puppet-tor,duritong/puppet-tor
yaml
## Code Before:
.test_template: &test_definition
  stage: test
  script:
    - bundle install --jobs $(nproc) --without docs --path vendor
    - bundle exec rake tests
    - bundle exec rake spec

# Test with version present on Debian Stretch
test:puppet4.8:
  image: ruby:2.3
  variables:
    PUPPET_VERSION: "~> 4.8.2"
    FACTER_VERSION: '~> 2.4.6'
    HIERA_VERSION: '~> 3.2.0'
  <<: *test_definition

# Test with version present on Debian Buster
test:puppet5.5:
  image: ruby:2.5
  variables:
    PUPPET_VERSION: "~> 5.5.10"
    FACTER_VERSION: '~> 3.11.0'
  <<: *test_definition

# Build and deploy docs on GitLab Pages
pages:
  image: debian:stable
  stage: deploy
  script:
    - apt-get update && apt-get install -y puppet-strings puppet
    - puppet strings generate
    - mv doc public
  only:
    - master
  artifacts:
    paths:
      - public
## Instruction:
Drop puppet 4.8 and test against the latest puppet release in the CI
## Code After:
.test_template: &test_definition
  stage: test
  script:
    - bundle install --jobs $(nproc) --without docs --path vendor
    - bundle exec rake tests
    - bundle exec rake spec

# Test with version present on Debian Buster
test:puppet5.5:
  image: ruby:2.5
  variables:
    PUPPET_VERSION: "~> 5.5.10"
    FACTER_VERSION: '~> 3.11.0'
  <<: *test_definition

# Test with the latest Puppet release
test:puppetlatest:
  <<: *test_definition

# Build and deploy docs on GitLab Pages
pages:
  image: debian:stable
  stage: deploy
  script:
    - apt-get update && apt-get install -y puppet-strings puppet
    - puppet strings generate
    - mv doc public
  only:
    - master
  artifacts:
    paths:
      - public
3a49890904fbc6ead3a1463cddfb41480712c203
CHANGELOG.md
CHANGELOG.md
Released 2015-mm-dd
Released 2016-02-10 - Initial release
Fix changelog for version 0.1.0
Fix changelog for version 0.1.0
Markdown
bsd-3-clause
CartoDB/turbo-cartocss
markdown
## Code Before:
Released 2015-mm-dd
## Instruction:
Fix changelog for version 0.1.0
## Code After:
Released 2016-02-10

- Initial release
d71ec708483a561293c4d6d64f6b101ac9a95912
travis/wait_for_appveyor.sh
travis/wait_for_appveyor.sh
set -euo pipefail REPO_SLUG=$TRAVIS_REPO_SLUG GIT_COMMIT=$TRAVIS_COMMIT if [ -z "${TRAVIS_TAG:-}" ]; then GIT_BRANCH=${TRAVIS_PULL_REQUEST_BRANCH:-$TRAVIS_BRANCH} else GIT_BRANCH=master fi function main() { URL="https://ci.appveyor.com/api/projects/$REPO_SLUG/history?recordsNumber=20&branch=$GIT_BRANCH" echo "Will poll $URL looking for commit $GIT_COMMIT." while true; do build=$(curl --fail --show-error --silent "$URL" | jq "[.builds | .[] | select(.commitId == \"$GIT_COMMIT\")][0]") || { sleep 10; continue; } id=$(echo "$build" | jq -r ".buildId") status=$(echo "$build" | jq -r ".status") if [ "$status" = "null" ]; then echo "Build has not started." else echo -n "https://ci.appveyor.com/project/charleskorn/batect/builds/$id: " case "$status" in success) echo "build succeeded." exit 0 ;; failed) echo "build failed." exit 1 ;; cancelled) echo "build was cancelled." exit 1 ;; *) echo "build has not completed: status is $status" ;; esac fi sleep 10 done } main
set -euo pipefail REPO_SLUG=${TRAVIS_PULL_REQUEST_SLUG:-TRAVIS_REPO_SLUG} GIT_COMMIT=${TRAVIS_PULL_REQUEST_SHA:-TRAVIS_COMMIT} if [ -z "${TRAVIS_TAG:-}" ]; then GIT_BRANCH=${TRAVIS_PULL_REQUEST_BRANCH:-$TRAVIS_BRANCH} else GIT_BRANCH=master fi function main() { URL="https://ci.appveyor.com/api/projects/$REPO_SLUG/history?recordsNumber=20&branch=$GIT_BRANCH" echo "Will poll $URL looking for commit $GIT_COMMIT." while true; do build=$(curl --fail --show-error --silent "$URL" | jq "[.builds | .[] | select(.commitId == \"$GIT_COMMIT\")][0]") || { sleep 10; continue; } id=$(echo "$build" | jq -r ".buildId") status=$(echo "$build" | jq -r ".status") if [ "$status" = "null" ]; then echo "Build has not started." else echo -n "https://ci.appveyor.com/project/charleskorn/batect/builds/$id: " case "$status" in success) echo "build succeeded." exit 0 ;; failed) echo "build failed." exit 1 ;; cancelled) echo "build was cancelled." exit 1 ;; *) echo "build has not completed: status is $status" ;; esac fi sleep 10 done } main
Fix the issue waiting for the AppVeyor build to finish when building a PR.
Fix the issue waiting for the AppVeyor build to finish when building a PR.
Shell
apache-2.0
charleskorn/batect,charleskorn/batect,charleskorn/batect,charleskorn/batect,charleskorn/batect
shell
## Code Before:
set -euo pipefail

REPO_SLUG=$TRAVIS_REPO_SLUG
GIT_COMMIT=$TRAVIS_COMMIT

if [ -z "${TRAVIS_TAG:-}" ]; then
    GIT_BRANCH=${TRAVIS_PULL_REQUEST_BRANCH:-$TRAVIS_BRANCH}
else
    GIT_BRANCH=master
fi

function main() {
    URL="https://ci.appveyor.com/api/projects/$REPO_SLUG/history?recordsNumber=20&branch=$GIT_BRANCH"
    echo "Will poll $URL looking for commit $GIT_COMMIT."

    while true; do
        build=$(curl --fail --show-error --silent "$URL" | jq "[.builds | .[] | select(.commitId == \"$GIT_COMMIT\")][0]") || { sleep 10; continue; }
        id=$(echo "$build" | jq -r ".buildId")
        status=$(echo "$build" | jq -r ".status")

        if [ "$status" = "null" ]; then
            echo "Build has not started."
        else
            echo -n "https://ci.appveyor.com/project/charleskorn/batect/builds/$id: "

            case "$status" in
            success)
                echo "build succeeded."
                exit 0
                ;;
            failed)
                echo "build failed."
                exit 1
                ;;
            cancelled)
                echo "build was cancelled."
                exit 1
                ;;
            *)
                echo "build has not completed: status is $status"
                ;;
            esac
        fi

        sleep 10
    done
}

main
## Instruction:
Fix the issue waiting for the AppVeyor build to finish when building a PR.
## Code After:
set -euo pipefail

REPO_SLUG=${TRAVIS_PULL_REQUEST_SLUG:-TRAVIS_REPO_SLUG}
GIT_COMMIT=${TRAVIS_PULL_REQUEST_SHA:-TRAVIS_COMMIT}

if [ -z "${TRAVIS_TAG:-}" ]; then
    GIT_BRANCH=${TRAVIS_PULL_REQUEST_BRANCH:-$TRAVIS_BRANCH}
else
    GIT_BRANCH=master
fi

function main() {
    URL="https://ci.appveyor.com/api/projects/$REPO_SLUG/history?recordsNumber=20&branch=$GIT_BRANCH"
    echo "Will poll $URL looking for commit $GIT_COMMIT."

    while true; do
        build=$(curl --fail --show-error --silent "$URL" | jq "[.builds | .[] | select(.commitId == \"$GIT_COMMIT\")][0]") || { sleep 10; continue; }
        id=$(echo "$build" | jq -r ".buildId")
        status=$(echo "$build" | jq -r ".status")

        if [ "$status" = "null" ]; then
            echo "Build has not started."
        else
            echo -n "https://ci.appveyor.com/project/charleskorn/batect/builds/$id: "

            case "$status" in
            success)
                echo "build succeeded."
                exit 0
                ;;
            failed)
                echo "build failed."
                exit 1
                ;;
            cancelled)
                echo "build was cancelled."
                exit 1
                ;;
            *)
                echo "build has not completed: status is $status"
                ;;
            esac
        fi

        sleep 10
    done
}

main
e4b37ddc8802386572aa496e8b37ca647839aac7
setup.py
setup.py
from distutils.core import setup from setuptools import find_packages setup( name='tango-shared-core', version='0.6.4', author=u'Tim Baxter', author_email='[email protected]', url='https://github.com/tBaxter/tango-shared-core', license='LICENSE', description='Tango shared/core functionality.', long_description=open('README.md').read(), packages=find_packages(), zip_safe=False, include_package_data=True, dependency_links = [ 'http://github.com/tBaxter/django-voting/tarball/master#egg=tango-voting', ] )
from setuptools import setup, find_packages with open('docs/requirements.txt') as f: required = f.read().splitlines() setup( name='tango-shared-core', version='0.6.5', author=u'Tim Baxter', author_email='[email protected]', description='Tango shared/core functionality.', long_description=open('README.md').read(), url='https://github.com/tBaxter/tango-shared-core', license='LICENSE', packages=find_packages(), zip_safe=False, include_package_data=True, dependency_links = [ 'http://github.com/tBaxter/django-voting/tarball/master#egg=tango-voting', ] )
Fix for requirements, and increment version
Fix for requirements, and increment version
Python
mit
tBaxter/tango-shared-core,tBaxter/tango-shared-core,tBaxter/tango-shared-core
python
## Code Before:
from distutils.core import setup
from setuptools import find_packages

setup(
    name='tango-shared-core',
    version='0.6.4',
    author=u'Tim Baxter',
    author_email='[email protected]',
    url='https://github.com/tBaxter/tango-shared-core',
    license='LICENSE',
    description='Tango shared/core functionality.',
    long_description=open('README.md').read(),
    packages=find_packages(),
    zip_safe=False,
    include_package_data=True,
    dependency_links = [
        'http://github.com/tBaxter/django-voting/tarball/master#egg=tango-voting',
    ]
)
## Instruction:
Fix for requirements, and increment version
## Code After:
from setuptools import setup, find_packages

with open('docs/requirements.txt') as f:
    required = f.read().splitlines()

setup(
    name='tango-shared-core',
    version='0.6.5',
    author=u'Tim Baxter',
    author_email='[email protected]',
    description='Tango shared/core functionality.',
    long_description=open('README.md').read(),
    url='https://github.com/tBaxter/tango-shared-core',
    license='LICENSE',
    packages=find_packages(),
    zip_safe=False,
    include_package_data=True,
    dependency_links = [
        'http://github.com/tBaxter/django-voting/tarball/master#egg=tango-voting',
    ]
)
af45e43c46a22f3168ab946bf914a45eae9ade19
avatar/urls.py
avatar/urls.py
try: from django.conf.urls import patterns, url except ImportError: # Django < 1.4 from django.conf.urls.defaults import patterns, url from avatar import views urlpatterns = patterns('', url(r'^add/$', views.add, name='avatar_add'), url(r'^change/$', views.change, name='avatar_change'), url(r'^delete/$', views.delete, name='avatar_delete'), url(r'^render_primary/(?P<user>[\w\d\@\.\-_]{3,30})/(?P<size>[\d]+)/$', views.render_primary, name='avatar_render_primary'), url(r'^list/(?P<username>[\+\w\@\.]+)/$', views.avatar_gallery, name='avatar_gallery'), url(r'^list/(?P<username>[\+\w\@\.]+)/(?P<id>[\d]+)/$', views.avatar, name='avatar'), )
try: from django.conf.urls import patterns, url except ImportError: # Django < 1.4 from django.conf.urls.defaults import url from avatar import views urlpatterns = [ url(r'^add/$', views.add, name='avatar_add'), url(r'^change/$', views.change, name='avatar_change'), url(r'^delete/$', views.delete, name='avatar_delete'), url(r'^render_primary/(?P<user>[\w\d\@\.\-_]{3,30})/(?P<size>[\d]+)/$', views.render_primary, name='avatar_render_primary'), url(r'^list/(?P<username>[\+\w\@\.]+)/$', views.avatar_gallery, name='avatar_gallery'), url(r'^list/(?P<username>[\+\w\@\.]+)/(?P<id>[\d]+)/$', views.avatar, name='avatar'), ]
Remove replace urlpatterns with simple array, make compatible with Django 1.9
Remove replace urlpatterns with simple array, make compatible with Django 1.9
Python
bsd-3-clause
ad-m/django-avatar,ad-m/django-avatar,grantmcconnaughey/django-avatar,grantmcconnaughey/django-avatar,jezdez/django-avatar,jezdez/django-avatar,MachineandMagic/django-avatar,MachineandMagic/django-avatar
python
## Code Before:
try:
    from django.conf.urls import patterns, url
except ImportError:  # Django < 1.4
    from django.conf.urls.defaults import patterns, url

from avatar import views

urlpatterns = patterns('',
    url(r'^add/$', views.add, name='avatar_add'),
    url(r'^change/$', views.change, name='avatar_change'),
    url(r'^delete/$', views.delete, name='avatar_delete'),
    url(r'^render_primary/(?P<user>[\w\d\@\.\-_]{3,30})/(?P<size>[\d]+)/$',
        views.render_primary, name='avatar_render_primary'),
    url(r'^list/(?P<username>[\+\w\@\.]+)/$',
        views.avatar_gallery, name='avatar_gallery'),
    url(r'^list/(?P<username>[\+\w\@\.]+)/(?P<id>[\d]+)/$',
        views.avatar, name='avatar'),
)
## Instruction:
Remove replace urlpatterns with simple array, make compatible with Django 1.9
## Code After:
try:
    from django.conf.urls import patterns, url
except ImportError:  # Django < 1.4
    from django.conf.urls.defaults import url

from avatar import views

urlpatterns = [
    url(r'^add/$', views.add, name='avatar_add'),
    url(r'^change/$', views.change, name='avatar_change'),
    url(r'^delete/$', views.delete, name='avatar_delete'),
    url(r'^render_primary/(?P<user>[\w\d\@\.\-_]{3,30})/(?P<size>[\d]+)/$',
        views.render_primary, name='avatar_render_primary'),
    url(r'^list/(?P<username>[\+\w\@\.]+)/$',
        views.avatar_gallery, name='avatar_gallery'),
    url(r'^list/(?P<username>[\+\w\@\.]+)/(?P<id>[\d]+)/$',
        views.avatar, name='avatar'),
]
dfac86ef7d21b47274b5c28922dfb154986ad727
package.json
package.json
{ "name": "codesandnotes", "author": "Oskar Rough Mosumgaard", "version": "1.0.1", "engines": { "node": ">=0.12.0" }, "devDependencies": { "autoprefixer-core": "^6.0.1", "browser-sync": "^2.9.6", "del": "^2.0.2", "gulp": "^3.9.0", "gulp-babel": "^5.2.1", "gulp-cache": "^0.3.0", "gulp-concat": "^2.6.0", "gulp-csso": "^1.0.0", "gulp-filter": "^3.0.1", "gulp-if": "^1.2.5", "gulp-load-plugins": "^1.0.0-rc.1", "gulp-minify-css": "^1.2.1", "gulp-postcss": "^6.0.0", "gulp-sass": "^2.0.4", "gulp-shell": "^0.4.3", "gulp-size": "^2.0.0", "gulp-sourcemaps": "^1.5.2", "gulp-uglify": "^1.4.1", "rsyncwrapper": "^0.4.3" }, "dependencies": { "sanitize.css": "^2.0.0" } }
{ "name": "codesandnotes", "author": "Oskar Rough Mosumgaard", "version": "1.0.1", "private":true, "engines": { "node": ">=0.12.0" }, "devDependencies": { "autoprefixer-core": "^6.0.1", "browser-sync": "^2.9.6", "del": "^2.0.2", "gulp": "^3.9.0", "gulp-babel": "^5.2.1", "gulp-cache": "^0.3.0", "gulp-concat": "^2.6.0", "gulp-csso": "^1.0.0", "gulp-filter": "^3.0.1", "gulp-if": "^1.2.5", "gulp-load-plugins": "^1.0.0-rc.1", "gulp-minify-css": "^1.2.1", "gulp-postcss": "^6.0.0", "gulp-sass": "^2.0.4", "gulp-shell": "^0.4.3", "gulp-size": "^2.0.0", "gulp-sourcemaps": "^1.5.2", "gulp-uglify": "^1.4.1", "rsyncwrapper": "^0.4.3" }, "dependencies": { "sanitize.css": "^2.0.0" } }
Make sure we don't publish to npm
Make sure we don't publish to npm
JSON
mit
oskarrough/codesandnotes,oskarrough/codesandnotes
json
## Code Before:
{
  "name": "codesandnotes",
  "author": "Oskar Rough Mosumgaard",
  "version": "1.0.1",
  "engines": {
    "node": ">=0.12.0"
  },
  "devDependencies": {
    "autoprefixer-core": "^6.0.1",
    "browser-sync": "^2.9.6",
    "del": "^2.0.2",
    "gulp": "^3.9.0",
    "gulp-babel": "^5.2.1",
    "gulp-cache": "^0.3.0",
    "gulp-concat": "^2.6.0",
    "gulp-csso": "^1.0.0",
    "gulp-filter": "^3.0.1",
    "gulp-if": "^1.2.5",
    "gulp-load-plugins": "^1.0.0-rc.1",
    "gulp-minify-css": "^1.2.1",
    "gulp-postcss": "^6.0.0",
    "gulp-sass": "^2.0.4",
    "gulp-shell": "^0.4.3",
    "gulp-size": "^2.0.0",
    "gulp-sourcemaps": "^1.5.2",
    "gulp-uglify": "^1.4.1",
    "rsyncwrapper": "^0.4.3"
  },
  "dependencies": {
    "sanitize.css": "^2.0.0"
  }
}
## Instruction:
Make sure we don't publish to npm
## Code After:
{
  "name": "codesandnotes",
  "author": "Oskar Rough Mosumgaard",
  "version": "1.0.1",
  "private":true,
  "engines": {
    "node": ">=0.12.0"
  },
  "devDependencies": {
    "autoprefixer-core": "^6.0.1",
    "browser-sync": "^2.9.6",
    "del": "^2.0.2",
    "gulp": "^3.9.0",
    "gulp-babel": "^5.2.1",
    "gulp-cache": "^0.3.0",
    "gulp-concat": "^2.6.0",
    "gulp-csso": "^1.0.0",
    "gulp-filter": "^3.0.1",
    "gulp-if": "^1.2.5",
    "gulp-load-plugins": "^1.0.0-rc.1",
    "gulp-minify-css": "^1.2.1",
    "gulp-postcss": "^6.0.0",
    "gulp-sass": "^2.0.4",
    "gulp-shell": "^0.4.3",
    "gulp-size": "^2.0.0",
    "gulp-sourcemaps": "^1.5.2",
    "gulp-uglify": "^1.4.1",
    "rsyncwrapper": "^0.4.3"
  },
  "dependencies": {
    "sanitize.css": "^2.0.0"
  }
}
245bc2bf935cb8aecba4c4e3707f1c5ebe1ec5e0
server/server.js
server/server.js
var express = require('express'); var passport = require('passport'); var util = require('./lib/utility.js'); // Load environment variables if (process.env.NODE_ENV !== 'integration') { require('dotenv').config({ path: './env/.env' }); } var app = express(); // Initial Configuration, Static Assets, & View Engine Configuration require('./config/initialize.js')(app, express); // Authentication Middleware: Express Sessions, Passport Strategy require('./config/auth.js')(app, express, passport); // Pre-Authentication Routes & OAuth Requests require('./routes/auth-routes.js')(app, passport); //Authentication check currently commented out, uncomment line to re-activate app.use(util.ensureAuthenticated); // View Routes require('./routes/view-routes.js')(app); // API Routes // require('./routes/api-routes.js')(app); // Wildcard route app.get('/*', function(req, res) { res.redirect('/'); }) app.listen(Number(process.env.PORT), process.env.HOST, function() { console.log(process.env.APP_NAME + ' is listening at ' + process.env.HOST + ' on port ' + process.env.PORT + '.') });
// Load environment variables if (process.env.NODE_ENV !== 'integration') { require('dotenv').config({ path: './env/.env' }); } var express = require('express'); var passport = require('passport'); var util = require('./lib/utility.js'); var app = express(); // Initial Configuration, Static Assets, & View Engine Configuration require('./config/initialize.js')(app, express); // Authentication Middleware: Express Sessions, Passport Strategy require('./config/auth.js')(app, express, passport); // Pre-Authentication Routes & OAuth Requests require('./routes/auth-routes.js')(app, passport); //Authentication check currently commented out, uncomment line to re-activate app.use(util.ensureAuthenticated); // View Routes require('./routes/view-routes.js')(app); // API Routes // require('./routes/api-routes.js')(app); // Wildcard route app.get('/*', function(req, res) { res.redirect('/'); }) app.listen(Number(process.env.PORT), process.env.HOST, function() { console.log(process.env.APP_NAME + ' is listening at ' + process.env.HOST + ' on port ' + process.env.PORT + '.') });
Load environment variable first in express app
Load environment variable first in express app
JavaScript
mit
formidable-coffee/masterfully,chkakaja/sentimize,formidable-coffee/masterfully,chkakaja/sentimize
javascript
## Code Before:
var express = require('express');
var passport = require('passport');
var util = require('./lib/utility.js');

// Load environment variables
if (process.env.NODE_ENV !== 'integration') {
  require('dotenv').config({ path: './env/.env' });
}

var app = express();

// Initial Configuration, Static Assets, & View Engine Configuration
require('./config/initialize.js')(app, express);

// Authentication Middleware: Express Sessions, Passport Strategy
require('./config/auth.js')(app, express, passport);

// Pre-Authentication Routes & OAuth Requests
require('./routes/auth-routes.js')(app, passport);

//Authentication check currently commented out, uncomment line to re-activate
app.use(util.ensureAuthenticated);

// View Routes
require('./routes/view-routes.js')(app);

// API Routes
// require('./routes/api-routes.js')(app);

// Wildcard route
app.get('/*', function(req, res) {
  res.redirect('/');
})

app.listen(Number(process.env.PORT), process.env.HOST, function() {
  console.log(process.env.APP_NAME + ' is listening at ' + process.env.HOST + ' on port ' + process.env.PORT + '.')
});
## Instruction:
Load environment variable first in express app
## Code After:
// Load environment variables
if (process.env.NODE_ENV !== 'integration') {
  require('dotenv').config({ path: './env/.env' });
}

var express = require('express');
var passport = require('passport');
var util = require('./lib/utility.js');

var app = express();

// Initial Configuration, Static Assets, & View Engine Configuration
require('./config/initialize.js')(app, express);

// Authentication Middleware: Express Sessions, Passport Strategy
require('./config/auth.js')(app, express, passport);

// Pre-Authentication Routes & OAuth Requests
require('./routes/auth-routes.js')(app, passport);

//Authentication check currently commented out, uncomment line to re-activate
app.use(util.ensureAuthenticated);

// View Routes
require('./routes/view-routes.js')(app);

// API Routes
// require('./routes/api-routes.js')(app);

// Wildcard route
app.get('/*', function(req, res) {
  res.redirect('/');
})

app.listen(Number(process.env.PORT), process.env.HOST, function() {
  console.log(process.env.APP_NAME + ' is listening at ' + process.env.HOST + ' on port ' + process.env.PORT + '.')
});
63852c1fbb996823b2fa74b471470328b7541af7
README.md
README.md
` make main ` ` make mjpg_streamer_instance ` ` make gnuplot_fps ARGS="-c 1 -n 60 -m 30` ` make gnuplot_vision GRAPH_DIR=30_deg ARGS="-c 4 -n 30 -m 60` ` make send_start_signal ` ` make send_stop_signal ` ### Contributors - Min Hoo Lee - Michael Wan - Rahul Amara - Pravin Suranthiran - Jeremy Tien - Sahas Munamala
- Min Hoo Lee - Pravin Suranthiran - Sahas Munamala ### Sample run commands ` make main ` ` make mjpg_streamer_instance ` ` make gnuplot_fps PLOT_FPS_ARGS="-c 1 -n 60 -m 30` ` make gnuplot_vision ` ` make gnuplot_vision PLOT_VISION_ARGS="-c 4 -n 30 -m 60` ` make send_start_signal ` ` make send_stop_signal ` ` make camera_calib ` ### Contributors - Min Hoo Lee - Michael Wan - Rahul Amara - Pravin Suranthiran - Jeremy Tien - Sahas Munamala
Add in new examples and update contributors
Add in new examples and update contributors Former-commit-id: 695b5a4bb7a35bb6eb6a81ec761f3481be7dffd8
Markdown
bsd-2-clause
valkyrierobotics/vision2017,valkyrierobotics/vision2017,valkyrierobotics/vision2017
markdown
## Code Before:
` make main `

` make mjpg_streamer_instance `

` make gnuplot_fps ARGS="-c 1 -n 60 -m 30`

` make gnuplot_vision GRAPH_DIR=30_deg ARGS="-c 4 -n 30 -m 60`

` make send_start_signal `

` make send_stop_signal `

### Contributors
- Min Hoo Lee
- Michael Wan
- Rahul Amara
- Pravin Suranthiran
- Jeremy Tien
- Sahas Munamala
## Instruction:
Add in new examples and update contributors

Former-commit-id: 695b5a4bb7a35bb6eb6a81ec761f3481be7dffd8
## Code After:
- Min Hoo Lee
- Pravin Suranthiran
- Sahas Munamala

### Sample run commands
` make main `

` make mjpg_streamer_instance `

` make gnuplot_fps PLOT_FPS_ARGS="-c 1 -n 60 -m 30`

` make gnuplot_vision `

` make gnuplot_vision PLOT_VISION_ARGS="-c 4 -n 30 -m 60`

` make send_start_signal `

` make send_stop_signal `

` make camera_calib `

### Contributors
- Min Hoo Lee
- Michael Wan
- Rahul Amara
- Pravin Suranthiran
- Jeremy Tien
- Sahas Munamala
ed279b7f2cfcfd4abdf1da36d8406a3f63603529
dss/mobile/__init__.py
dss/mobile/__init__.py
try: import SocketServer as socketserver except ImportError: import socketserver from dss.tools import thread, show from dss.config import config from .handler import MediaHandler class ThreadedTCPServer(socketserver.ThreadingMixIn, socketserver.TCPServer): daemon_threads = True is_running = False class TCPServer(object): def __init__(self): self.host = config.get('local', 'addr') self.port = config.getint('local', 'tcp_port') self.cond = thread.Condition() self._server = None def start(self, create_thread=True): if not create_thread: self.run_server() return with self.cond: thread.Thread(self.run_server, name='TCP Server').start() self.cond.wait() return self def run_server(self): self._server = ThreadedTCPServer((self.host, self.port), MediaHandler) show('Listening at {0.host}:{0.port} (tcp)'.format(self)) with self.cond: self.cond.notify_all() self._server.is_running = True self._server.serve_forever() def stop(self): self._server.is_running = False self._server.shutdown()
try: import SocketServer as socketserver except ImportError: import socketserver from dss.tools import thread, show from dss.config import config from dss.storage import db from .handler import MediaHandler # If some streams are active, the program did no close properly. db.mobile.update({'active': True}, {'active': False}) class ThreadedTCPServer(socketserver.ThreadingMixIn, socketserver.TCPServer): daemon_threads = True is_running = False class TCPServer(object): def __init__(self): self.host = config.get('local', 'addr') self.port = config.getint('local', 'tcp_port') self.cond = thread.Condition() self._server = None def start(self, create_thread=True): if not create_thread: self.run_server() return with self.cond: thread.Thread(self.run_server, name='TCP Server').start() self.cond.wait() return self def run_server(self): self._server = ThreadedTCPServer((self.host, self.port), MediaHandler) show('Listening at {0.host}:{0.port} (tcp)'.format(self)) with self.cond: self.cond.notify_all() self._server.is_running = True self._server.serve_forever() def stop(self): self._server.is_running = False self._server.shutdown()
Mark all mobile streams as inactive when the program starts.
Mark all mobile streams as inactive when the program starts.
Python
bsd-3-clause
terabit-software/dynamic-stream-server,hmoraes/dynamic-stream-server,terabit-software/dynamic-stream-server,hmoraes/dynamic-stream-server,terabit-software/dynamic-stream-server,terabit-software/dynamic-stream-server,hmoraes/dynamic-stream-server,hmoraes/dynamic-stream-server
python
## Code Before:
try:
    import SocketServer as socketserver
except ImportError:
    import socketserver

from dss.tools import thread, show
from dss.config import config

from .handler import MediaHandler

class ThreadedTCPServer(socketserver.ThreadingMixIn, socketserver.TCPServer):
    daemon_threads = True
    is_running = False

class TCPServer(object):
    def __init__(self):
        self.host = config.get('local', 'addr')
        self.port = config.getint('local', 'tcp_port')
        self.cond = thread.Condition()
        self._server = None

    def start(self, create_thread=True):
        if not create_thread:
            self.run_server()
            return
        with self.cond:
            thread.Thread(self.run_server, name='TCP Server').start()
            self.cond.wait()
        return self

    def run_server(self):
        self._server = ThreadedTCPServer((self.host, self.port), MediaHandler)
        show('Listening at {0.host}:{0.port} (tcp)'.format(self))
        with self.cond:
            self.cond.notify_all()
        self._server.is_running = True
        self._server.serve_forever()

    def stop(self):
        self._server.is_running = False
        self._server.shutdown()
## Instruction:
Mark all mobile streams as inactive when the program starts.
## Code After:
try:
    import SocketServer as socketserver
except ImportError:
    import socketserver

from dss.tools import thread, show
from dss.config import config
from dss.storage import db

from .handler import MediaHandler

# If some streams are active, the program did no close properly.
db.mobile.update({'active': True}, {'active': False})

class ThreadedTCPServer(socketserver.ThreadingMixIn, socketserver.TCPServer):
    daemon_threads = True
    is_running = False

class TCPServer(object):
    def __init__(self):
        self.host = config.get('local', 'addr')
        self.port = config.getint('local', 'tcp_port')
        self.cond = thread.Condition()
        self._server = None

    def start(self, create_thread=True):
        if not create_thread:
            self.run_server()
            return
        with self.cond:
            thread.Thread(self.run_server, name='TCP Server').start()
            self.cond.wait()
        return self

    def run_server(self):
        self._server = ThreadedTCPServer((self.host, self.port), MediaHandler)
        show('Listening at {0.host}:{0.port} (tcp)'.format(self))
        with self.cond:
            self.cond.notify_all()
        self._server.is_running = True
        self._server.serve_forever()

    def stop(self):
        self._server.is_running = False
        self._server.shutdown()
ead2ca0044136c8e8b4756e7380a4422a57648bf
TODO.md
TODO.md
- varargs (how to pass them, etc) - floats - vectors - modifiers on everything - pass by value - structs constants - looks like Typedef can have more than two things in it (strct-pack-1.ll) - fixing printing 0x values (regstack-1.ll) - aliases - numbered types - unimplimented stuff in parser - consider renaming types - blocks not being set correctly - maybe comparisons at top of K
- floats - primitive operations working - unable to bitcast floats - varargs - `printf` is working - submit vararg patch to clang - aliases - We might be done with this. - standard library - create repo for modified musl - add back printf float support - memory - simplify memory model (symByte idea) - handle arbitrary-width integers (iN) - `gcc-torture/20001111-1.opt.ll`: stuck on sizeof i1 - vectors - modifiers on everything - pass by value - structs constants - looks like Typedef can have more than two things in it (strct-pack-1.ll) - fixing printing 0x values (regstack-1.ll) - numbered types - unimplimented stuff in parser - consider renaming types - blocks not being set correctly - maybe comparisons at top of K
Add more items to the todo file.
Add more items to the todo file.
Markdown
mit
davidlazar/llvm-semantics,kframework/llvm-semantics-old
markdown
## Code Before:
- varargs (how to pass them, etc)
- floats
- vectors
- modifiers on everything
- pass by value
- structs constants
- looks like Typedef can have more than two things in it (strct-pack-1.ll)
- fixing printing 0x values (regstack-1.ll)
- aliases
- numbered types
- unimplimented stuff in parser
- consider renaming types
- blocks not being set correctly
- maybe comparisons at top of K
## Instruction:
Add more items to the todo file.
## Code After:
- floats
- primitive operations working
- unable to bitcast floats
- varargs
- `printf` is working
- submit vararg patch to clang
- aliases
- We might be done with this.
- standard library
- create repo for modified musl
- add back printf float support
- memory
- simplify memory model (symByte idea)
- handle arbitrary-width integers (iN)
- `gcc-torture/20001111-1.opt.ll`: stuck on sizeof i1
- vectors
- modifiers on everything
- pass by value
- structs constants
- looks like Typedef can have more than two things in it (strct-pack-1.ll)
- fixing printing 0x values (regstack-1.ll)
- numbered types
- unimplimented stuff in parser
- consider renaming types
- blocks not being set correctly
- maybe comparisons at top of K
d79c5f1785a1cca9f6b1637db43f5161b7b33004
index.php
index.php
<?php session_start(); // Load required lib files require_once('twitteroauth/twitteroauth.php'); require_once('config.php'); // Prepare a connection using a preset token function getConnectionWithAccessToken($oauth_token, $oauth_token_secret) { $connection = new TwitterOAuth(CONSUMER_KEY, CONSUMER_SECRET, $oauth_token, $oauth_token_secret); return $connection; } $connection = getConnectionWithAccessToken(OAUTH_TOKEN, OAUTH_TOKEN_SECRET); $content = $connection->get("search/tweets", array( 'q' => 'twitter', 'lang' => 'en', 'count' => '5', 'result_type' => 'mixed', 'include_entities' => 'true' ) ); /* Include HTML to display on the page */ include('html.inc');
<?php session_start( session_id() ); // Include session_id() to maintain single session // Load required lib files require_once('twitteroauth/twitteroauth.php'); require_once('config.php'); // Prepare a connection using a preset token function getConnectionWithAccessToken($oauth_token, $oauth_token_secret) { $connection = new TwitterOAuth(CONSUMER_KEY, CONSUMER_SECRET, $oauth_token, $oauth_token_secret); return $connection; } // Open connection $connection = getConnectionWithAccessToken(OAUTH_TOKEN, OAUTH_TOKEN_SECRET); // Save connection to session for use in includes files $_SESSION['tweetoes_connection'] = $connection; $content = $connection->get("search/tweets", array( 'q' => 'twitter', 'lang' => 'en', 'count' => '5', 'result_type' => 'mixed', 'include_entities' => 'true' ) ); /* Include HTML to display on the page */ include('html.inc');
Set session id + Save connection to session
Set session id + Save connection to session In order for additional PHP files to make requests to Twitter without opening an entirely new connection (e.g, includes requested via AJAX), the initial connection must be stored somewhere. This is best handled by session variables. In order for other PHP files to access the same session, the session id must be set at session_start(). Each PHP file that requires access to this session must open with the same id (i.e, session_id() ). Now that the session_id() is set and the OAuth connection is saved, twitter.php can be updated.
PHP
mit
tannerhodges/mode-twitter-prototype,tannerhodges/tweetoes,tannerhodges/tweetoes
php
## Code Before:
<?php
session_start();

// Load required lib files
require_once('twitteroauth/twitteroauth.php');
require_once('config.php');

// Prepare a connection using a preset token
function getConnectionWithAccessToken($oauth_token, $oauth_token_secret) {
  $connection = new TwitterOAuth(CONSUMER_KEY, CONSUMER_SECRET, $oauth_token, $oauth_token_secret);
  return $connection;
}

$connection = getConnectionWithAccessToken(OAUTH_TOKEN, OAUTH_TOKEN_SECRET);

$content = $connection->get("search/tweets",
  array(
    'q' => 'twitter',
    'lang' => 'en',
    'count' => '5',
    'result_type' => 'mixed',
    'include_entities' => 'true'
  )
);

/* Include HTML to display on the page */
include('html.inc');
## Instruction:
Set session id + Save connection to session

In order for additional PHP files to make requests to Twitter without opening an entirely new connection (e.g, includes requested via AJAX), the initial connection must be stored somewhere. This is best handled by session variables.

In order for other PHP files to access the same session, the session id must be set at session_start(). Each PHP file that requires access to this session must open with the same id (i.e, session_id() ).

Now that the session_id() is set and the OAuth connection is saved, twitter.php can be updated.
## Code After:
<?php
session_start( session_id() ); // Include session_id() to maintain single session

// Load required lib files
require_once('twitteroauth/twitteroauth.php');
require_once('config.php');

// Prepare a connection using a preset token
function getConnectionWithAccessToken($oauth_token, $oauth_token_secret) {
  $connection = new TwitterOAuth(CONSUMER_KEY, CONSUMER_SECRET, $oauth_token, $oauth_token_secret);
  return $connection;
}

// Open connection
$connection = getConnectionWithAccessToken(OAUTH_TOKEN, OAUTH_TOKEN_SECRET);

// Save connection to session for use in includes files
$_SESSION['tweetoes_connection'] = $connection;

$content = $connection->get("search/tweets",
  array(
    'q' => 'twitter',
    'lang' => 'en',
    'count' => '5',
    'result_type' => 'mixed',
    'include_entities' => 'true'
  )
);

/* Include HTML to display on the page */
include('html.inc');
fff34ecad33d9795fbcca83f66b781a9c9fd65b3
spec/classes/params_spec.rb
spec/classes/params_spec.rb
require 'spec_helper' describe 'apt::params', :type => :class do let(:facts) { { :lsbdistid => 'Debian', :osfamily => 'Debian', :lsbdistcodename => 'wheezy', :puppetversion => Puppet.version, } } let (:title) { 'my_package' } it { is_expected.to contain_apt__params } # There are 4 resources in this class currently # there should not be any more resources because it is a params class # The resources are class[apt::params], class[main], class[settings], stage[main] it "Should not contain any resources" do expect(subject.call.resources.size).to eq(4) end describe "With lsb-release not installed" do let(:facts) { { :osfamily => 'Debian', :puppetversion => Puppet.version, } } let (:title) { 'my_package' } it do expect { subject.call }.to raise_error(Puppet::Error, /Unable to determine lsbdistid, please install lsb-release first/) end end end
require 'spec_helper' describe 'apt::params', :type => :class do let(:facts) { { :lsbdistid => 'Debian', :osfamily => 'Debian', :lsbdistcodename => 'wheezy', :puppetversion => Puppet.version, } } # There are 4 resources in this class currently # there should not be any more resources because it is a params class # The resources are class[apt::params], class[main], class[settings], stage[main] it "Should not contain any resources" do expect(subject.call.resources.size).to eq(4) end describe "With lsb-release not installed" do let(:facts) { { :osfamily => 'Debian', :puppetversion => Puppet.version, } } let (:title) { 'my_package' } it do expect { subject.call }.to raise_error(Puppet::Error, /Unable to determine lsbdistid, please install lsb-release first/) end end end
Remove useless, failing test; cleanup whitespace
Remove useless, failing test; cleanup whitespace
Ruby
apache-2.0
imphil/puppetlabs-apt,puppetlabs/puppetlabs-apt,imphil/puppetlabs-apt
ruby
## Code Before: require 'spec_helper' describe 'apt::params', :type => :class do let(:facts) { { :lsbdistid => 'Debian', :osfamily => 'Debian', :lsbdistcodename => 'wheezy', :puppetversion => Puppet.version, } } let (:title) { 'my_package' } it { is_expected.to contain_apt__params } # There are 4 resources in this class currently # there should not be any more resources because it is a params class # The resources are class[apt::params], class[main], class[settings], stage[main] it "Should not contain any resources" do expect(subject.call.resources.size).to eq(4) end describe "With lsb-release not installed" do let(:facts) { { :osfamily => 'Debian', :puppetversion => Puppet.version, } } let (:title) { 'my_package' } it do expect { subject.call }.to raise_error(Puppet::Error, /Unable to determine lsbdistid, please install lsb-release first/) end end end ## Instruction: Remove useless, failing test; cleanup whitespace ## Code After: require 'spec_helper' describe 'apt::params', :type => :class do let(:facts) { { :lsbdistid => 'Debian', :osfamily => 'Debian', :lsbdistcodename => 'wheezy', :puppetversion => Puppet.version, } } # There are 4 resources in this class currently # there should not be any more resources because it is a params class # The resources are class[apt::params], class[main], class[settings], stage[main] it "Should not contain any resources" do expect(subject.call.resources.size).to eq(4) end describe "With lsb-release not installed" do let(:facts) { { :osfamily => 'Debian', :puppetversion => Puppet.version, } } let (:title) { 'my_package' } it do expect { subject.call }.to raise_error(Puppet::Error, /Unable to determine lsbdistid, please install lsb-release first/) end end end
2c95afd0345b9f41fe16ade6ceaae4fc39e0ebe2
alura/algoritmos/TestaMenorPreco.java
alura/algoritmos/TestaMenorPreco.java
public class TestaMenorPreco { public static void main(String[] args) { Produto produtos[] = new Produto[5]; produtos[0] = new Produto("Lamborghini", 1000000); produtos[1] = new Produto("Jipe", 46000); produtos[2] = new Produto("Brasília", 16000); produtos[3] = new Produto("Smart", 46000); produtos[4] = new Produto("Fusca", 17000); int maisBarato = 0; for(int atual = 0; atual <= 4; atual++) { if(produtos[atual].getPreco() < produtos[maisBarato].getPreco()) { maisBarato = atual; } } System.out.println(maisBarato); System.out.println("O carro " + produtos[maisBarato].getNome() + " é o mais barato, e custa " + produtos[maisBarato].getPreco()); } }
public class TestaMenorPreco { public static void main(String[] args) { Produto produtos[] = { new Produto("Lamborghini", 1000000), new Produto("Jipe", 46000), new Produto("Brasília", 16000), new Produto("Smart", 46000), new Produto("Fusca", 17000) }; int maisBarato = 0; for(int atual = 0; atual <= 4; atual++) { if(produtos[atual].getPreco() < produtos[maisBarato].getPreco()) { maisBarato = atual; } } System.out.println(maisBarato); System.out.println("O carro " + produtos[maisBarato].getNome() + " é o mais barato, e custa " + produtos[maisBarato].getPreco()); } }
Insert new files, Alura, Projeto de Algoritmos 1, Aula 10
Insert new files, Alura, Projeto de Algoritmos 1, Aula 10
Java
mit
fabriciofmsilva/labs,fabriciofmsilva/labs,fabriciofmsilva/labs,fabriciofmsilva/labs,fabriciofmsilva/labs,fabriciofmsilva/labs,fabriciofmsilva/labs,fabriciofmsilva/labs,fabriciofmsilva/labs,fabriciofmsilva/labs,fabriciofmsilva/labs,fabriciofmsilva/labs,fabriciofmsilva/labs,fabriciofmsilva/labs
java
## Code Before: public class TestaMenorPreco { public static void main(String[] args) { Produto produtos[] = new Produto[5]; produtos[0] = new Produto("Lamborghini", 1000000); produtos[1] = new Produto("Jipe", 46000); produtos[2] = new Produto("Brasília", 16000); produtos[3] = new Produto("Smart", 46000); produtos[4] = new Produto("Fusca", 17000); int maisBarato = 0; for(int atual = 0; atual <= 4; atual++) { if(produtos[atual].getPreco() < produtos[maisBarato].getPreco()) { maisBarato = atual; } } System.out.println(maisBarato); System.out.println("O carro " + produtos[maisBarato].getNome() + " é o mais barato, e custa " + produtos[maisBarato].getPreco()); } } ## Instruction: Insert new files, Alura, Projeto de Algoritmos 1, Aula 10 ## Code After: public class TestaMenorPreco { public static void main(String[] args) { Produto produtos[] = { new Produto("Lamborghini", 1000000), new Produto("Jipe", 46000), new Produto("Brasília", 16000), new Produto("Smart", 46000), new Produto("Fusca", 17000) }; int maisBarato = 0; for(int atual = 0; atual <= 4; atual++) { if(produtos[atual].getPreco() < produtos[maisBarato].getPreco()) { maisBarato = atual; } } System.out.println(maisBarato); System.out.println("O carro " + produtos[maisBarato].getNome() + " é o mais barato, e custa " + produtos[maisBarato].getPreco()); } }
c34039bbc9e403ed0b435e5c3b819c36bd77c801
README.md
README.md
The OS2KITOS was programmed by IT Minds ApS (http://it-minds.dk) for OS2 - Offentligt digitaliseringsfællesskab (http://os2web.dk). Copyright (c) 2014, OS2 - Offentligt digitaliseringsfællesskab. The OS2KITOS is free software; you may use, study, modify and distribute it under the terms of version 2.0 of the Mozilla Public License. See the LICENSE file for details. If a copy of the MPL was not distributed with this file, You can obtain one at http://mozilla.org/MPL/2.0/. All source code in this and the underlying directories is subject to the terms of the Mozilla Public License, v. 2.0. [![Build status](https://ci.appveyor.com/api/projects/status/ryacihsuw13o1qcw/branch/master?svg=true)](https://ci.appveyor.com/project/Kitos/kitos/branch/master) [![codecov.io](https://codecov.io/github/os2kitos/kitos/coverage.svg?branch=master)](https://codecov.io/github/os2kitos/kitos?branch=master) This website uses [BrowserStack](https://www.browserstack.com/) for testing. ![BrowserStackLogo](https://cdn.rawgit.com/os2kitos/kitos/e2e/project-edit/Assets/BrowserStack.svg)
The OS2KITOS was programmed by IT Minds ApS (http://it-minds.dk) for OS2 - Offentligt digitaliseringsfællesskab (http://os2web.dk). Copyright (c) 2014, OS2 - Offentligt digitaliseringsfællesskab. The OS2KITOS is free software; you may use, study, modify and distribute it under the terms of version 2.0 of the Mozilla Public License. See the LICENSE file for details. If a copy of the MPL was not distributed with this file, You can obtain one at http://mozilla.org/MPL/2.0/. All source code in this and the underlying directories is subject to the terms of the Mozilla Public License, v. 2.0. [![Build status](https://ci.appveyor.com/api/projects/status/ryacihsuw13o1qcw/branch/master?svg=true)](https://ci.appveyor.com/project/Kitos/kitos/branch/master) [![codecov.io](https://codecov.io/github/os2kitos/kitos/coverage.svg?branch=master)](https://codecov.io/github/os2kitos/kitos?branch=master) This website uses [BrowserStack](https://www.browserstack.com/) for testing. ![BrowserStackLogo](https://cdn.rawgit.com/os2kitos/kitos/master/Assets/BrowserStack.svg)
Change URL to BrowserStack logo.
Change URL to BrowserStack logo.
Markdown
mpl-2.0
os2kitos/kitos,miracle-as/kitos,os2kitos/kitos,os2kitos/kitos,miracle-as/kitos,miracle-as/kitos,miracle-as/kitos,os2kitos/kitos
markdown
## Code Before: The OS2KITOS was programmed by IT Minds ApS (http://it-minds.dk) for OS2 - Offentligt digitaliseringsfællesskab (http://os2web.dk). Copyright (c) 2014, OS2 - Offentligt digitaliseringsfællesskab. The OS2KITOS is free software; you may use, study, modify and distribute it under the terms of version 2.0 of the Mozilla Public License. See the LICENSE file for details. If a copy of the MPL was not distributed with this file, You can obtain one at http://mozilla.org/MPL/2.0/. All source code in this and the underlying directories is subject to the terms of the Mozilla Public License, v. 2.0. [![Build status](https://ci.appveyor.com/api/projects/status/ryacihsuw13o1qcw/branch/master?svg=true)](https://ci.appveyor.com/project/Kitos/kitos/branch/master) [![codecov.io](https://codecov.io/github/os2kitos/kitos/coverage.svg?branch=master)](https://codecov.io/github/os2kitos/kitos?branch=master) This website uses [BrowserStack](https://www.browserstack.com/) for testing. ![BrowserStackLogo](https://cdn.rawgit.com/os2kitos/kitos/e2e/project-edit/Assets/BrowserStack.svg) ## Instruction: Change URL to BrowserStack logo. ## Code After: The OS2KITOS was programmed by IT Minds ApS (http://it-minds.dk) for OS2 - Offentligt digitaliseringsfællesskab (http://os2web.dk). Copyright (c) 2014, OS2 - Offentligt digitaliseringsfællesskab. The OS2KITOS is free software; you may use, study, modify and distribute it under the terms of version 2.0 of the Mozilla Public License. See the LICENSE file for details. If a copy of the MPL was not distributed with this file, You can obtain one at http://mozilla.org/MPL/2.0/. All source code in this and the underlying directories is subject to the terms of the Mozilla Public License, v. 2.0. [![Build status](https://ci.appveyor.com/api/projects/status/ryacihsuw13o1qcw/branch/master?svg=true)](https://ci.appveyor.com/project/Kitos/kitos/branch/master) [![codecov.io](https://codecov.io/github/os2kitos/kitos/coverage.svg?branch=master)](https://codecov.io/github/os2kitos/kitos?branch=master) This website uses [BrowserStack](https://www.browserstack.com/) for testing. ![BrowserStackLogo](https://cdn.rawgit.com/os2kitos/kitos/master/Assets/BrowserStack.svg)
e582e122d11b33e6e9f39ca76ce0e978a22361ab
build/webpack.config.js
build/webpack.config.js
const path = require('path'); const webpack = require('webpack'); const ExtractTextPlugin = require("extract-text-webpack-plugin"); const CopyWebpackPlugin = require('copy-webpack-plugin'); const HtmlWebpackPlugin = require('html-webpack-plugin'); const rules = require('./rules'); const config = require('./config'); const webpackConfig = { entry: { app: ['./src/app/index.js'] }, output: { path: __dirname + '/dist/', filename: 'app.js', publicPath: '/' }, module: { rules }, devServer: config.dev, plugins: [ new ExtractTextPlugin({ filename: 'style.css', disable: false, allChunks: true }), new CopyWebpackPlugin([ {from: './src/page', to: './page' } ]), new HtmlWebpackPlugin({ filename: 'index.html', template: 'index.html', hash: process.env.NODE_ENV === 'production' }), new webpack.HotModuleReplacementPlugin(), new webpack.NoEmitOnErrorsPlugin() ] }; if (process.env.NODE_ENV === 'production') { config.plugins = config.plugins.concat([ new webpack.optimize.UglifyJsPlugin({ compress: { warnings: false } }) ]); } module.exports = webpackConfig;
const path = require('path'); const webpack = require('webpack'); const ExtractTextPlugin = require("extract-text-webpack-plugin"); const CopyWebpackPlugin = require('copy-webpack-plugin'); const HtmlWebpackPlugin = require('html-webpack-plugin'); const rules = require('./rules'); const config = require('./config'); const webpackConfig = { entry: { app: ['./src/app/index.js'] }, output: { path: __dirname + '/dist/', filename: 'app.js', publicPath: '/' }, module: { rules }, devServer: config.dev, plugins: [ new ExtractTextPlugin({ filename: 'style.css', disable: false, allChunks: true }), new CopyWebpackPlugin([ {from: './src/page', to: './page' } ]), new HtmlWebpackPlugin({ filename: 'index.html', template: 'index.html', hash: process.env.NODE_ENV === 'production' }), new webpack.HotModuleReplacementPlugin(), new webpack.NoEmitOnErrorsPlugin() ] }; if (process.env.NODE_ENV === 'production') { config.plugins = config.plugins.concat([ new webpack.DefinePlugin({ 'process.env.NODE_ENV': JSON.stringify('production') }), new webpack.optimize.UglifyJsPlugin({ compress: { warnings: false } }) ]); } module.exports = webpackConfig;
Add 'NODE_ENV' environment variable plugin.
Add 'NODE_ENV' environment variable plugin.
JavaScript
mit
hejianxian/framework7-webpack-demo,Alex-fun/framework7-webpack-demo,Alex-fun/framework7-webpack-demo,hejianxian/framework7-webpack-demo
javascript
## Code Before: const path = require('path'); const webpack = require('webpack'); const ExtractTextPlugin = require("extract-text-webpack-plugin"); const CopyWebpackPlugin = require('copy-webpack-plugin'); const HtmlWebpackPlugin = require('html-webpack-plugin'); const rules = require('./rules'); const config = require('./config'); const webpackConfig = { entry: { app: ['./src/app/index.js'] }, output: { path: __dirname + '/dist/', filename: 'app.js', publicPath: '/' }, module: { rules }, devServer: config.dev, plugins: [ new ExtractTextPlugin({ filename: 'style.css', disable: false, allChunks: true }), new CopyWebpackPlugin([ {from: './src/page', to: './page' } ]), new HtmlWebpackPlugin({ filename: 'index.html', template: 'index.html', hash: process.env.NODE_ENV === 'production' }), new webpack.HotModuleReplacementPlugin(), new webpack.NoEmitOnErrorsPlugin() ] }; if (process.env.NODE_ENV === 'production') { config.plugins = config.plugins.concat([ new webpack.optimize.UglifyJsPlugin({ compress: { warnings: false } }) ]); } module.exports = webpackConfig; ## Instruction: Add 'NODE_ENV' environment variable plugin. ## Code After: const path = require('path'); const webpack = require('webpack'); const ExtractTextPlugin = require("extract-text-webpack-plugin"); const CopyWebpackPlugin = require('copy-webpack-plugin'); const HtmlWebpackPlugin = require('html-webpack-plugin'); const rules = require('./rules'); const config = require('./config'); const webpackConfig = { entry: { app: ['./src/app/index.js'] }, output: { path: __dirname + '/dist/', filename: 'app.js', publicPath: '/' }, module: { rules }, devServer: config.dev, plugins: [ new ExtractTextPlugin({ filename: 'style.css', disable: false, allChunks: true }), new CopyWebpackPlugin([ {from: './src/page', to: './page' } ]), new HtmlWebpackPlugin({ filename: 'index.html', template: 'index.html', hash: process.env.NODE_ENV === 'production' }), new webpack.HotModuleReplacementPlugin(), new webpack.NoEmitOnErrorsPlugin() ] }; if (process.env.NODE_ENV === 'production') { config.plugins = config.plugins.concat([ new webpack.DefinePlugin({ 'process.env.NODE_ENV': JSON.stringify('production') }), new webpack.optimize.UglifyJsPlugin({ compress: { warnings: false } }) ]); } module.exports = webpackConfig;
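The reason the new `DefinePlugin` entry sits next to `UglifyJsPlugin` in the production branch is that DefinePlugin performs a compile-time substitution: every `process.env.NODE_ENV` in bundled code becomes the literal string `'production'`, so environment checks turn into constant expressions that the minifier (and libraries that branch on `NODE_ENV`) can drop. A small illustration of the effect — the snippet is a hypothetical example, not part of this record:

```js
// Application source:
if (process.env.NODE_ENV !== 'production') {
  console.log('debug-only logging');
}

// What the bundle effectively contains after DefinePlugin substitutes the value:
if ('production' !== 'production') {   // constant, always false
  console.log('debug-only logging');
}
// ...which UglifyJsPlugin then removes as dead code.
```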
3655a79c5280a5780cc9c7b5f924a2f658b708ec
README.md
README.md
[![Build Status](https://travis-ci.org/cookpad/kuroko2.svg?branch=master)](https://travis-ci.org/cookpad/kuroko2) [![Gem Version](https://badge.fury.io/rb/kuroko2.svg)](https://badge.fury.io/rb/kuroko2) ![](app/assets/images/kuroko2/kuroko-logo-horizontal.png) Kuroko2 is a web-based job scheduler / workflow engine. ## Installation ```bash $ rails new your_kuroko2_application --database=mysql --skip-turbolinks --skip-javascript -m https://raw.githubusercontent.com/cookpad/kuroko2/master/app_template.rb ``` And edits "config/kuroko2.yml" ## Documentation Documentation is available at [docs/index.md](docs/index.md). ## Authors - Naoto Takai - Eisuke Oishi - Kohei Suzuki - Taiki Ono - Takashi Kokubun ## Contributors - Shota Iguchi - Hiroyuki Inoue - hogelog - gfx - kaorimatz - makimoto - shigemk2 - winebarrel - t8m8 - yohfee ## License The gem is available as open source under the terms of the [MIT License](http://opensource.org/licenses/MIT).
[![Build Status](https://travis-ci.org/cookpad/kuroko2.svg?branch=master)](https://travis-ci.org/cookpad/kuroko2) [![Gem Version](https://badge.fury.io/rb/kuroko2.svg)](https://badge.fury.io/rb/kuroko2) ![](app/assets/images/kuroko2/kuroko-logo-horizontal.png) Kuroko2 is a web-based job scheduler / workflow engine. ## Installation ```bash $ rails new your_kuroko2_application --database=mysql --skip-turbolinks --skip-javascript -m https://raw.githubusercontent.com/cookpad/kuroko2/master/app_template.rb ``` And edits "config/kuroko2.yml" ## Documentation Documentation is available at [docs/index.md](docs/index.md). ## Authors - Naoto Takai - Eisuke Oishi - Kohei Suzuki - Taiki Ono - Takashi Kokubun ## Contributors - Shota Iguchi - Hiroyuki Inoue - hogelog - gfx - kaorimatz - makimoto - shigemk2 - winebarrel - t8m8 - yohfee - takonomura ## License The gem is available as open source under the terms of the [MIT License](http://opensource.org/licenses/MIT).
Add my ID to contributors
Add my ID to contributors
Markdown
mit
cookpad/kuroko2,cookpad/kuroko2,cookpad/kuroko2
markdown
## Code Before: [![Build Status](https://travis-ci.org/cookpad/kuroko2.svg?branch=master)](https://travis-ci.org/cookpad/kuroko2) [![Gem Version](https://badge.fury.io/rb/kuroko2.svg)](https://badge.fury.io/rb/kuroko2) ![](app/assets/images/kuroko2/kuroko-logo-horizontal.png) Kuroko2 is a web-based job scheduler / workflow engine. ## Installation ```bash $ rails new your_kuroko2_application --database=mysql --skip-turbolinks --skip-javascript -m https://raw.githubusercontent.com/cookpad/kuroko2/master/app_template.rb ``` And edits "config/kuroko2.yml" ## Documentation Documentation is available at [docs/index.md](docs/index.md). ## Authors - Naoto Takai - Eisuke Oishi - Kohei Suzuki - Taiki Ono - Takashi Kokubun ## Contributors - Shota Iguchi - Hiroyuki Inoue - hogelog - gfx - kaorimatz - makimoto - shigemk2 - winebarrel - t8m8 - yohfee ## License The gem is available as open source under the terms of the [MIT License](http://opensource.org/licenses/MIT). ## Instruction: Add my ID to contributors ## Code After: [![Build Status](https://travis-ci.org/cookpad/kuroko2.svg?branch=master)](https://travis-ci.org/cookpad/kuroko2) [![Gem Version](https://badge.fury.io/rb/kuroko2.svg)](https://badge.fury.io/rb/kuroko2) ![](app/assets/images/kuroko2/kuroko-logo-horizontal.png) Kuroko2 is a web-based job scheduler / workflow engine. ## Installation ```bash $ rails new your_kuroko2_application --database=mysql --skip-turbolinks --skip-javascript -m https://raw.githubusercontent.com/cookpad/kuroko2/master/app_template.rb ``` And edits "config/kuroko2.yml" ## Documentation Documentation is available at [docs/index.md](docs/index.md). ## Authors - Naoto Takai - Eisuke Oishi - Kohei Suzuki - Taiki Ono - Takashi Kokubun ## Contributors - Shota Iguchi - Hiroyuki Inoue - hogelog - gfx - kaorimatz - makimoto - shigemk2 - winebarrel - t8m8 - yohfee - takonomura ## License The gem is available as open source under the terms of the [MIT License](http://opensource.org/licenses/MIT).
a754093717067806d8e4b59072d12addcca8991d
SQL/Creation_Statements.sql
SQL/Creation_Statements.sql
CREATE TABLE Countries( Name VARCHAR(30) CLUSTERED PRIMARY KEY ); CREATE TABLE Persons( Name VARCHAR(30) CLUSTERED PRIMARY KEY, Country VARCHAR(30), FOREIGN KEY Country REFERENCES(Countries) ); CREATE TABLE Organizations( SID VARCHAR(10) CLUSTERED PRIMARY KEY, Name VARCHAR(30) NOT NULL, Icon VARCHAR(100),/* can be saved locally or as URL to RSI */ ); CREATE TABLE OrgsInCog( SID VARCHAR(10) CLUSTERED NOT NULL, Representative VARCHAR(30) NOT NULL, FOREIGN KEY SID REFERENCES(Organizations), FOREIGN KEY Representative REFERENCES(Persons) );
CREATE TABLE tbl_Countries( Name VARCHAR(30) PRIMARY KEY ENGINE=NDBCLUSTER ); CREATE TABLE tbl_Persons( Name VARCHAR(30) CLUSTERED PRIMARY KEY, Country VARCHAR(30), FOREIGN KEY Country REFERENCES(Countries) ); CREATE TABLE tbl_Organizations( SID VARCHAR(10) CLUSTERED PRIMARY KEY, Name VARCHAR(30) NOT NULL, Icon VARCHAR(100),/* can be saved locally or as URL to RSI */ ); CREATE TABLE tbl_OrgsInCog( SID VARCHAR(10) CLUSTERED NOT NULL, Representative VARCHAR(30) NOT NULL, FOREIGN KEY SID REFERENCES(Organizations), FOREIGN KEY Representative REFERENCES(Persons) );
Edit SQL statements to correct syntax
Edit SQL statements to correct syntax
SQL
agpl-3.0
CognitionCorporation/Cognition.corp,LucFauvel/Cognition.corp,LucFauvel/Cognition.corp,CognitionCorporation/Cognition.corp,LucFauvel/Cognition.corp,CognitionCorporation/Cognition.corp,LucFauvel/Cognition.corp,CognitionCorporation/Cognition.corp,CognitionCorporation/Cognition.corp,LucFauvel/Cognition.corp
sql
## Code Before: CREATE TABLE Countries( Name VARCHAR(30) CLUSTERED PRIMARY KEY ); CREATE TABLE Persons( Name VARCHAR(30) CLUSTERED PRIMARY KEY, Country VARCHAR(30), FOREIGN KEY Country REFERENCES(Countries) ); CREATE TABLE Organizations( SID VARCHAR(10) CLUSTERED PRIMARY KEY, Name VARCHAR(30) NOT NULL, Icon VARCHAR(100),/* can be saved locally or as URL to RSI */ ); CREATE TABLE OrgsInCog( SID VARCHAR(10) CLUSTERED NOT NULL, Representative VARCHAR(30) NOT NULL, FOREIGN KEY SID REFERENCES(Organizations), FOREIGN KEY Representative REFERENCES(Persons) ); ## Instruction: Edit SQL statements to correct syntax ## Code After: CREATE TABLE tbl_Countries( Name VARCHAR(30) PRIMARY KEY ENGINE=NDBCLUSTER ); CREATE TABLE tbl_Persons( Name VARCHAR(30) CLUSTERED PRIMARY KEY, Country VARCHAR(30), FOREIGN KEY Country REFERENCES(Countries) ); CREATE TABLE tbl_Organizations( SID VARCHAR(10) CLUSTERED PRIMARY KEY, Name VARCHAR(30) NOT NULL, Icon VARCHAR(100),/* can be saved locally or as URL to RSI */ ); CREATE TABLE tbl_OrgsInCog( SID VARCHAR(10) CLUSTERED NOT NULL, Representative VARCHAR(30) NOT NULL, FOREIGN KEY SID REFERENCES(Organizations), FOREIGN KEY Representative REFERENCES(Persons) );
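For anyone trying to run the statements in this record: neither the before nor the after version is valid MySQL — `CLUSTERED` is not a MySQL keyword, `ENGINE=NDBCLUSTER` is a table option that belongs after the closing parenthesis, `FOREIGN KEY` takes a column list before `REFERENCES table(column)`, and the trailing comma after the `Icon` column is a syntax error. Purely as an illustration (not what the commit contains), the first two tables could be written in MySQL roughly as:

```sql
-- Hypothetical MySQL form of the first two tables from the commit above.
CREATE TABLE tbl_Countries (
    Name VARCHAR(30) PRIMARY KEY
) ENGINE=NDBCLUSTER;

CREATE TABLE tbl_Persons (
    Name VARCHAR(30) PRIMARY KEY,
    Country VARCHAR(30),
    FOREIGN KEY (Country) REFERENCES tbl_Countries (Name)
) ENGINE=NDBCLUSTER;
```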
3f66831c866766307dd505f35e80dbeae2e1d54f
packages/gh/ghc-datasize.yaml
packages/gh/ghc-datasize.yaml
homepage: http://felsin9.de/nnis/ghc-datasize changelog-type: '' hash: c39bddd766dd29d1c669304b27c14d5f3836c8c6a0cf3fe9606a67cccc008497 test-bench-deps: {} maintainer: Dennis Felsing <[email protected]> synopsis: Determine the size of data structures in GHC's memory changelog: '' basic-deps: ghc-lib-parser-ex: -any base: '>=4.12 && <4.15' ghc-prim: '>=0.5 && <0.7' deepseq: '>=1.3 && <1.5' all-versions: - '0.1' - 0.1.1 - 0.1.2 - 0.2.0 - 0.2.1 - 0.2.2 - 0.2.3 author: Dennis Felsing <[email protected]> latest: 0.2.3 description-type: haddock description: |- ghc-datasize is a tool to determine the size of data structures in GHC's memory. Determining the size of recursive data structures is supported. All sizes are in Bytes. license-name: BSD-3-Clause
homepage: http://felsin9.de/nnis/ghc-datasize changelog-type: '' hash: 84a644c9395f5890e01ec6619c04d330cb194c132677cc45cc8d5a7192c3663b test-bench-deps: {} maintainer: Dennis Felsing <[email protected]> synopsis: Determine the size of data structures in GHC's memory changelog: '' basic-deps: base: '>=4.12 && <4.15' ghc-lib-parser: '>=8.6 && <8.11' ghc-prim: '>=0.5 && <0.7' deepseq: '>=1.3 && <1.5' all-versions: - '0.1' - 0.1.1 - 0.1.2 - 0.2.0 - 0.2.1 - 0.2.2 - 0.2.3 - 0.2.4 author: Dennis Felsing <[email protected]> latest: 0.2.4 description-type: haddock description: |- ghc-datasize is a tool to determine the size of data structures in GHC's memory. Determining the size of recursive data structures is supported. All sizes are in Bytes. license-name: BSD-3-Clause
Update from Hackage at 2021-10-16T16:15:07Z
Update from Hackage at 2021-10-16T16:15:07Z
YAML
mit
commercialhaskell/all-cabal-metadata
yaml
## Code Before: homepage: http://felsin9.de/nnis/ghc-datasize changelog-type: '' hash: c39bddd766dd29d1c669304b27c14d5f3836c8c6a0cf3fe9606a67cccc008497 test-bench-deps: {} maintainer: Dennis Felsing <[email protected]> synopsis: Determine the size of data structures in GHC's memory changelog: '' basic-deps: ghc-lib-parser-ex: -any base: '>=4.12 && <4.15' ghc-prim: '>=0.5 && <0.7' deepseq: '>=1.3 && <1.5' all-versions: - '0.1' - 0.1.1 - 0.1.2 - 0.2.0 - 0.2.1 - 0.2.2 - 0.2.3 author: Dennis Felsing <[email protected]> latest: 0.2.3 description-type: haddock description: |- ghc-datasize is a tool to determine the size of data structures in GHC's memory. Determining the size of recursive data structures is supported. All sizes are in Bytes. license-name: BSD-3-Clause ## Instruction: Update from Hackage at 2021-10-16T16:15:07Z ## Code After: homepage: http://felsin9.de/nnis/ghc-datasize changelog-type: '' hash: 84a644c9395f5890e01ec6619c04d330cb194c132677cc45cc8d5a7192c3663b test-bench-deps: {} maintainer: Dennis Felsing <[email protected]> synopsis: Determine the size of data structures in GHC's memory changelog: '' basic-deps: base: '>=4.12 && <4.15' ghc-lib-parser: '>=8.6 && <8.11' ghc-prim: '>=0.5 && <0.7' deepseq: '>=1.3 && <1.5' all-versions: - '0.1' - 0.1.1 - 0.1.2 - 0.2.0 - 0.2.1 - 0.2.2 - 0.2.3 - 0.2.4 author: Dennis Felsing <[email protected]> latest: 0.2.4 description-type: haddock description: |- ghc-datasize is a tool to determine the size of data structures in GHC's memory. Determining the size of recursive data structures is supported. All sizes are in Bytes. license-name: BSD-3-Clause
175387274ce9ecd69f86d71944a7e954315d941b
plugin/textfile.cpp
plugin/textfile.cpp
TextFile::TextFile(QObject *parent) : QObject(parent), mValid(false) { } QString TextFile::text() const { return mText; } QString TextFile::source() const { return mSource; } void TextFile::setSource(const QString &file) { mValid = false; mSource = file; QFile f(file); if (!f.open(QIODevice::ReadOnly)) { return; } mText = f.readAll(); mValid = !mText.isEmpty(); emit sourceChanged(); emit textChanged(); emit validChanged(); }
TextFile::TextFile(QObject *parent) : QObject(parent), mValid(false) { } QString TextFile::text() const { return mText; } QString TextFile::source() const { return mSource; } void TextFile::setSource(const QString &file) { mValid = false; mSource = file; QFile f(file); if (!f.open(QIODevice::ReadOnly)) { return; } QTextStream stream(&f); mText = stream.readAll(); mValid = !mText.isEmpty(); emit sourceChanged(); emit textChanged(); emit validChanged(); }
Read the text file using QTextStream so that it is encoding-aware
Read the text file using QTextStream so that it is encoding-aware
C++
apache-2.0
meego-tablet-ux/meego-app-im,meego-tablet-ux/meego-app-im
c++
## Code Before: TextFile::TextFile(QObject *parent) : QObject(parent), mValid(false) { } QString TextFile::text() const { return mText; } QString TextFile::source() const { return mSource; } void TextFile::setSource(const QString &file) { mValid = false; mSource = file; QFile f(file); if (!f.open(QIODevice::ReadOnly)) { return; } mText = f.readAll(); mValid = !mText.isEmpty(); emit sourceChanged(); emit textChanged(); emit validChanged(); } ## Instruction: Read the text file using QTextStream so that it is encoding-aware ## Code After: TextFile::TextFile(QObject *parent) : QObject(parent), mValid(false) { } QString TextFile::text() const { return mText; } QString TextFile::source() const { return mSource; } void TextFile::setSource(const QString &file) { mValid = false; mSource = file; QFile f(file); if (!f.open(QIODevice::ReadOnly)) { return; } QTextStream stream(&f); mText = stream.readAll(); mValid = !mText.isEmpty(); emit sourceChanged(); emit textChanged(); emit validChanged(); }
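One nuance behind the `QTextStream` change above: by default the stream decodes with the system locale's 8-bit codec (auto-detecting a Unicode BOM if present), so it is encoding-aware but still locale-dependent. If the file is known to be UTF-8, the codec can be pinned explicitly — a hypothetical variation using the Qt 4/5 API (`QTextStream::setCodec` was removed in Qt 6 in favour of `setEncoding`):

```cpp
QTextStream stream(&f);
stream.setCodec("UTF-8");   // decode as UTF-8 regardless of the system locale
mText = stream.readAll();
```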
0db7bd8ca0033c1530bcefcbd49002364dba908a
scripts/xz_wrap.sh
scripts/xz_wrap.sh
BCJ= LZMA2OPTS= case $ARCH in x86|x86_64) BCJ=--x86 ;; powerpc) BCJ=--powerpc ;; ia64) BCJ=--ia64; LZMA2OPTS=pb=4 ;; arm) BCJ=--arm ;; sparc) BCJ=--sparc ;; esac exec xz --check=crc32 $BCJ --lzma2=$LZMA2OPTS,dict=32MiB
BCJ= LZMA2OPTS= case $SRCARCH in x86) BCJ=--x86 ;; powerpc) BCJ=--powerpc ;; ia64) BCJ=--ia64; LZMA2OPTS=pb=4 ;; arm) BCJ=--arm ;; sparc) BCJ=--sparc ;; esac exec xz --check=crc32 $BCJ --lzma2=$LZMA2OPTS,dict=32MiB
Enable BCJ filters on SPARC and 32-bit x86
xz: Enable BCJ filters on SPARC and 32-bit x86 The BCJ filters were meant to be enabled already on these archs, but the xz_wrap.sh script was buggy. Enabling the filters should give smaller kernel images. xz_wrap.sh will now use $SRCARCH instead of $ARCH to detect the architecture. That way it doesn't need to care about the subarchs (like i386 vs. x86_64) since the BCJ filters don't care either. Signed-off-by: Lasse Collin <[email protected]> Acked-by: Jan Beulich <[email protected]> Acked-by: H. Peter Anvin <[email protected]> Signed-off-by: Linus Torvalds <[email protected]>
Shell
mit
KristFoundation/Programs,TeamVee-Kanas/android_kernel_samsung_kanas,TeamVee-Kanas/android_kernel_samsung_kanas,KristFoundation/Programs,KristFoundation/Programs,KristFoundation/Programs,KristFoundation/Programs,TeamVee-Kanas/android_kernel_samsung_kanas,TeamVee-Kanas/android_kernel_samsung_kanas,KristFoundation/Programs,TeamVee-Kanas/android_kernel_samsung_kanas
shell
## Code Before: BCJ= LZMA2OPTS= case $ARCH in x86|x86_64) BCJ=--x86 ;; powerpc) BCJ=--powerpc ;; ia64) BCJ=--ia64; LZMA2OPTS=pb=4 ;; arm) BCJ=--arm ;; sparc) BCJ=--sparc ;; esac exec xz --check=crc32 $BCJ --lzma2=$LZMA2OPTS,dict=32MiB ## Instruction: xz: Enable BCJ filters on SPARC and 32-bit x86 The BCJ filters were meant to be enabled already on these archs, but the xz_wrap.sh script was buggy. Enabling the filters should give smaller kernel images. xz_wrap.sh will now use $SRCARCH instead of $ARCH to detect the architecture. That way it doesn't need to care about the subarchs (like i386 vs. x86_64) since the BCJ filters don't care either. Signed-off-by: Lasse Collin <[email protected]> Acked-by: Jan Beulich <[email protected]> Acked-by: H. Peter Anvin <[email protected]> Signed-off-by: Linus Torvalds <[email protected]> ## Code After: BCJ= LZMA2OPTS= case $SRCARCH in x86) BCJ=--x86 ;; powerpc) BCJ=--powerpc ;; ia64) BCJ=--ia64; LZMA2OPTS=pb=4 ;; arm) BCJ=--arm ;; sparc) BCJ=--sparc ;; esac exec xz --check=crc32 $BCJ --lzma2=$LZMA2OPTS,dict=32MiB
33eb8004e43adafd3a6f43e4e3db115b2c7f5b8c
process/process.go
process/process.go
// Copyright 2015 Canonical Ltd. // Licensed under the AGPLv3, see LICENCE file for details. package process import ( "gopkg.in/juju/charm.v6-unstable" ) // Status represents the status of a worload process. type Status string // Status values specific to workload processes. const ( StatusPending Status = "pending" StatusActive Status = "active" StatusFailed Status = "failed" StatusStopped Status = "stopped" ) // ProcessInfo holds information about a process that Juju needs. type ProcessInfo struct { charm.Process // Status is the overall Juju status of the workload process. Status Status // Space is the networking space with which the process was started. Space string // EnvVars is the set of environment variables with which the // process was started. EnvVars map[string]string // Details is the information about the process which the plugin provided. Details ProcessDetails }
// Copyright 2015 Canonical Ltd. // Licensed under the AGPLv3, see LICENCE file for details. package process import ( "gopkg.in/juju/charm.v6-unstable" ) // Status values specific to workload processes. const ( StatusPending Status = iota StatusActive StatusFailed StatusStopped ) // Status represents the status of a worload process. type Status string // String implements fmt.Stringer. func (s Status) String() string { switch status { case StatusPending: return "pending" case StatusActive: return "active" case StatusFailed: return "failed" case StatusStopped: return "stopped" } return "Unknown" } // ProcessInfo holds information about a process that Juju needs. type ProcessInfo struct { charm.Process // Status is the overall Juju status of the workload process. Status Status // Space is the networking space with which the process was started. Space string // EnvVars is the set of environment variables with which the // process was started. EnvVars map[string]string // Details is the information about the process which the plugin provided. Details ProcessDetails }
Change Status to an int (with a String method).
Change Status to an int (with a String method).
Go
agpl-3.0
perrito666/juju,voidspace/juju,makyo/juju,perrito666/juju,dimitern/juju,axw/juju,howbazaar/juju,davecheney/juju,voidspace/juju,reedobrien/juju,bz2/juju,marcmolla/juju,frankban/juju,dimitern/juju,kat-co/juju,gabriel-samfira/juju,mjs/juju,kat-co/juju,howbazaar/juju,axw/juju,ericsnowcurrently/juju,anastasiamac/juju,mjs/juju,alesstimec/juju,frankban/juju,anastasiamac/juju,perrito666/juju,reedobrien/juju,dooferlad/juju,wwitzel3/juju,anastasiamac/juju,frankban/juju,bogdanteleaga/juju,mikemccracken/juju,kat-co/juju,marcmolla/juju,dimitern/juju,axw/juju,bz2/juju,kat-co/juju,axw/juju,AdamIsrael/juju,dooferlad/juju,bac/juju,ericsnowcurrently/juju,macgreagoir/juju,bogdanteleaga/juju,perrito666/juju,mikemccracken/juju,macgreagoir/juju,howbazaar/juju,dooferlad/juju,ericsnowcurrently/juju,reedobrien/juju,mjs/juju,ericsnowcurrently/juju,marcmolla/juju,bz2/juju,wwitzel3/juju,davecheney/juju,voidspace/juju,mjs/juju,perrito666/juju,gabriel-samfira/juju,mjs/juju,voidspace/juju,waigani/juju,howbazaar/juju,mikemccracken/juju,mikemccracken/juju,axw/juju,bac/juju,anastasiamac/juju,bac/juju,reedobrien/juju,ericsnowcurrently/juju,bogdanteleaga/juju,davecheney/juju,waigani/juju,alesstimec/juju,kat-co/juju,dimitern/juju,AdamIsrael/juju,gabriel-samfira/juju,marcmolla/juju,reedobrien/juju,makyo/juju,howbazaar/juju,wwitzel3/juju,waigani/juju,dooferlad/juju,fwereade/juju,gabriel-samfira/juju,marcmolla/juju,anastasiamac/juju,bz2/juju,gabriel-samfira/juju,alesstimec/juju,dimitern/juju,macgreagoir/juju,frankban/juju,fwereade/juju,waigani/juju,macgreagoir/juju,makyo/juju,mjs/juju,mikemccracken/juju,AdamIsrael/juju,bac/juju,alesstimec/juju,AdamIsrael/juju,dooferlad/juju,bac/juju,bogdanteleaga/juju,makyo/juju,bz2/juju,fwereade/juju,alesstimec/juju,frankban/juju,mjs/juju,fwereade/juju,davecheney/juju,wwitzel3/juju,macgreagoir/juju
go
## Code Before: // Copyright 2015 Canonical Ltd. // Licensed under the AGPLv3, see LICENCE file for details. package process import ( "gopkg.in/juju/charm.v6-unstable" ) // Status represents the status of a worload process. type Status string // Status values specific to workload processes. const ( StatusPending Status = "pending" StatusActive Status = "active" StatusFailed Status = "failed" StatusStopped Status = "stopped" ) // ProcessInfo holds information about a process that Juju needs. type ProcessInfo struct { charm.Process // Status is the overall Juju status of the workload process. Status Status // Space is the networking space with which the process was started. Space string // EnvVars is the set of environment variables with which the // process was started. EnvVars map[string]string // Details is the information about the process which the plugin provided. Details ProcessDetails } ## Instruction: Change Status to an int (with a String method). ## Code After: // Copyright 2015 Canonical Ltd. // Licensed under the AGPLv3, see LICENCE file for details. package process import ( "gopkg.in/juju/charm.v6-unstable" ) // Status values specific to workload processes. const ( StatusPending Status = iota StatusActive StatusFailed StatusStopped ) // Status represents the status of a worload process. type Status string // String implements fmt.Stringer. func (s Status) String() string { switch status { case StatusPending: return "pending" case StatusActive: return "active" case StatusFailed: return "failed" case StatusStopped: return "stopped" } return "Unknown" } // ProcessInfo holds information about a process that Juju needs. type ProcessInfo struct { charm.Process // Status is the overall Juju status of the workload process. Status Status // Space is the networking space with which the process was started. Space string // EnvVars is the set of environment variables with which the // process was started. EnvVars map[string]string // Details is the information about the process which the plugin provided. Details ProcessDetails }
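Note that the "Code After" recorded here does not compile as shown: `Status` is still declared as a `string` while its constants assign `iota` (an integer), and `String()` switches on `status` although the receiver is named `s`. Purely as an illustration of what the commit subject ("Change Status to an int (with a String method)") conventionally looks like in Go — not the code this record contains — an int-backed version would be:

```go
// Status represents the status of a workload process.
type Status int

// Status values specific to workload processes.
const (
	StatusPending Status = iota
	StatusActive
	StatusFailed
	StatusStopped
)

// String implements fmt.Stringer.
func (s Status) String() string {
	switch s {
	case StatusPending:
		return "pending"
	case StatusActive:
		return "active"
	case StatusFailed:
		return "failed"
	case StatusStopped:
		return "stopped"
	}
	return "Unknown"
}
```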
22db418b789ef0c5fbdaa0792cebb15be8f89488
lib/bankscrap/exporters/csv.rb
lib/bankscrap/exporters/csv.rb
require 'csv' module BankScrap module Exporter class Csv HEADERS = %w(Date Description Amount).freeze def initialize(output = nil) @output = output || 'transactions.csv' end def write_to_file(data) CSV.open(@output, 'wb') do |csv| csv << HEADERS data.each { |line| csv << line.to_a } end end end end end
require 'csv' module BankScrap module Exporter class Csv HEADERS = %w(ID Date Description DescriptionDetails Amount).freeze def initialize(output = nil) @output = output || 'transactions.csv' end def write_to_file(data) CSV.open(@output, 'wb') do |csv| csv << HEADERS data.each { |line| csv << line.to_a } end end end end end
Fix headers for CSV exporter
Fix headers for CSV exporter
Ruby
mit
bankscrap/bankscrap,bank-scrap/bank_scrap
ruby
## Code Before: require 'csv' module BankScrap module Exporter class Csv HEADERS = %w(Date Description Amount).freeze def initialize(output = nil) @output = output || 'transactions.csv' end def write_to_file(data) CSV.open(@output, 'wb') do |csv| csv << HEADERS data.each { |line| csv << line.to_a } end end end end end ## Instruction: Fix headers for CSV exporter ## Code After: require 'csv' module BankScrap module Exporter class Csv HEADERS = %w(ID Date Description DescriptionDetails Amount).freeze def initialize(output = nil) @output = output || 'transactions.csv' end def write_to_file(data) CSV.open(@output, 'wb') do |csv| csv << HEADERS data.each { |line| csv << line.to_a } end end end end end
f9d539f814698636836a7c66c56c4b94385a1bed
example/tutorial/K2_callbacksInYourSketch/K2_callbacksInYourSketch.pde
example/tutorial/K2_callbacksInYourSketch/K2_callbacksInYourSketch.pde
import wordcram.*; void setup() { size(1000, 500); smooth(); background(255); new WordCram(this) .fromWebPage("http://wordcram.org") .drawAll(); } void wordsCounted(Word[] words) { println("counted " + words.length + " words!"); } void beginDraw() { println("beginDraw: drawing the sketch..."); } int wordsDrawn = 0; void wordDrawn(Word word) { //println("drew this word! " + word.word); wordsDrawn++; } int wordsSkipped = 0; void wordSkipped(Word word) { //println("skipped " + word.word + " because " + word.wasSkippedBecause()); wordsSkipped++; } void endDraw() { println("endDraw!"); println("- skipped: " + wordsSkipped); println("- drawn: " + wordsDrawn); }
import wordcram.*; void setup() { size(1000, 500); smooth(); background(255); new WordCram(this) .fromTextFile("../kari-the-elephant.txt") .drawAll(); } void wordsCounted(Word[] words) { println("counted " + words.length + " words!"); } void beginDraw() { println("beginDraw: drawing the sketch..."); } int wordsDrawn = 0; void wordDrawn(Word word) { wordsDrawn++; } int wordsSkipped = 0; void wordSkipped(Word word) { wordsSkipped++; } void endDraw() { println("endDraw!"); println("- skipped: " + wordsSkipped); println("- drawn: " + wordsDrawn); }
Clean up callbacks example sketch
Clean up callbacks example sketch
Processing
apache-2.0
danbernier/WordCram,danbernier/WordCram,danbernier/WordCram
processing
## Code Before: import wordcram.*; void setup() { size(1000, 500); smooth(); background(255); new WordCram(this) .fromWebPage("http://wordcram.org") .drawAll(); } void wordsCounted(Word[] words) { println("counted " + words.length + " words!"); } void beginDraw() { println("beginDraw: drawing the sketch..."); } int wordsDrawn = 0; void wordDrawn(Word word) { //println("drew this word! " + word.word); wordsDrawn++; } int wordsSkipped = 0; void wordSkipped(Word word) { //println("skipped " + word.word + " because " + word.wasSkippedBecause()); wordsSkipped++; } void endDraw() { println("endDraw!"); println("- skipped: " + wordsSkipped); println("- drawn: " + wordsDrawn); } ## Instruction: Clean up callbacks example sketch ## Code After: import wordcram.*; void setup() { size(1000, 500); smooth(); background(255); new WordCram(this) .fromTextFile("../kari-the-elephant.txt") .drawAll(); } void wordsCounted(Word[] words) { println("counted " + words.length + " words!"); } void beginDraw() { println("beginDraw: drawing the sketch..."); } int wordsDrawn = 0; void wordDrawn(Word word) { wordsDrawn++; } int wordsSkipped = 0; void wordSkipped(Word word) { wordsSkipped++; } void endDraw() { println("endDraw!"); println("- skipped: " + wordsSkipped); println("- drawn: " + wordsDrawn); }
7c21fd83672713f12b44e3adec676d3b31276822
src/backend/worker.js
src/backend/worker.js
import { errors } from "./lib/gcloud"; import * as store from "./lib/store"; import * as taskQueue from "./lib/taskqueue"; import * as resolve from "./lib/resolve"; taskQueue.subscribe("trap-topic", "trap-subscription", (err, msg) => { if (err) { return errors.report(err); } const data = msg.data; if (!data.info || !data.info.ip) { return msg.ack(); } const ip = data.info.ip; Promise.all([resolve.ipToASNs(ip), resolve.reverse(ip)]) .then(([asns, reverse]) => { data.info.reverse = reverse; data.info.asns = asns; return store.visit(data.target, data.timestamp, data.info); }) .then(() => msg.ack()) .catch(errors.report); }).catch(errors.report);
import { errors } from "./lib/gcloud"; import net from "net"; import { URL } from "url"; import request from "request"; import * as store from "./lib/store"; import * as taskQueue from "./lib/taskqueue"; import * as resolve from "./lib/resolve"; const WHEREABOUTS_URL = process.env.WHEREABOUTS_URL || "http://localhost:8080"; function whereabouts(ip) { return new Promise((resolve, reject) => { if (!net.isIP(ip)) { return resolve(undefined); } const url = new URL("api/whereabouts/" + ip, WHEREABOUTS_URL).toString(); return request({ url: String(url), json: true }, (err, res) => { if (err) { return reject(err); } const json = res.body; if (json && json.country && json.country.code) { return resolve(json.country.code); } resolve(undefined); }); }); } taskQueue.subscribe("trap-topic", "trap-subscription", (err, msg) => { if (err) { return errors.report(err); } const data = msg.data; if (!data.info || !data.info.ip) { return msg.ack(); } const ip = data.info.ip; Promise.all([resolve.ipToASNs(ip), resolve.reverse(ip), whereabouts(ip)]) .then(([asns, reverse, country]) => { data.info.reverse = reverse; data.info.asns = asns; data.info.country = country; return store.visit(data.target, data.timestamp, data.info); }) .then(() => msg.ack()) .catch(errors.report); }).catch(errors.report);
Resolve IP geolocations with hownetworks/whereabouts
Resolve IP geolocations with hownetworks/whereabouts
JavaScript
mit
HowNetWorks/uriteller
javascript
## Code Before: import { errors } from "./lib/gcloud"; import * as store from "./lib/store"; import * as taskQueue from "./lib/taskqueue"; import * as resolve from "./lib/resolve"; taskQueue.subscribe("trap-topic", "trap-subscription", (err, msg) => { if (err) { return errors.report(err); } const data = msg.data; if (!data.info || !data.info.ip) { return msg.ack(); } const ip = data.info.ip; Promise.all([resolve.ipToASNs(ip), resolve.reverse(ip)]) .then(([asns, reverse]) => { data.info.reverse = reverse; data.info.asns = asns; return store.visit(data.target, data.timestamp, data.info); }) .then(() => msg.ack()) .catch(errors.report); }).catch(errors.report); ## Instruction: Resolve IP geolocations with hownetworks/whereabouts ## Code After: import { errors } from "./lib/gcloud"; import net from "net"; import { URL } from "url"; import request from "request"; import * as store from "./lib/store"; import * as taskQueue from "./lib/taskqueue"; import * as resolve from "./lib/resolve"; const WHEREABOUTS_URL = process.env.WHEREABOUTS_URL || "http://localhost:8080"; function whereabouts(ip) { return new Promise((resolve, reject) => { if (!net.isIP(ip)) { return resolve(undefined); } const url = new URL("api/whereabouts/" + ip, WHEREABOUTS_URL).toString(); return request({ url: String(url), json: true }, (err, res) => { if (err) { return reject(err); } const json = res.body; if (json && json.country && json.country.code) { return resolve(json.country.code); } resolve(undefined); }); }); } taskQueue.subscribe("trap-topic", "trap-subscription", (err, msg) => { if (err) { return errors.report(err); } const data = msg.data; if (!data.info || !data.info.ip) { return msg.ack(); } const ip = data.info.ip; Promise.all([resolve.ipToASNs(ip), resolve.reverse(ip), whereabouts(ip)]) .then(([asns, reverse, country]) => { data.info.reverse = reverse; data.info.asns = asns; data.info.country = country; return store.visit(data.target, data.timestamp, data.info); }) .then(() => msg.ack()) .catch(errors.report); }).catch(errors.report);
4457fe3b7a2619287e2d87422041dd02a219cded
wu.sh
wu.sh
YEARS=1 for airport in `cat airports.txt` do mkdir -pv csv/$airport for dt in `python -c "from datetime import datetime, timedelta; print \ '\n'.join(((datetime.now() - timedelta(days=day)).strftime('%Y-%m-%d')) \ for day in range($YEARS * 365, 0, -1))"` do URL=`echo $airport $dt \ | sed 's/\-/ /g' \ | awk '{printf("https://www.wunderground.com/history/airport/%s/%d/%d/%d/DailyHistory.html?format=1\n", $1, $2, $3, $4)}'` if [[ ! -f csv/$airport/$dt.csv ]] then sleep 1 curl -s $URL | sed 's/<br \/>$//g' > csv/$airport/$dt.csv fi done done
YEARS=${1:-1} for airport in `cat airports.txt` do mkdir -pv csv/$airport for dt in `python -c "from datetime import datetime, timedelta; \ now = datetime.now(); \ print '\n'.join(((now - timedelta(days=day)).strftime('%Y-%m-%d')) \ for day in range((now - datetime(now.year - $YEARS, now.month, \ now.day)).days, 0, -1))"` do URL=`echo $airport $dt \ | sed 's/\-/ /g' \ | awk '{printf("https://www.wunderground.com/history/airport/%s/%d/%d/%d/DailyHistory.html?format=1\n", $1, $2, $3, $4)}'` if [[ ! -f csv/$airport/$dt.csv ]] then sleep 1 curl -s $URL | sed 's/<br \/>$//g' > csv/$airport/$dt.csv fi done done
Support for leap years when spanning multiple years
Support for leap years when spanning multiple years
Shell
mit
nrgetik/wu-tang,nrgetik/wu-tang
shell
## Code Before: YEARS=1 for airport in `cat airports.txt` do mkdir -pv csv/$airport for dt in `python -c "from datetime import datetime, timedelta; print \ '\n'.join(((datetime.now() - timedelta(days=day)).strftime('%Y-%m-%d')) \ for day in range($YEARS * 365, 0, -1))"` do URL=`echo $airport $dt \ | sed 's/\-/ /g' \ | awk '{printf("https://www.wunderground.com/history/airport/%s/%d/%d/%d/DailyHistory.html?format=1\n", $1, $2, $3, $4)}'` if [[ ! -f csv/$airport/$dt.csv ]] then sleep 1 curl -s $URL | sed 's/<br \/>$//g' > csv/$airport/$dt.csv fi done done ## Instruction: Support for leap years when spanning multiple years ## Code After: YEARS=${1:-1} for airport in `cat airports.txt` do mkdir -pv csv/$airport for dt in `python -c "from datetime import datetime, timedelta; \ now = datetime.now(); \ print '\n'.join(((now - timedelta(days=day)).strftime('%Y-%m-%d')) \ for day in range((now - datetime(now.year - $YEARS, now.month, \ now.day)).days, 0, -1))"` do URL=`echo $airport $dt \ | sed 's/\-/ /g' \ | awk '{printf("https://www.wunderground.com/history/airport/%s/%d/%d/%d/DailyHistory.html?format=1\n", $1, $2, $3, $4)}'` if [[ ! -f csv/$airport/$dt.csv ]] then sleep 1 curl -s $URL | sed 's/<br \/>$//g' > csv/$airport/$dt.csv fi done done
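The point of replacing `$YEARS * 365` with a subtraction of two `datetime` values is that the resulting day count includes any February 29ths falling in the range. A quick worked example of the expression used in the new script (hypothetical dates, not part of this record):

```python
from datetime import datetime

# Equivalent of (now - datetime(now.year - YEARS, now.month, now.day)).days
# with now = 2016-03-01 and YEARS = 1; the range spans 2016-02-29:
print((datetime(2016, 3, 1) - datetime(2015, 3, 1)).days)  # 366, not 365
```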
a61cbc24388a1438616cb354f16349cdfa262083
samples/run-tests.sh
samples/run-tests.sh
mkdir -p target/ ../sync-function-maker.rb sample-sync-doc-definitions.js target/test-sample-sync-function.js node_modules/.bin/mocha
mkdir -p target/ ../make-sync-function.rb sample-sync-doc-definitions.js target/test-sample-sync-function.js node_modules/.bin/mocha
Fix the filename of the make-sync-function script in the test runner
Fix the filename of the make-sync-function script in the test runner
Shell
mit
Kashoo/synctos,Kashoo/synctos
shell
## Code Before: mkdir -p target/ ../sync-function-maker.rb sample-sync-doc-definitions.js target/test-sample-sync-function.js node_modules/.bin/mocha ## Instruction: Fix the filename of the make-sync-function script in the test runner ## Code After: mkdir -p target/ ../make-sync-function.rb sample-sync-doc-definitions.js target/test-sample-sync-function.js node_modules/.bin/mocha
d0113df4c4cc9ed22b85da9a4404759277c93c75
packages/hekla-cli/src/commands/analyze.js
packages/hekla-cli/src/commands/analyze.js
module.exports = function analyze() { console.log('analyzing...'); };
'use strict'; const path = require('path'); const Analyzer = require('hekla-core').Analyzer; module.exports = function analyze() { let config; try { config = require(getHeklafilePath()); } catch (err) { console.error('Configuration error:', err.stack); process.exit(1); } const analyzer = new Analyzer(config); analyzer.run(); }; function getHeklafilePath() { return path.resolve(process.cwd(), 'Heklafile.js'); }
Connect CLI tool to Analyzer
Connect CLI tool to Analyzer
JavaScript
mit
andrewjensen/hekla,andrewjensen/hekla
javascript
## Code Before: module.exports = function analyze() { console.log('analyzing...'); }; ## Instruction: Connect CLI tool to Analyzer ## Code After: 'use strict'; const path = require('path'); const Analyzer = require('hekla-core').Analyzer; module.exports = function analyze() { let config; try { config = require(getHeklafilePath()); } catch (err) { console.error('Configuration error:', err.stack); process.exit(1); } const analyzer = new Analyzer(config); analyzer.run(); }; function getHeklafilePath() { return path.resolve(process.cwd(), 'Heklafile.js'); }
eced699f22c13af9215630a4b60fef6cd01afdf8
alerts/postgresql/metadata.yaml
alerts/postgresql/metadata.yaml
alert_policy_templates: - id: "high-connections-utilization" display_name: "Postgresql - High Connection Utilization" description: "Alert fires when active connections are near a threshold of 90. Around this point is where the instance may run into connection issues and may start refusing connections." version: 1 - id: "high-cpu-utilization" display_name: "Postgres - High CPU Utilization" description: "Alerts whenever the CPU utilization goes above 80% which usually indicates the instance's performance is heavily degraded and likely is going to impact applications reliant on postgres." version: 1 - id: "high-db-size" display_name: "Postgresql - High Database Size" description: "Alert fires when the database size is growing greater than expected (this value will be subject to instance size and utilization); defaulted to 93 GB but will be subject to instance size as well as connected storage." version: 1 - id: "high-db-size" display_name: "Postgresql - Reached Max Written Buffers" description: "Alert fires when the background writer attains 3 maxwritten errors i.e. it cannot flush buffers because it has written too many. This is an indication that if an outage were to occur, then any bytes pending may be subject to be lost." version: 1
alert_policy_templates: - id: "high-connections-utilization" display_name: "Postgresql - High Connection Utilization" description: "Alert fires when active connections are near a threshold of 90. Around this point is where the instance may run into connection issues and may start refusing connections." version: 1 - id: "high-cpu-utilization" display_name: "Postgres - High CPU Utilization" description: "Alerts whenever the CPU utilization goes above 80% which usually indicates the instance's performance is heavily degraded and likely is going to impact applications reliant on postgres." version: 1 - id: "high-db-size" display_name: "Postgresql - High Database Size" description: "Alert fires when the database size is growing greater than expected (this value will be subject to instance size and utilization); defaulted to 93 GB but will be subject to instance size as well as connected storage." version: 1 - id: "max-writes" display_name: "Postgresql - Reached Max Written Buffers" description: "Alert fires when the background writer attains 3 maxwritten errors i.e. it cannot flush buffers because it has written too many. This is an indication that if an outage were to occur, then any bytes pending may be subject to be lost." version: 1
Fix incorrect, duplicate alert policy template ID
Fix incorrect, duplicate alert policy template ID
YAML
apache-2.0
GoogleCloudPlatform/monitoring-dashboard-samples,GoogleCloudPlatform/monitoring-dashboard-samples
yaml
## Code Before: alert_policy_templates: - id: "high-connections-utilization" display_name: "Postgresql - High Connection Utilization" description: "Alert fires when active connections are near a threshold of 90. Around this point is where the instance may run into connection issues and may start refusing connections." version: 1 - id: "high-cpu-utilization" display_name: "Postgres - High CPU Utilization" description: "Alerts whenever the CPU utilization goes above 80% which usually indicates the instance's performance is heavily degraded and likely is going to impact applications reliant on postgres." version: 1 - id: "high-db-size" display_name: "Postgresql - High Database Size" description: "Alert fires when the database size is growing greater than expected (this value will be subject to instance size and utilization); defaulted to 93 GB but will be subject to instance size as well as connected storage." version: 1 - id: "high-db-size" display_name: "Postgresql - Reached Max Written Buffers" description: "Alert fires when the background writer attains 3 maxwritten errors i.e. it cannot flush buffers because it has written too many. This is an indication that if an outage were to occur, then any bytes pending may be subject to be lost." version: 1 ## Instruction: Fix incorrect, duplicate alert policy template ID ## Code After: alert_policy_templates: - id: "high-connections-utilization" display_name: "Postgresql - High Connection Utilization" description: "Alert fires when active connections are near a threshold of 90. Around this point is where the instance may run into connection issues and may start refusing connections." version: 1 - id: "high-cpu-utilization" display_name: "Postgres - High CPU Utilization" description: "Alerts whenever the CPU utilization goes above 80% which usually indicates the instance's performance is heavily degraded and likely is going to impact applications reliant on postgres." version: 1 - id: "high-db-size" display_name: "Postgresql - High Database Size" description: "Alert fires when the database size is growing greater than expected (this value will be subject to instance size and utilization); defaulted to 93 GB but will be subject to instance size as well as connected storage." version: 1 - id: "max-writes" display_name: "Postgresql - Reached Max Written Buffers" description: "Alert fires when the background writer attains 3 maxwritten errors i.e. it cannot flush buffers because it has written too many. This is an indication that if an outage were to occur, then any bytes pending may be subject to be lost." version: 1
c27178386eaed213ca1eb798cc479408ad03a298
src/main.c
src/main.c
void die(const char *s) { perror(s); exit(1); } int main() { struct sockaddr_in si_other; int s, slen = sizeof(si_other); char buf[BUFLEN]; if ((s = socket(AF_INET, SOCK_DGRAM, IPPROTO_UDP)) == -1) { die("socket"); } memset((char*)&si_other, 0, sizeof(si_other)); si_other.sin_family = AF_INET; si_other.sin_port = htons(PORT); if (inet_aton(SERVER, &si_other.sin_addr) == 0) { fprintf(stderr, "inet_aton() failed\n"); exit(1); } while (1) { printf("Enter message:\n"); const char* msg = "hello"; if (sendto(s, msg, strlen(msg), 0, (struct sockaddr*) &si_other, slen) == -1) { die("sendto()"); } memset(buf, '\0', BUFLEN); break; } close(s); return 0; }
__attribute__((noreturn)) void failed(const char* s) { perror(s); exit(1); } int main() { int s = socket(AF_INET, SOCK_DGRAM, IPPROTO_UDP); if (s == -1) { failed("socket()"); } }
Remove broken prototype, starting out clean
Remove broken prototype, starting out clean
C
mit
darthdeus/dit,darthdeus/dit,darthdeus/dit
c
## Code Before: void die(const char *s) { perror(s); exit(1); } int main() { struct sockaddr_in si_other; int s, slen = sizeof(si_other); char buf[BUFLEN]; if ((s = socket(AF_INET, SOCK_DGRAM, IPPROTO_UDP)) == -1) { die("socket"); } memset((char*)&si_other, 0, sizeof(si_other)); si_other.sin_family = AF_INET; si_other.sin_port = htons(PORT); if (inet_aton(SERVER, &si_other.sin_addr) == 0) { fprintf(stderr, "inet_aton() failed\n"); exit(1); } while (1) { printf("Enter message:\n"); const char* msg = "hello"; if (sendto(s, msg, strlen(msg), 0, (struct sockaddr*) &si_other, slen) == -1) { die("sendto()"); } memset(buf, '\0', BUFLEN); break; } close(s); return 0; } ## Instruction: Remove broken prototype, starting out clean ## Code After: __attribute__((noreturn)) void failed(const char* s) { perror(s); exit(1); } int main() { int s = socket(AF_INET, SOCK_DGRAM, IPPROTO_UDP); if (s == -1) { failed("socket()"); } }
be5ff5772deaa4520234ef0d08a05aca5a2c892b
tests/integration.sh
tests/integration.sh
errors=0 error_handler () { (( errors++ )) } trap error_handler ERR # Run a Stencila Docker container which provides all external language contexts docker run --detach --publish 2100:2000 stencila/alpha sleep 5 # Configured using Docker container as only peer STENCILA_PEERS=http://localhost:2100 node tools/runner.js tests/documents/external-language-cells.html # Configured using peer dicovery STENCILA_DISCOVER=30 node tools/runner.js tests/documents/external-language-cells.html # Exit with number of errors exit $errors
errors=0 error_handler () { (( errors++ )) } trap error_handler ERR # Run a Stencila Docker container which provides a Node Host (as have in Desktop) docker run --detach --publish 2000:2000 stencila/iota # Run a Stencila Docker container which provides several language Hosts docker run --detach --publish 2100:2000 stencila/alpha sleep 5 # Configured using one of the containers as only peer STENCILA_PEERS=http://localhost:2100 node tools/runner.js tests/documents/external-language-cells.html # Configured using peer dicovery (this is current configuration for Desktop) STENCILA_DISCOVER=30 node tools/runner.js tests/documents/external-language-cells.html # Exit with number of errors exit $errors
Make tests more like Desktop setup (multiple peers)
Make tests more like Desktop setup (multiple peers)
Shell
apache-2.0
stencila/stencila,stencila/stencila,stencila/stencila,stencila/stencila,stencila/stencila,stencila/stencila,stencila/stencila
shell
## Code Before: errors=0 error_handler () { (( errors++ )) } trap error_handler ERR # Run a Stencila Docker container which provides all external language contexts docker run --detach --publish 2100:2000 stencila/alpha sleep 5 # Configured using Docker container as only peer STENCILA_PEERS=http://localhost:2100 node tools/runner.js tests/documents/external-language-cells.html # Configured using peer dicovery STENCILA_DISCOVER=30 node tools/runner.js tests/documents/external-language-cells.html # Exit with number of errors exit $errors ## Instruction: Make tests more like Desktop setup (multiple peers) ## Code After: errors=0 error_handler () { (( errors++ )) } trap error_handler ERR # Run a Stencila Docker container which provides a Node Host (as have in Desktop) docker run --detach --publish 2000:2000 stencila/iota # Run a Stencila Docker container which provides several language Hosts docker run --detach --publish 2100:2000 stencila/alpha sleep 5 # Configured using one of the containers as only peer STENCILA_PEERS=http://localhost:2100 node tools/runner.js tests/documents/external-language-cells.html # Configured using peer dicovery (this is current configuration for Desktop) STENCILA_DISCOVER=30 node tools/runner.js tests/documents/external-language-cells.html # Exit with number of errors exit $errors
21b27894f9b389742827c4709dfeaf314bb7a245
wger/manager/templates/robots.txt
wger/manager/templates/robots.txt
Sitemap: http://wger.de/de/sitemap.xml Sitemap: http://wger.de/en/sitemap.xml
Sitemap: http://wger.de/de/sitemap.xml Sitemap: http://wger.de/en/sitemap.xml Sitemap: http://wger.de/gb/sitemap.xml
Add link to sitemap of Bulgarian version of website
Add link to sitemap of Bulgarian version of website
Text
agpl-3.0
kjagoo/wger_stark,kjagoo/wger_stark,DeveloperMal/wger,petervanderdoes/wger,DeveloperMal/wger,DeveloperMal/wger,wger-project/wger,rolandgeider/wger,wger-project/wger,rolandgeider/wger,rolandgeider/wger,kjagoo/wger_stark,kjagoo/wger_stark,petervanderdoes/wger,wger-project/wger,petervanderdoes/wger,wger-project/wger,DeveloperMal/wger,petervanderdoes/wger,rolandgeider/wger
text
## Code Before: Sitemap: http://wger.de/de/sitemap.xml Sitemap: http://wger.de/en/sitemap.xml ## Instruction: Add link to sitemap of Bulgarian version of website ## Code After: Sitemap: http://wger.de/de/sitemap.xml Sitemap: http://wger.de/en/sitemap.xml Sitemap: http://wger.de/gb/sitemap.xml
93f0d993dbfc7bbbd88c452c5aace505b73c761f
index.js
index.js
'use strict'; module.exports = function(){ };
'use strict'; var fs = require('fs'); var stream = require('stream'); var readline = require('readline'); var moment = require('moment'); function readFileContent(filename, callback){ var lines = []; var instream = fs.createReadStream(filename); var outstream = new stream; outstream.readable = true; outstream.writable = true; var rl = readline.createInterface({ input: instream, output: outstream, terminal: false }); rl.on('line', function(line){ lines.push(formatLine(line)); }); rl.on('close', function(){ callback(null, lines); }); } function formatLine(line) { // Remove empty lines if(!line || !line.length) { return; } var lineParts = line.split(': '); return messageDetails(lineParts); } function messageDetails(parts){ var date = formatDate(parts[0]); var details = { date: date }; if(parts[2]){ details.sender = parts[1]; // remove timestamp and sender info parts.splice(0, 2); details.message = parts.join(': '); return details; } details.message = parts[1]; details.announcement = true; return details; } function formatDate(timestamp){ if(timestamp.length !== 17){ throw new Error('Timestamp is of the wrong length:', timestamp); } return moment(timestamp, 'DD/MM/YY HH:mm:ss').format(); } module.exports = function(filename){ return readFileContent.apply(this, arguments); };
Add initial file-reading and line formatting
Add initial file-reading and line formatting
JavaScript
mit
matiassingers/whatsapp-log-parser
javascript
## Code Before: 'use strict'; module.exports = function(){ }; ## Instruction: Add initial file-reading and line formatting ## Code After: 'use strict'; var fs = require('fs'); var stream = require('stream'); var readline = require('readline'); var moment = require('moment'); function readFileContent(filename, callback){ var lines = []; var instream = fs.createReadStream(filename); var outstream = new stream; outstream.readable = true; outstream.writable = true; var rl = readline.createInterface({ input: instream, output: outstream, terminal: false }); rl.on('line', function(line){ lines.push(formatLine(line)); }); rl.on('close', function(){ callback(null, lines); }); } function formatLine(line) { // Remove empty lines if(!line || !line.length) { return; } var lineParts = line.split(': '); return messageDetails(lineParts); } function messageDetails(parts){ var date = formatDate(parts[0]); var details = { date: date }; if(parts[2]){ details.sender = parts[1]; // remove timestamp and sender info parts.splice(0, 2); details.message = parts.join(': '); return details; } details.message = parts[1]; details.announcement = true; return details; } function formatDate(timestamp){ if(timestamp.length !== 17){ throw new Error('Timestamp is of the wrong length:', timestamp); } return moment(timestamp, 'DD/MM/YY HH:mm:ss').format(); } module.exports = function(filename){ return readFileContent.apply(this, arguments); };
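The parser above leans on two conventions of the exported chat log: each line opens with a 17-character `DD/MM/YY HH:mm:ss` timestamp, and a second `': '` separator distinguishes a user message from a system announcement. A rough Python rendering of the same per-line logic, for illustration only (not part of the package):

```python
# Rough Python sketch of the same WhatsApp line format (illustration only).
from datetime import datetime

def parse_line(line):
    if not line:
        return None                                   # skip empty lines
    stamp, _, rest = line.partition(": ")
    if len(stamp) != 17:                              # "DD/MM/YY HH:mm:ss" is 17 characters
        raise ValueError(f"timestamp is of the wrong length: {stamp!r}")
    date = datetime.strptime(stamp, "%d/%m/%y %H:%M:%S")
    sender, sep, message = rest.partition(": ")
    if sep:                                           # "sender: message"
        return {"date": date, "sender": sender, "message": message}
    return {"date": date, "message": rest, "announcement": True}

parse_line("01/02/15 13:37:00: Alice: hello there")
```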
82d789f57af95c4b10afea10f5a654a7a5b65b80
_includes/bio.html
_includes/bio.html
<div id='author-bio'> <div class='gravatar'> <img src="https://www.gravatar.com/avatar/04fd01b158b5ca07858d023533b01b24?s=200"> </div> <div class='bio'> Hi, I’m Nithin Bekal. I work at <a href='https://www.shopify.com'>Shopify</a> in Ottawa, Canada. Previously, co-founder of <a href='https://www.crowdstudio.in/'>CrowdStudio.in</a> and <a href='https://wowmakers.com/'>WowMakers</a>. Ruby is my preferred programming language, and the topic of most of my articles here, but I'm also a big fan of Elixir. Tweet to me at <a href='https://twitter.com/nithinbekal'>@nithinbekal</a>. <br> <br> <div class='links'> <a id='bio-home-link' href='/'>Posts</a> <a id='bio-about-link' href='/about/'>About</a> <a id='bio-resume-link' href='/resume'>Resume</a> </div> </div> </div>
<div id='author-bio'> <div class='gravatar'> <img src="https://www.gravatar.com/avatar/04fd01b158b5ca07858d023533b01b24?s=200" alt="Nithin Bekal"> </div> <div class='bio'> Hi, I’m Nithin Bekal. I work at <a href='https://www.shopify.com'>Shopify</a> in Ottawa, Canada. Previously, co-founder of <a href='https://www.crowdstudio.in/'>CrowdStudio.in</a> and <a href='https://wowmakers.com/'>WowMakers</a>. Ruby is my preferred programming language, and the topic of most of my articles here, but I'm also a big fan of Elixir. Tweet to me at <a href='https://twitter.com/nithinbekal'>@nithinbekal</a>. <br> <br> <div class='links'> <a id='bio-home-link' href='/'>Posts</a> <a id='bio-about-link' href='/about/'>About</a> <a id='bio-resume-link' href='/resume'>Resume</a> </div> </div> </div>
Add alt text to gravatar
Add alt text to gravatar
HTML
mit
nithinbekal/nithinbekal.github.io,nithinbekal/nithinbekal.github.io
html
## Code Before: <div id='author-bio'> <div class='gravatar'> <img src="https://www.gravatar.com/avatar/04fd01b158b5ca07858d023533b01b24?s=200"> </div> <div class='bio'> Hi, I’m Nithin Bekal. I work at <a href='https://www.shopify.com'>Shopify</a> in Ottawa, Canada. Previously, co-founder of <a href='https://www.crowdstudio.in/'>CrowdStudio.in</a> and <a href='https://wowmakers.com/'>WowMakers</a>. Ruby is my preferred programming language, and the topic of most of my articles here, but I'm also a big fan of Elixir. Tweet to me at <a href='https://twitter.com/nithinbekal'>@nithinbekal</a>. <br> <br> <div class='links'> <a id='bio-home-link' href='/'>Posts</a> <a id='bio-about-link' href='/about/'>About</a> <a id='bio-resume-link' href='/resume'>Resume</a> </div> </div> </div> ## Instruction: Add alt text to gravatar ## Code After: <div id='author-bio'> <div class='gravatar'> <img src="https://www.gravatar.com/avatar/04fd01b158b5ca07858d023533b01b24?s=200" alt="Nithin Bekal"> </div> <div class='bio'> Hi, I’m Nithin Bekal. I work at <a href='https://www.shopify.com'>Shopify</a> in Ottawa, Canada. Previously, co-founder of <a href='https://www.crowdstudio.in/'>CrowdStudio.in</a> and <a href='https://wowmakers.com/'>WowMakers</a>. Ruby is my preferred programming language, and the topic of most of my articles here, but I'm also a big fan of Elixir. Tweet to me at <a href='https://twitter.com/nithinbekal'>@nithinbekal</a>. <br> <br> <div class='links'> <a id='bio-home-link' href='/'>Posts</a> <a id='bio-about-link' href='/about/'>About</a> <a id='bio-resume-link' href='/resume'>Resume</a> </div> </div> </div>
25b6134a2ced438ec2ab2dbfde6ff7a3003c1ab7
src/routes/names.js
src/routes/names.js
const thesaurus = require('powerthesaurus-api') module.exports = (router) => { router.use('/v1/names', (req, res, next) => { Promise.all(req.query.bandname.split(' ').map((name) => thesaurus(name))) .then((results) => { const wordlist = results.map((item) => { return item.map((data) => { return data.word }) }) res.json(wordlist) }) .catch(next) }) }
const thesaurus = require('powerthesaurus-api') const ignore = [ 'a', 'the', 'of', 'in', ] module.exports = (router) => { router.use('/v1/names', (req, res, next) => { Promise.all(req.query.bandname.split(' ').map((name) => { if (ignore.indexOf(name) !== -1) return name return thesaurus(name) })) .then((results) => { const wordlist = results.map((item) => { return item.map((data) => { return data.word }) }) res.json(wordlist) }) .catch(next) }) }
Add ignore list for common words
Add ignore list for common words
JavaScript
mit
signup-from-bandname/banGen,signup-from-bandname/banGen,signup-from-bandname/banGen
javascript
## Code Before: const thesaurus = require('powerthesaurus-api') module.exports = (router) => { router.use('/v1/names', (req, res, next) => { Promise.all(req.query.bandname.split(' ').map((name) => thesaurus(name))) .then((results) => { const wordlist = results.map((item) => { return item.map((data) => { return data.word }) }) res.json(wordlist) }) .catch(next) }) } ## Instruction: Add ignore list for common words ## Code After: const thesaurus = require('powerthesaurus-api') const ignore = [ 'a', 'the', 'of', 'in', ] module.exports = (router) => { router.use('/v1/names', (req, res, next) => { Promise.all(req.query.bandname.split(' ').map((name) => { if (ignore.indexOf(name) !== -1) return name return thesaurus(name) })) .then((results) => { const wordlist = results.map((item) => { return item.map((data) => { return data.word }) }) res.json(wordlist) }) .catch(next) }) }
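The idea of the change is to short-circuit stop-words: they are passed through untouched instead of being sent to the thesaurus, which keeps the results aligned with the original word order and avoids pointless lookups. A compact sketch of the same fan-out-with-skip pattern (the `lookup_synonyms` callable is a stand-in for the thesaurus client):

```python
# Sketch of the "skip common words" fan-out; lookup_synonyms is a hypothetical stand-in.
IGNORE = {"a", "the", "of", "in"}

def expand_band_name(name, lookup_synonyms):
    results = []
    for word in name.split(" "):
        if word in IGNORE:
            results.append([word])        # pass stop-words through untouched
        else:
            results.append(lookup_synonyms(word))
    return results

expand_band_name("queens of the stone age", lambda word: [word, word.upper()])
```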
b345af1d7f6380bb944d74b3b6c558bde913a1c6
src/ui/main/main.controller.js
src/ui/main/main.controller.js
angular.module('proxtop').controller('MainController', ['$scope', 'ipcManager', '$state', 'notification', '$mdToast', '$translate', 'settings', '$mdDialog', 'open', '$window', 'debounce', function($scope, ipcManager, $state, notification, $mdToast, $translate, settings, $mdDialog, open, $window, debounce) { const ipc = ipcManager($scope); ipc.once('check-login', function(ev, result) { if(result) { ipc.send('watchlist-update'); $state.go('profile'); } else { $state.go('login'); } }); $translate.use(settings.get('general').language); ipc.send('check-login'); }]);
angular.module('proxtop').controller('MainController', ['$scope', 'ipcManager', '$state', '$mdToast', '$translate', 'settings', function($scope, ipcManager, $state, $mdToast, $translate, settings) { const ipc = ipcManager($scope); ipc.once('check-login', function(ev, result) { if(result) { ipc.send('watchlist-update'); $state.go('profile'); } else { $state.go('login'); } }); $translate.use(settings.get('general').language); ipc.send('check-login'); }]);
Fix error in last commit
Fix error in last commit
JavaScript
mit
kumpelblase2/proxtop,kumpelblase2/proxtop,kumpelblase2/proxtop,kumpelblase2/proxtop
javascript
## Code Before: angular.module('proxtop').controller('MainController', ['$scope', 'ipcManager', '$state', 'notification', '$mdToast', '$translate', 'settings', '$mdDialog', 'open', '$window', 'debounce', function($scope, ipcManager, $state, notification, $mdToast, $translate, settings, $mdDialog, open, $window, debounce) { const ipc = ipcManager($scope); ipc.once('check-login', function(ev, result) { if(result) { ipc.send('watchlist-update'); $state.go('profile'); } else { $state.go('login'); } }); $translate.use(settings.get('general').language); ipc.send('check-login'); }]); ## Instruction: Fix error in last commit ## Code After: angular.module('proxtop').controller('MainController', ['$scope', 'ipcManager', '$state', '$mdToast', '$translate', 'settings', function($scope, ipcManager, $state, $mdToast, $translate, settings) { const ipc = ipcManager($scope); ipc.once('check-login', function(ev, result) { if(result) { ipc.send('watchlist-update'); $state.go('profile'); } else { $state.go('login'); } }); $translate.use(settings.get('general').language); ipc.send('check-login'); }]);
e472690c6795a9b240f3b2ffddf5e16f19d1e9fc
package.json
package.json
{ "name": "coffee-http-proxy", "version": "0.1.2", "author": "kaz080", "description": "Simple HTTP proxy server module", "repository": { "type": "git", "url": "git://github.com/kaz080/coffee-http-proxy.git" }, "main": "./lib/proxy.js", "bin": { "proxy": "./bin/proxy" }, "scripts": { "test": "grunt", "start": "./bin/proxy" }, "dependencies": { "colors": "0.6.0-1", "optimist": "0.3.5" }, "devDependencies": { "request": "2.12.0", "should": "1.2.1", "chai": "1.4.0", "sinon": "1.5.2", "sinon-chai": "2.2.0", "grunt-coffee": "0.0.6", "grunt-mocha-test": "0.0.1" } }
{ "name": "coffee-http-proxy", "version": "0.1.2", "author": "kaz080", "description": "Simple HTTP proxy server module", "repository": { "type": "git", "url": "git://github.com/kaz080/coffee-http-proxy.git" }, "main": "./lib/proxy.js", "bin": { "proxy": "./bin/proxy" }, "scripts": { "test": "grunt", "start": "./bin/proxy" }, "dependencies": { "colors": "0.6.0-1", "optimist": "0.3.5" }, "devDependencies": { "request": "2.12.0", "should": "1.2.1", "chai": "1.4.0", "sinon": "1.5.2", "sinon-chai": "2.2.0", "grunt": "", "grunt-coffee": "0.0.6", "grunt-mocha-test": "0.0.1" } }
Add grunt to devDependencies for Travis
Add grunt to devDependencies for Travis
JSON
bsd-2-clause
kaz080/coffee-http-proxy
json
## Code Before: { "name": "coffee-http-proxy", "version": "0.1.2", "author": "kaz080", "description": "Simple HTTP proxy server module", "repository": { "type": "git", "url": "git://github.com/kaz080/coffee-http-proxy.git" }, "main": "./lib/proxy.js", "bin": { "proxy": "./bin/proxy" }, "scripts": { "test": "grunt", "start": "./bin/proxy" }, "dependencies": { "colors": "0.6.0-1", "optimist": "0.3.5" }, "devDependencies": { "request": "2.12.0", "should": "1.2.1", "chai": "1.4.0", "sinon": "1.5.2", "sinon-chai": "2.2.0", "grunt-coffee": "0.0.6", "grunt-mocha-test": "0.0.1" } } ## Instruction: Add grunt to devDependencies for Travis ## Code After: { "name": "coffee-http-proxy", "version": "0.1.2", "author": "kaz080", "description": "Simple HTTP proxy server module", "repository": { "type": "git", "url": "git://github.com/kaz080/coffee-http-proxy.git" }, "main": "./lib/proxy.js", "bin": { "proxy": "./bin/proxy" }, "scripts": { "test": "grunt", "start": "./bin/proxy" }, "dependencies": { "colors": "0.6.0-1", "optimist": "0.3.5" }, "devDependencies": { "request": "2.12.0", "should": "1.2.1", "chai": "1.4.0", "sinon": "1.5.2", "sinon-chai": "2.2.0", "grunt": "", "grunt-coffee": "0.0.6", "grunt-mocha-test": "0.0.1" } }
0e5b107fc40623659c9367051d5448ace1a69497
public/stylesheets/style.css
public/stylesheets/style.css
section { border-top: 1px solid #e5e5e5; padding: 18px; }
body { margin: 20px; } section { border-top: 1px solid #e5e5e5; padding: 18px; }
Add some space around the body
Add some space around the body
CSS
mit
pads/isomorphic-js
css
## Code Before: section { border-top: 1px solid #e5e5e5; padding: 18px; } ## Instruction: Add some space around the body ## Code After: body { margin: 20px; } section { border-top: 1px solid #e5e5e5; padding: 18px; }
0d2c8d035ef89fc9753c2e3769f478a870ac1368
models/rating.js
models/rating.js
module.exports = function (sequelize, DataTypes) { const Rating = sequelize.define('Rating', { id: { primaryKey: true, type: DataTypes.UUID, defaultValue: DataTypes.UUIDV4 }, value: { type: DataTypes.INTEGER, allowNull: false, validate: { min: 0 } }, BikeId: { type: DataTypes.UUID, allowNull: false }, VoteId: { type: DataTypes.UUID, allowNull: false } }, { classMethods: { associate (models) { Rating.belongsTo(models.Bike) Rating.belongsTo(models.Vote) } } }) return Rating }
const _ = require('lodash') const createError = require('http-errors') module.exports = function (sequelize, DataTypes) { const Rating = sequelize.define('Rating', { id: { primaryKey: true, type: DataTypes.UUID, defaultValue: DataTypes.UUIDV4 }, value: { type: DataTypes.INTEGER, allowNull: false, validate: { min: 0 } }, BikeId: { type: DataTypes.UUID, allowNull: false }, VoteId: { type: DataTypes.UUID, allowNull: false } }, { hooks: { async beforeBulkCreate (ratings, opts) { // Check for missing values const missingValues = _.difference( _.range(1, ratings.length), _.pluck(ratings, 'value') ) if (missingValues.length !== 0) throw createError(422, `Missing values ${JSON.stringify(missingValues)}`, {expose: true}) } }, classMethods: { associate (models) { Rating.belongsTo(models.Bike) Rating.belongsTo(models.Vote) } } }) return Rating }
Add missing value check to Rating model's beforeBulkCreate
Add missing value check to Rating model's beforeBulkCreate
JavaScript
apache-2.0
cesarandreu/bshed,cesarandreu/bshed
javascript
## Code Before: module.exports = function (sequelize, DataTypes) { const Rating = sequelize.define('Rating', { id: { primaryKey: true, type: DataTypes.UUID, defaultValue: DataTypes.UUIDV4 }, value: { type: DataTypes.INTEGER, allowNull: false, validate: { min: 0 } }, BikeId: { type: DataTypes.UUID, allowNull: false }, VoteId: { type: DataTypes.UUID, allowNull: false } }, { classMethods: { associate (models) { Rating.belongsTo(models.Bike) Rating.belongsTo(models.Vote) } } }) return Rating } ## Instruction: Add missing value check to Rating model's beforeBulkCreate ## Code After: const _ = require('lodash') const createError = require('http-errors') module.exports = function (sequelize, DataTypes) { const Rating = sequelize.define('Rating', { id: { primaryKey: true, type: DataTypes.UUID, defaultValue: DataTypes.UUIDV4 }, value: { type: DataTypes.INTEGER, allowNull: false, validate: { min: 0 } }, BikeId: { type: DataTypes.UUID, allowNull: false }, VoteId: { type: DataTypes.UUID, allowNull: false } }, { hooks: { async beforeBulkCreate (ratings, opts) { // Check for missing values const missingValues = _.difference( _.range(1, ratings.length), _.pluck(ratings, 'value') ) if (missingValues.length !== 0) throw createError(422, `Missing values ${JSON.stringify(missingValues)}`, {expose: true}) } }, classMethods: { associate (models) { Rating.belongsTo(models.Bike) Rating.belongsTo(models.Vote) } } }) return Rating }
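The hook's validation is essentially a set difference: take the expected rank values, subtract the values actually submitted, and reject the batch if anything is left over. The same idea with Python sets, assuming ratings are plain dicts and mirroring the `_.range(1, ratings.length)` bound used above:

```python
# The missing-value check expressed with Python sets (illustration of the idea only).
def missing_rating_values(ratings):
    expected = set(range(1, len(ratings)))            # mirrors _.range(1, ratings.length)
    submitted = {rating["value"] for rating in ratings}
    return sorted(expected - submitted)

missing_rating_values([{"value": 1}, {"value": 1}, {"value": 3}])   # -> [2]
```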
787cbc89769a6d2f2568187cc987ba9308346c21
app/services/search/global_service.rb
app/services/search/global_service.rb
module Search class GlobalService attr_accessor :current_user, :params attr_reader :default_project_filter def initialize(user, params) @current_user, @params = user, params.dup @default_project_filter = true end def execute Gitlab::SearchResults.new(current_user, projects, params[:search], default_project_filter: default_project_filter) end def projects @projects ||= ProjectsFinder.new(current_user: current_user).execute end def scope @scope ||= begin allowed_scopes = %w[issues merge_requests milestones] allowed_scopes.delete(params[:scope]) { 'projects' } end end end end
module Search class GlobalService include Gitlab::Utils::StrongMemoize attr_accessor :current_user, :params attr_reader :default_project_filter def initialize(user, params) @current_user, @params = user, params.dup @default_project_filter = true end def execute Gitlab::SearchResults.new(current_user, projects, params[:search], default_project_filter: default_project_filter) end def projects @projects ||= ProjectsFinder.new(current_user: current_user).execute end def allowed_scopes strong_memoize(:allowed_scopes) do %w[issues merge_requests milestones] end end def scope strong_memoize(:scope) do allowed_scopes.include?(params[:scope]) ? params[:scope] : 'projects' end end end end
Reduce diff with EE in Search::GlobalService
Reduce diff with EE in Search::GlobalService Signed-off-by: Rémy Coutable <[email protected]>
Ruby
mit
stoplightio/gitlabhq,mmkassem/gitlabhq,stoplightio/gitlabhq,mmkassem/gitlabhq,iiet/iiet-git,iiet/iiet-git,mmkassem/gitlabhq,mmkassem/gitlabhq,stoplightio/gitlabhq,iiet/iiet-git,stoplightio/gitlabhq,iiet/iiet-git
ruby
## Code Before: module Search class GlobalService attr_accessor :current_user, :params attr_reader :default_project_filter def initialize(user, params) @current_user, @params = user, params.dup @default_project_filter = true end def execute Gitlab::SearchResults.new(current_user, projects, params[:search], default_project_filter: default_project_filter) end def projects @projects ||= ProjectsFinder.new(current_user: current_user).execute end def scope @scope ||= begin allowed_scopes = %w[issues merge_requests milestones] allowed_scopes.delete(params[:scope]) { 'projects' } end end end end ## Instruction: Reduce diff with EE in Search::GlobalService Signed-off-by: Rémy Coutable <[email protected]> ## Code After: module Search class GlobalService include Gitlab::Utils::StrongMemoize attr_accessor :current_user, :params attr_reader :default_project_filter def initialize(user, params) @current_user, @params = user, params.dup @default_project_filter = true end def execute Gitlab::SearchResults.new(current_user, projects, params[:search], default_project_filter: default_project_filter) end def projects @projects ||= ProjectsFinder.new(current_user: current_user).execute end def allowed_scopes strong_memoize(:allowed_scopes) do %w[issues merge_requests milestones] end end def scope strong_memoize(:scope) do allowed_scopes.include?(params[:scope]) ? params[:scope] : 'projects' end end end end
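The refactor separates two concerns: a memoized whitelist of searchable scopes, and a scope reader that falls back to `projects` for anything not on that list. Roughly the same shape in Python, for comparison (the names are illustrative, not GitLab's actual API):

```python
# Rough Python analogue of the whitelist-plus-memoization shape (names are illustrative).
from functools import cached_property

class GlobalSearch:
    ALLOWED_SCOPES = ("issues", "merge_requests", "milestones")

    def __init__(self, params):
        self.params = params

    @cached_property
    def scope(self):
        requested = self.params.get("scope")
        return requested if requested in self.ALLOWED_SCOPES else "projects"

GlobalSearch({"scope": "wiki"}).scope   # -> "projects"
```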
28c5889b5b0b1e3228de29e1e2019eedfa5eea4a
app/assets/javascripts/templates/categories.jst.skim.erb
app/assets/javascripts/templates/categories.jst.skim.erb
.categories-header-container .container .categories-header-row .categories-header-col .col-md-4 img src="<%= image_path('cocoa-tree-320.png') %>" .col-md-8 p.alert.alert-warning | We need your help to add missing categories. Just send us your ' a href="https://github.com/cocoa-tree/categories" | pull-request | . h1 CocoaTree p | The CocoaTree is a comprehensive catalog of open source Objective-C projects. In essence, it is based on the popular ' a href="http://cocoapods.org" CocoaPods repository | . Additionally, popularity and activity ratings are calculated using data provided by a href="https://github.com" GitHub | . .categories-container .container .row .col-xs-12 h1 = @categories.length ' | Categories .categories-row - columns = 3 - categoriesPerColumn = parseInt(@categories.length / columns) - if categoriesPerColumn * columns < @categories.length - categoriesPerColumn++ - column = 0 - while column < columns .categories-col - i = categoriesPerColumn * column - while i < (categoriesPerColumn * (column + 1)) && i < @categories.length - category = @categories[i] a href="#pods/#{category.get('name')}" span = category.displayName() ' span.badge.badge-yellow = category.get('podsCount') - i++ - column++
.categories-header-container .container .categories-header-row .categories-header-col .col-md-4 img src="<%= image_path('cocoa-tree-320.png') %>" .col-md-8 p.alert.alert-warning | We need your help to add missing categories. Just send us your ' a href="https://github.com/cocoa-tree/categories/edit/master/cocoa_pods_categories.json" | pull-request | . h1 CocoaTree p | The CocoaTree is a comprehensive catalog of open source Objective-C projects. In essence, it is based on the popular ' a href="http://cocoapods.org" CocoaPods repository | . Additionally, popularity and activity ratings are calculated using data provided by a href="https://github.com" GitHub | . .categories-container .container .row .col-xs-12 h1 = @categories.length ' | Categories .categories-row - columns = 3 - categoriesPerColumn = parseInt(@categories.length / columns) - if categoriesPerColumn * columns < @categories.length - categoriesPerColumn++ - column = 0 - while column < columns .categories-col - i = categoriesPerColumn * column - while i < (categoriesPerColumn * (column + 1)) && i < @categories.length - category = @categories[i] a href="#pods/#{category.get('name')}" span = category.displayName() ' span.badge.badge-yellow = category.get('podsCount') - i++ - column++
Fix pull-request link on categories.
Fix pull-request link on categories.
HTML+ERB
mit
bsingr/cocoa-tree,bsingr/cocoa-tree,bsingr/cocoa-tree
html+erb
## Code Before: .categories-header-container .container .categories-header-row .categories-header-col .col-md-4 img src="<%= image_path('cocoa-tree-320.png') %>" .col-md-8 p.alert.alert-warning | We need your help to add missing categories. Just send us your ' a href="https://github.com/cocoa-tree/categories" | pull-request | . h1 CocoaTree p | The CocoaTree is a comprehensive catalog of open source Objective-C projects. In essence, it is based on the popular ' a href="http://cocoapods.org" CocoaPods repository | . Additionally, popularity and activity ratings are calculated using data provided by a href="https://github.com" GitHub | . .categories-container .container .row .col-xs-12 h1 = @categories.length ' | Categories .categories-row - columns = 3 - categoriesPerColumn = parseInt(@categories.length / columns) - if categoriesPerColumn * columns < @categories.length - categoriesPerColumn++ - column = 0 - while column < columns .categories-col - i = categoriesPerColumn * column - while i < (categoriesPerColumn * (column + 1)) && i < @categories.length - category = @categories[i] a href="#pods/#{category.get('name')}" span = category.displayName() ' span.badge.badge-yellow = category.get('podsCount') - i++ - column++ ## Instruction: Fix pull-request link on categories. ## Code After: .categories-header-container .container .categories-header-row .categories-header-col .col-md-4 img src="<%= image_path('cocoa-tree-320.png') %>" .col-md-8 p.alert.alert-warning | We need your help to add missing categories. Just send us your ' a href="https://github.com/cocoa-tree/categories/edit/master/cocoa_pods_categories.json" | pull-request | . h1 CocoaTree p | The CocoaTree is a comprehensive catalog of open source Objective-C projects. In essence, it is based on the popular ' a href="http://cocoapods.org" CocoaPods repository | . Additionally, popularity and activity ratings are calculated using data provided by a href="https://github.com" GitHub | . .categories-container .container .row .col-xs-12 h1 = @categories.length ' | Categories .categories-row - columns = 3 - categoriesPerColumn = parseInt(@categories.length / columns) - if categoriesPerColumn * columns < @categories.length - categoriesPerColumn++ - column = 0 - while column < columns .categories-col - i = categoriesPerColumn * column - while i < (categoriesPerColumn * (column + 1)) && i < @categories.length - category = @categories[i] a href="#pods/#{category.get('name')}" span = category.displayName() ' span.badge.badge-yellow = category.get('podsCount') - i++ - column++
a896071d4c282a23662e6aa3cfcaaf4a69cb901a
test/tzset.js
test/tzset.js
var should = require('should') , time = require('../') describe('tzset()', function () { beforeEach(function () { process.env.TZ = 'UTC' }) it('should work with no arguments', function () { process.env.TZ = 'US/Pacific' time.tzset() time.currentTimezone.should.equal('US/Pacific') }) it('should work with 1 argument', function () { time.tzset('US/Pacific') time.currentTimezone.should.equal('US/Pacific') }) it('should return a "zoneinfo" object', function () { var info = time.tzset() info.should.have.property('tzname').with.lengthOf(2) info.should.have.property('timezone') info.should.have.property('daylight') }) it('should set `process.env.TZ`', function () { time.tzset('US/Pacific') process.env.TZ.should.equal('US/Pacific') }) })
var should = require('should') , time = require('../') describe('tzset()', function () { beforeEach(function () { process.env.TZ = 'UTC' }) it('should work with no arguments', function () { process.env.TZ = 'US/Pacific' time.tzset() time.currentTimezone.should.equal('US/Pacific') }) it('should work with 1 argument', function () { time.tzset('US/Pacific') time.currentTimezone.should.equal('US/Pacific') }) it('should return a "zoneinfo" object', function () { var info = time.tzset() info.should.have.property('tzname').with.lengthOf(2) info.should.have.property('timezone') info.should.have.property('daylight') }) it('should set `process.env.TZ`', function () { time.tzset('US/Pacific') process.env.TZ.should.equal('US/Pacific') }) it('should work with known values', function () { var info info = time.tzset('UTC') info.tzname[0].should.equal('UTC') info.timezone.should.equal(0) info.daylight.should.equal(0) info = time.tzset('America/Los_Angeles') info.tzname[0].should.equal('PST') info.tzname[1].should.equal('PDT') info.timezone.should.not.equal(0) info = time.tzset('America/Phoenix') info.tzname[0].should.equal('MST') info.tzname[1].should.equal('MDT') info.timezone.should.not.equal(0) info = time.tzset('Europe/Copenhagen') info.tzname[0].should.equal('CET') info.tzname[1].should.equal('CEST') info.timezone.should.not.equal(0) }) })
Add a test testing some known timezone values.
Add a test testing some known timezone values.
JavaScript
mit
TooTallNate/node-time,TooTallNate/node-time,santigimeno/node-time,santigimeno/node-time,santigimeno/node-time,TooTallNate/node-time
javascript
## Code Before: var should = require('should') , time = require('../') describe('tzset()', function () { beforeEach(function () { process.env.TZ = 'UTC' }) it('should work with no arguments', function () { process.env.TZ = 'US/Pacific' time.tzset() time.currentTimezone.should.equal('US/Pacific') }) it('should work with 1 argument', function () { time.tzset('US/Pacific') time.currentTimezone.should.equal('US/Pacific') }) it('should return a "zoneinfo" object', function () { var info = time.tzset() info.should.have.property('tzname').with.lengthOf(2) info.should.have.property('timezone') info.should.have.property('daylight') }) it('should set `process.env.TZ`', function () { time.tzset('US/Pacific') process.env.TZ.should.equal('US/Pacific') }) }) ## Instruction: Add a test testing some known timezone values. ## Code After: var should = require('should') , time = require('../') describe('tzset()', function () { beforeEach(function () { process.env.TZ = 'UTC' }) it('should work with no arguments', function () { process.env.TZ = 'US/Pacific' time.tzset() time.currentTimezone.should.equal('US/Pacific') }) it('should work with 1 argument', function () { time.tzset('US/Pacific') time.currentTimezone.should.equal('US/Pacific') }) it('should return a "zoneinfo" object', function () { var info = time.tzset() info.should.have.property('tzname').with.lengthOf(2) info.should.have.property('timezone') info.should.have.property('daylight') }) it('should set `process.env.TZ`', function () { time.tzset('US/Pacific') process.env.TZ.should.equal('US/Pacific') }) it('should work with known values', function () { var info info = time.tzset('UTC') info.tzname[0].should.equal('UTC') info.timezone.should.equal(0) info.daylight.should.equal(0) info = time.tzset('America/Los_Angeles') info.tzname[0].should.equal('PST') info.tzname[1].should.equal('PDT') info.timezone.should.not.equal(0) info = time.tzset('America/Phoenix') info.tzname[0].should.equal('MST') info.tzname[1].should.equal('MDT') info.timezone.should.not.equal(0) info = time.tzset('Europe/Copenhagen') info.tzname[0].should.equal('CET') info.tzname[1].should.equal('CEST') info.timezone.should.not.equal(0) }) })
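The added test pins concrete `tzname`/`timezone`/`daylight` values for a few well-known zones instead of only checking the shape of the returned object. Python's standard library exposes the same POSIX machinery, so an equivalent spot-check looks like this (Unix-only; exact abbreviations depend on the installed tz database):

```python
# Comparable spot-check using Python's POSIX tz hooks (Unix-only; values depend on tzdata).
import os
import time

def zone_info(name):
    os.environ["TZ"] = name
    time.tzset()
    return time.tzname, time.timezone, time.daylight

assert zone_info("UTC")[1] == 0
print(zone_info("America/Los_Angeles"))   # typically (('PST', 'PDT'), 28800, 1)
print(zone_info("Europe/Copenhagen"))     # typically (('CET', 'CEST'), -3600, 1)
```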
059d603acd4d26883fa0a69d98a0eba8f45d573d
resource/fixtures/index.html
resource/fixtures/index.html
<!doctype html> <title>Hello Resource!</title> <h1>Hello Resource!</h1>
<!doctype html> <title>Hello Resource!</title> <style> body { text-align: center } </style> <h1>Hello Resource!</h1>
Add style for subresource example
Add style for subresource example
HTML
mit
snuggs/snuggsi,snuggs/snuggsi,devpunks/snuggsi,devpunks/snuggsi,snuggs/snuggsi,devpunks/snuggsi,devpunks/snuggsi,snuggs/snuggsi,devpunks/snuggsi
html
## Code Before: <!doctype html> <title>Hello Resource!</title> <h1>Hello Resource!</h1> ## Instruction: Add style for subresource example ## Code After: <!doctype html> <title>Hello Resource!</title> <style> body { text-align: center } </style> <h1>Hello Resource!</h1>
68d6a9635792527da512b536864d2c752c99af93
lib/spectrum/config/formatted_page_range_field.rb
lib/spectrum/config/formatted_page_range_field.rb
module Spectrum module Config class FormattedPageRangeField < Field type 'formatted_page_range' attr_reader :fields def initialize_from_instance(i) super @fields = i.fields end def initialize_from_hash(args, config) super @fields = {} args['fields'].each_pair do |fname, fdef| @fields[fname] = Field.new( fdef.merge('id' => SecureRandom.uuid, 'metadata' => {}), config ) end end def value(data) start_page = @fields['start_page'].value(data) end_page = @fields['end_page'].value(data) if start_page if end_page && start_page != end_page "#{start_page} - #{end_page}" else start_page end else if end_page end_page end end end end end
module Spectrum module Config class FormattedPageRangeField < Field type 'formatted_page_range' attr_reader :fields def initialize_from_instance(i) super @fields = i.fields end def initialize_from_hash(args, config) super @fields = {} args['fields'].each_pair do |fname, fdef| @fields[fname] = Field.new( fdef.merge('id' => SecureRandom.uuid, 'metadata' => {}), config ) end end def value(data) start_page = @fields['start_page'].value(data) end_page = @fields['end_page'].value(data) if start_page if end_page && start_page != end_page "#{start_page} - #{end_page}" else start_page end else if end_page end_page end end end end end end
Fix a missing end tag
Fix a missing end tag
Ruby
bsd-3-clause
mlibrary/spectrum-config
ruby
## Code Before: module Spectrum module Config class FormattedPageRangeField < Field type 'formatted_page_range' attr_reader :fields def initialize_from_instance(i) super @fields = i.fields end def initialize_from_hash(args, config) super @fields = {} args['fields'].each_pair do |fname, fdef| @fields[fname] = Field.new( fdef.merge('id' => SecureRandom.uuid, 'metadata' => {}), config ) end end def value(data) start_page = @fields['start_page'].value(data) end_page = @fields['end_page'].value(data) if start_page if end_page && start_page != end_page "#{start_page} - #{end_page}" else start_page end else if end_page end_page end end end end end ## Instruction: Fix a missing end tag ## Code After: module Spectrum module Config class FormattedPageRangeField < Field type 'formatted_page_range' attr_reader :fields def initialize_from_instance(i) super @fields = i.fields end def initialize_from_hash(args, config) super @fields = {} args['fields'].each_pair do |fname, fdef| @fields[fname] = Field.new( fdef.merge('id' => SecureRandom.uuid, 'metadata' => {}), config ) end end def value(data) start_page = @fields['start_page'].value(data) end_page = @fields['end_page'].value(data) if start_page if end_page && start_page != end_page "#{start_page} - #{end_page}" else start_page end else if end_page end_page end end end end end end
cf21ac8e1f68461b8ead51874f77c2f3c62a4ca5
.travis.yml
.travis.yml
--- sudo: false before_script: - git clone https://github.com/exercism/problem-specifications.git - bin/fetch-configlet - docker pull rakudo-star:latest script: - bin/configlet lint . - docker run -e EXERCISM=1 -t -v $PWD:/exercism rakudo-star prove /exercism -re perl6
--- sudo: false before_script: - git clone https://github.com/exercism/problem-specifications.git - bin/fetch-configlet - docker pull rakudo-star:latest script: - bin/configlet lint . - docker run --env EXERCISM=1 --volume $PWD:/exercism rakudo-star prove /exercism --exec perl6 --recurse --jobs 2
Use parallel jobs with prove
Use parallel jobs with prove
YAML
mit
mienaikage/exercism-perl6,mienaikage/exercism-perl6,exercism/xperl6,mienaikage/xperl6,mienaikage/exercism-perl6,mienaikage/xperl6,exercism/xperl6,exercism/xperl6,mienaikage/xperl6
yaml
## Code Before: --- sudo: false before_script: - git clone https://github.com/exercism/problem-specifications.git - bin/fetch-configlet - docker pull rakudo-star:latest script: - bin/configlet lint . - docker run -e EXERCISM=1 -t -v $PWD:/exercism rakudo-star prove /exercism -re perl6 ## Instruction: Use parallel jobs with prove ## Code After: --- sudo: false before_script: - git clone https://github.com/exercism/problem-specifications.git - bin/fetch-configlet - docker pull rakudo-star:latest script: - bin/configlet lint . - docker run --env EXERCISM=1 --volume $PWD:/exercism rakudo-star prove /exercism --exec perl6 --recurse --jobs 2
29d929c4a4b07ec8c6d91df2e25bdc37e1ab3f80
README.rst
README.rst
Emmaus House Food Pantry Inventory Management ============================================= Food Pantry inventory management program for Emmaus House Episcopal Church. Designed for use with modern USB barcode scanners. Installation ------------ Simply download the program to your desktop and double-click. Development ----------- Building ........ :: python setup.py --help usage: setup.py [-h] [-c] [-p] [-b] Emmaus House Food Pantry Setup optional arguments: -h, --help show this help message and exit -c, --clean Clean all built files. -p, --pack Pack app files into archive. -b, --build Build executable. Webapp Testing .............. :: python webapp.py --package food_pantry.zip
Emmaus House Food Pantry Inventory Management ============================================= Food Pantry inventory management program for Emmaus House Episcopal Church. Designed for use with modern USB barcode scanners. Installation ------------ Simply download the program to your desktop and double-click. `Click here <https://github.com/downloads/grantjenks/emmaus_house_food_pantry/pantry.exe>`_ to download the latest version of the program. Development ----------- Building ........ :: python setup.py --help usage: setup.py [-h] [-c] [-p] [-b] Emmaus House Food Pantry Setup optional arguments: -h, --help show this help message and exit -c, --clean Clean all built files. -p, --pack Pack app files into archive. -b, --build Build executable. Webapp Testing .............. :: python webapp.py --package food_pantry.zip
Create link to latest version of pantry.
Create link to latest version of pantry.
reStructuredText
mit
grantjenks/emmaus_house_food_pantry,grantjenks/emmaus_house_food_pantry
restructuredtext
## Code Before: Emmaus House Food Pantry Inventory Management ============================================= Food Pantry inventory management program for Emmaus House Episcopal Church. Designed for use with modern USB barcode scanners. Installation ------------ Simply download the program to your desktop and double-click. Development ----------- Building ........ :: python setup.py --help usage: setup.py [-h] [-c] [-p] [-b] Emmaus House Food Pantry Setup optional arguments: -h, --help show this help message and exit -c, --clean Clean all built files. -p, --pack Pack app files into archive. -b, --build Build executable. Webapp Testing .............. :: python webapp.py --package food_pantry.zip ## Instruction: Create link to latest version of pantry. ## Code After: Emmaus House Food Pantry Inventory Management ============================================= Food Pantry inventory management program for Emmaus House Episcopal Church. Designed for use with modern USB barcode scanners. Installation ------------ Simply download the program to your desktop and double-click. `Click here <https://github.com/downloads/grantjenks/emmaus_house_food_pantry/pantry.exe>`_ to download the latest version of the program. Development ----------- Building ........ :: python setup.py --help usage: setup.py [-h] [-c] [-p] [-b] Emmaus House Food Pantry Setup optional arguments: -h, --help show this help message and exit -c, --clean Clean all built files. -p, --pack Pack app files into archive. -b, --build Build executable. Webapp Testing .............. :: python webapp.py --package food_pantry.zip
caf1cce23853955bf0a04fc4e255f23b730dca97
tests/test__utils.py
tests/test__utils.py
import pytest import numpy as np import dask.array as da import dask.array.utils as dau import dask_ndfourier._utils @pytest.mark.parametrize( "a, s, n, axis", [ (da.ones((3, 4), chunks=(3, 4)), da.ones((2,), chunks=(2,)), -1, -1), ] ) def test_norm_args(a, s, n, axis): s2, n2, axis2 = dask_ndfourier._utils._norm_args(a, s, n=n, axis=axis) assert isinstance(s2, da.Array)
import pytest import numpy as np import dask.array as da import dask.array.utils as dau import dask_ndfourier._utils @pytest.mark.parametrize( "a, s, n, axis", [ (da.ones((3, 4), chunks=(3, 4)), da.ones((2,), chunks=(2,)), -1, -1), ] ) def test_norm_args(a, s, n, axis): a2, s2, n2, axis2 = dask_ndfourier._utils._norm_args(a, s, n=n, axis=axis) assert isinstance(a2, da.Array) assert isinstance(s2, da.Array)
Update the argument normalization test
Update the argument normalization test Needs to make sure it unpacks the right number of return values. Also since we are changing the input array, it is good to add a check to make sure it is still of the expected type.
Python
bsd-3-clause
dask-image/dask-ndfourier
python
## Code Before: import pytest import numpy as np import dask.array as da import dask.array.utils as dau import dask_ndfourier._utils @pytest.mark.parametrize( "a, s, n, axis", [ (da.ones((3, 4), chunks=(3, 4)), da.ones((2,), chunks=(2,)), -1, -1), ] ) def test_norm_args(a, s, n, axis): s2, n2, axis2 = dask_ndfourier._utils._norm_args(a, s, n=n, axis=axis) assert isinstance(s2, da.Array) ## Instruction: Update the argument normalization test Needs to make sure it unpacks the right number of return values. Also since we are changing the input array, it is good to add a check to make sure it is still of the expected type. ## Code After: import pytest import numpy as np import dask.array as da import dask.array.utils as dau import dask_ndfourier._utils @pytest.mark.parametrize( "a, s, n, axis", [ (da.ones((3, 4), chunks=(3, 4)), da.ones((2,), chunks=(2,)), -1, -1), ] ) def test_norm_args(a, s, n, axis): a2, s2, n2, axis2 = dask_ndfourier._utils._norm_args(a, s, n=n, axis=axis) assert isinstance(a2, da.Array) assert isinstance(s2, da.Array)
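The point of the updated test is arity: `_norm_args` now returns four values, so unpacking into three names would fail outright, and the extra `isinstance` assertion confirms the input array comes back as a dask array. A tiny self-contained illustration of the unpacking part (the stub below is not the real helper):

```python
# Minimal illustration of the unpacking change; _norm_args_stub is not the real helper.
def _norm_args_stub(a, s, n=-1, axis=-1):
    return a, s, n, axis                                # four values, like the updated helper

a2, s2, n2, axis2 = _norm_args_stub([1, 2], [3])        # matches the four-value arity
try:
    s2, n2, axis2 = _norm_args_stub([1, 2], [3])        # the old three-name unpacking
except ValueError as err:
    print(err)                                          # too many values to unpack (expected 3)
```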
f15d2b0d4fa29538048c5f3f773bcfdf49001e2c
custom/panel_templates/Default/core/widgets/online_users.tpl
custom/panel_templates/Default/core/widgets/online_users.tpl
<form action="" method="post"> <div class="form-group"> <label for="inputIncludeStaff">{$INCLUDE_STAFF}</label> <input class="js-switch" type="checkbox" name="staff" id="inputIncludeStaff" value="1"{if $INCLUDE_STAFF_VALUE eq 1} checked{/if} > </div> <div type="form-group"> <input type="hidden" name="token" value="{$TOKEN}"> <input type="submit" class="btn btn-primary" value="{$SUBMIT}"> </div> </form>
<form action="" method="post"> <div class="form-group"> <label for="inputIncludeStaff">{$INCLUDE_STAFF}</label> <input class="js-switch" type="checkbox" name="staff" id="inputIncludeStaff" value="1"{if $INCLUDE_STAFF_VALUE eq 1} checked{/if} > </br> <label for="inputShowNickname">{$SHOW_NICKNAME_INSTEAD}</label> <input class="js-switch" type="checkbox" name="nickname" id="inputShowNickname" value="1"{if $SHOW_NICKNAME_INSTEAD_VALUE eq 1} checked{/if} > </div> <div type="form-group"> <input type="hidden" name="token" value="{$TOKEN}"> <input type="submit" class="btn btn-primary" value="{$SUBMIT}"> </div> </form>
Add "Show nickname instead of username".
Add "Show nickname instead of username". Add "Show nickname instead of username" option for Online Users widget.
Smarty
mit
NamelessMC/Nameless,NamelessMC/Nameless,NamelessMC/Nameless,NamelessMC/Nameless
smarty
## Code Before: <form action="" method="post"> <div class="form-group"> <label for="inputIncludeStaff">{$INCLUDE_STAFF}</label> <input class="js-switch" type="checkbox" name="staff" id="inputIncludeStaff" value="1"{if $INCLUDE_STAFF_VALUE eq 1} checked{/if} > </div> <div type="form-group"> <input type="hidden" name="token" value="{$TOKEN}"> <input type="submit" class="btn btn-primary" value="{$SUBMIT}"> </div> </form> ## Instruction: Add "Show nickname instead of username". Add "Show nickname instead of username" option for Online Users widget. ## Code After: <form action="" method="post"> <div class="form-group"> <label for="inputIncludeStaff">{$INCLUDE_STAFF}</label> <input class="js-switch" type="checkbox" name="staff" id="inputIncludeStaff" value="1"{if $INCLUDE_STAFF_VALUE eq 1} checked{/if} > </br> <label for="inputShowNickname">{$SHOW_NICKNAME_INSTEAD}</label> <input class="js-switch" type="checkbox" name="nickname" id="inputShowNickname" value="1"{if $SHOW_NICKNAME_INSTEAD_VALUE eq 1} checked{/if} > </div> <div type="form-group"> <input type="hidden" name="token" value="{$TOKEN}"> <input type="submit" class="btn btn-primary" value="{$SUBMIT}"> </div> </form>
3051e0a8ddd43fa52798e6ed5cb485200b603929
client/ruby/solrb/lib/solr/exception.rb
client/ruby/solrb/lib/solr/exception.rb
module Solr class Exception < Exception attr_reader :message def initialize(message) @message = message end def to_s @message end end end
module Solr class Exception < Exception; end end
Simplify Solr::Exception - Exception already allows for a message
Simplify Solr::Exception - Exception already allows for a message git-svn-id: 3b1ff1236863b4d63a22e4dae568675c2e247730@501318 13f79535-47bb-0310-9956-ffa450edef68
Ruby
apache-2.0
apache/solr,apache/solr,apache/solr,apache/solr,apache/solr
ruby
## Code Before: module Solr class Exception < Exception attr_reader :message def initialize(message) @message = message end def to_s @message end end end ## Instruction: Simplify Solr::Exception - Exception already allows for a message git-svn-id: 3b1ff1236863b4d63a22e4dae568675c2e247730@501318 13f79535-47bb-0310-9956-ffa450edef68 ## Code After: module Solr class Exception < Exception; end end
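The simplification works because the base exception class already stores and prints its message, so the hand-rolled `@message` attribute and `to_s` override added nothing. Python behaves the same way, which makes for a short comparison:

```python
# A bare subclass keeps the message behavior for free, just like the simplified Ruby class.
class SolrError(Exception):
    pass

err = SolrError("bad query")
print(str(err))     # -> bad query
print(err.args)     # -> ('bad query',)
```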
60298939568499586ebf7660ab1ebba79c82041a
config/newemacs/settings/evil-nerd-commenter-settings.el
config/newemacs/settings/evil-nerd-commenter-settings.el
(use-package evil-nerd-commenter :config (evil-leader/set-key ";" 'evilnc-comment-or-uncomment-lines "ci" 'evilnc-comment-or-uncomment-lines "cl" 'evilnc-quick-comment-or-uncomment-to-the-line "cc" 'evilnc-copy-and-comment-lines "cp" 'evilnc-comment-or-uncomment-paragraphs "cr" 'comment-or-uncomment-region)) (provide 'evil-nerd-commenter-settings)
(use-package evil-nerd-commenter :config (evil-leader/set-key ";" 'evilnc-comment-or-uncomment-lines "ci" 'evilnc-comment-or-uncomment-lines "cl" 'evilnc-quick-comment-or-uncomment-to-the-line "cc" 'evilnc-copy-and-comment-lines "cp" 'evilnc-comment-or-uncomment-paragraphs "cr" 'comment-or-uncomment-region)) (add-hook 'c-mode-common-hook (lambda () ;; Preferred comment style (setq comment-start "// " comment-end ""))) (provide 'evil-nerd-commenter-settings)
Change default c-mode comment style to single line comments
Change default c-mode comment style to single line comments
Emacs Lisp
mit
rogerzanoni/dotfiles
emacs-lisp
## Code Before: (use-package evil-nerd-commenter :config (evil-leader/set-key ";" 'evilnc-comment-or-uncomment-lines "ci" 'evilnc-comment-or-uncomment-lines "cl" 'evilnc-quick-comment-or-uncomment-to-the-line "cc" 'evilnc-copy-and-comment-lines "cp" 'evilnc-comment-or-uncomment-paragraphs "cr" 'comment-or-uncomment-region)) (provide 'evil-nerd-commenter-settings) ## Instruction: Change default c-mode comment style to single line comments ## Code After: (use-package evil-nerd-commenter :config (evil-leader/set-key ";" 'evilnc-comment-or-uncomment-lines "ci" 'evilnc-comment-or-uncomment-lines "cl" 'evilnc-quick-comment-or-uncomment-to-the-line "cc" 'evilnc-copy-and-comment-lines "cp" 'evilnc-comment-or-uncomment-paragraphs "cr" 'comment-or-uncomment-region)) (add-hook 'c-mode-common-hook (lambda () ;; Preferred comment style (setq comment-start "// " comment-end ""))) (provide 'evil-nerd-commenter-settings)
d8d34b0ebf0af05fd03af936ea0e08ec32972ff8
prj1/include/geometry/geometry.h
prj1/include/geometry/geometry.h
class Geometry { public: virtual bool intersect(Ray &r) = 0; }; class Node { std::vector<std::shared_ptr<Node>> children; std::shared_ptr<Geometry> geometry; Transform transform; std::string name; public: /* * Create a node in the scene graph, placing some named geometry in * the scene */ Node(const std::shared_ptr<Geometry> &geom, const Transform &t, const std::string &name); const std::vector<std::shared_ptr<Node>>& get_children() const; std::vector<std::shared_ptr<Node>>& get_children(); const Geometry& get_geometry() const; const Transform& get_transform() const; Transform& get_transform(); }; #endif
class Geometry { public: /* * Test a ray for intersection with the geometry. * The ray should have been previously transformed into object space */ virtual bool intersect(Ray &r) = 0; }; class Node { std::vector<std::shared_ptr<Node>> children; std::shared_ptr<Geometry> geometry; Transform transform; std::string name; public: /* * Create a node in the scene graph, placing some named geometry in * the scene */ Node(const std::shared_ptr<Geometry> &geom, const Transform &t, const std::string &name); const std::vector<std::shared_ptr<Node>>& get_children() const; std::vector<std::shared_ptr<Node>>& get_children(); const Geometry& get_geometry() const; const Transform& get_transform() const; Transform& get_transform(); }; #endif
Add comment on what space the incoming ray should be in
Add comment on what space the incoming ray should be in
C
mit
Twinklebear/tray,Twinklebear/tray
c
## Code Before: class Geometry { public: virtual bool intersect(Ray &r) = 0; }; class Node { std::vector<std::shared_ptr<Node>> children; std::shared_ptr<Geometry> geometry; Transform transform; std::string name; public: /* * Create a node in the scene graph, placing some named geometry in * the scene */ Node(const std::shared_ptr<Geometry> &geom, const Transform &t, const std::string &name); const std::vector<std::shared_ptr<Node>>& get_children() const; std::vector<std::shared_ptr<Node>>& get_children(); const Geometry& get_geometry() const; const Transform& get_transform() const; Transform& get_transform(); }; #endif ## Instruction: Add comment on what space the incoming ray should be in ## Code After: class Geometry { public: /* * Test a ray for intersection with the geometry. * The ray should have been previously transformed into object space */ virtual bool intersect(Ray &r) = 0; }; class Node { std::vector<std::shared_ptr<Node>> children; std::shared_ptr<Geometry> geometry; Transform transform; std::string name; public: /* * Create a node in the scene graph, placing some named geometry in * the scene */ Node(const std::shared_ptr<Geometry> &geom, const Transform &t, const std::string &name); const std::vector<std::shared_ptr<Node>>& get_children() const; std::vector<std::shared_ptr<Node>>& get_children(); const Geometry& get_geometry() const; const Transform& get_transform() const; Transform& get_transform(); }; #endif
2f2ad4b5bbe45d080015ac5b33b4da1bfd8719d4
lib/brain.rb
lib/brain.rb
class Brain require "net/http" def self.request(path:, method: :get, payload: nil, access_token: nil) uri = URI("#{ENV["BRAIN_URL"]}#{path}") http = Net::HTTP.new(uri.host, uri.port) req = case method when :get Net::HTTP::Get.new(uri.path) when :post Net::HTTP::Post.new(uri.path, "Content-Type" => "application/json") else raise "unknown method" end if access_token req["Authorization"] = "Bearer #{access_token}" end if !%i(get head).include?(method) && payload req.body = payload.to_json end response = http.request req json_response = JSON.parse(response.body, symbolize_names: true) yield response, json_response if block_given? return response, json_response end end
class Brain require "net/http" def self.request(path:, method: :get, payload: nil, access_token: nil) uri = URI("#{ENV["BRAIN_URL"]}#{path}") http = Net::HTTP.new(uri.host, uri.port) req = case method when :get Net::HTTP::Get.new(uri.path) when :post Net::HTTP::Post.new(uri.path, "Content-Type" => "application/json") else raise "unknown method" end if access_token req["Authorization"] = "Bearer #{access_token}" end if !%i(get head).include?(method) && payload req.body = payload.to_json end response = http.request req json_response = JSON.parse(response.body, symbolize_names: true) if block_given? yield response, json_response else return response, json_response end end end
Return yield result if block was given
Return yield result if block was given
Ruby
apache-2.0
smartbox-io/cell,smartbox-io/cell
ruby
## Code Before: class Brain require "net/http" def self.request(path:, method: :get, payload: nil, access_token: nil) uri = URI("#{ENV["BRAIN_URL"]}#{path}") http = Net::HTTP.new(uri.host, uri.port) req = case method when :get Net::HTTP::Get.new(uri.path) when :post Net::HTTP::Post.new(uri.path, "Content-Type" => "application/json") else raise "unknown method" end if access_token req["Authorization"] = "Bearer #{access_token}" end if !%i(get head).include?(method) && payload req.body = payload.to_json end response = http.request req json_response = JSON.parse(response.body, symbolize_names: true) yield response, json_response if block_given? return response, json_response end end ## Instruction: Return yield result if block was given ## Code After: class Brain require "net/http" def self.request(path:, method: :get, payload: nil, access_token: nil) uri = URI("#{ENV["BRAIN_URL"]}#{path}") http = Net::HTTP.new(uri.host, uri.port) req = case method when :get Net::HTTP::Get.new(uri.path) when :post Net::HTTP::Post.new(uri.path, "Content-Type" => "application/json") else raise "unknown method" end if access_token req["Authorization"] = "Bearer #{access_token}" end if !%i(get head).include?(method) && payload req.body = payload.to_json end response = http.request req json_response = JSON.parse(response.body, symbolize_names: true) if block_given? yield response, json_response else return response, json_response end end end
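Before this change the block was executed but its return value was discarded in favor of the raw `(response, json_response)` pair; afterwards the block's result becomes the method's result. The same pitfall sketched as an optional callback in Python (a hypothetical wrapper, not the real client):

```python
# Hypothetical callback-style wrapper showing why the handler's result must be returned.
def request(do_request, on_response=None):
    response, payload = do_request()
    if on_response is not None:
        return on_response(response, payload)   # propagate the handler's result
    return response, payload

request(lambda: (200, {"ok": True}), on_response=lambda resp, data: data["ok"])   # -> True
```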
f0e3c10f63b440e65c4e3225dd6582f1a5bcba8d
doc/release-notes.md
doc/release-notes.md
Bitcoin ABC version 0.24.6 is now available from: <https://download.bitcoinabc.org/0.24.6/> This release includes the following features and fixes:
Bitcoin ABC version 0.24.6 is now available from: <https://download.bitcoinabc.org/0.24.6/> This release includes the following features and fixes: - Add a checkpoint after the November 15th, 2021 eCash upgrade.
Add a release note for the 0.24.6 checkpoint
Add a release note for the 0.24.6 checkpoint Summary: As per title. Test Plan: Read it. Reviewers: #bitcoin_abc, PiRK Reviewed By: #bitcoin_abc, PiRK Differential Revision: https://reviews.bitcoinabc.org/D10477
Markdown
mit
Bitcoin-ABC/bitcoin-abc,Bitcoin-ABC/bitcoin-abc,Bitcoin-ABC/bitcoin-abc,Bitcoin-ABC/bitcoin-abc,Bitcoin-ABC/bitcoin-abc,Bitcoin-ABC/bitcoin-abc,Bitcoin-ABC/bitcoin-abc,Bitcoin-ABC/bitcoin-abc,Bitcoin-ABC/bitcoin-abc,Bitcoin-ABC/bitcoin-abc,Bitcoin-ABC/bitcoin-abc,Bitcoin-ABC/bitcoin-abc,Bitcoin-ABC/bitcoin-abc
markdown
## Code Before: Bitcoin ABC version 0.24.6 is now available from: <https://download.bitcoinabc.org/0.24.6/> This release includes the following features and fixes: ## Instruction: Add a release note for the 0.24.6 checkpoint Summary: As per title. Test Plan: Read it. Reviewers: #bitcoin_abc, PiRK Reviewed By: #bitcoin_abc, PiRK Differential Revision: https://reviews.bitcoinabc.org/D10477 ## Code After: Bitcoin ABC version 0.24.6 is now available from: <https://download.bitcoinabc.org/0.24.6/> This release includes the following features and fixes: - Add a checkpoint after the November 15th, 2021 eCash upgrade.
69f9a0f0032508735cfc05f1ffa1e47612404b81
[email protected]/metadata.json
[email protected]/metadata.json
{ "shell-version": ["3.4", "3.6", "3.7.92", "3.8"], "uuid": "[email protected]", "name": "Icon Hider", "description": "Show/Hide icons from top panel", "url": "https://github.com/ikalnitsky/gnome-shell-extension-icon-hider", "settings-schema": "org.gnome.shell.extensions.icon-hider", "gettext-domain": "org.gnome.shell.extensions.icon-hider", "version": "3" }
{ "shell-version": ["3.4", "3.6", "3.8"], "uuid": "[email protected]", "name": "Icon Hider", "description": "Show/Hide icons from top panel", "url": "https://github.com/ikalnitsky/gnome-shell-extension-icon-hider", "settings-schema": "org.gnome.shell.extensions.icon-hider", "gettext-domain": "org.gnome.shell.extensions.icon-hider", "version": "3" }
Remove Gnome-Shell testing version from metadata.
Remove Gnome-Shell testing version from metadata.
JSON
bsd-3-clause
ikalnitsky/gnome-shell-extension-icon-hider
json
## Code Before: { "shell-version": ["3.4", "3.6", "3.7.92", "3.8"], "uuid": "[email protected]", "name": "Icon Hider", "description": "Show/Hide icons from top panel", "url": "https://github.com/ikalnitsky/gnome-shell-extension-icon-hider", "settings-schema": "org.gnome.shell.extensions.icon-hider", "gettext-domain": "org.gnome.shell.extensions.icon-hider", "version": "3" } ## Instruction: Remove Gnome-Shell testing version from metdata. ## Code After: { "shell-version": ["3.4", "3.6", "3.8"], "uuid": "[email protected]", "name": "Icon Hider", "description": "Show/Hide icons from top panel", "url": "https://github.com/ikalnitsky/gnome-shell-extension-icon-hider", "settings-schema": "org.gnome.shell.extensions.icon-hider", "gettext-domain": "org.gnome.shell.extensions.icon-hider", "version": "3" }
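For context on the "testing version" wording above: in the GNOME 3.x scheme, odd minor numbers such as 3.7.x were development snapshots of the next stable series (3.7.92 was a pre-release of 3.8), so listing it alongside 3.4, 3.6 and 3.8 tied the extension to a build that would disappear. A small hypothetical helper that keeps only the stable entries (not part of the extension; the file path is assumed):

```python
import json

def stable_shell_versions(metadata_path="metadata.json"):
    """Return only stable GNOME Shell versions from an extension's metadata.

    GNOME 3.x used odd minor numbers for development snapshots, so an entry
    like "3.7.92" is a pre-release and gets filtered out here.
    """
    with open(metadata_path) as fh:
        versions = json.load(fh)["shell-version"]
    return [v for v in versions if int(v.split(".")[1]) % 2 == 0]
```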
d80b8cdbd0794f7c1a910e7225279763a7a9840c
src/less/partials/_terminal.less
src/less/partials/_terminal.less
.asciinema-terminal { box-sizing: content-box; -moz-box-sizing: content-box; -webkit-box-sizing: content-box; overflow: hidden; padding: 0; margin: 0px; display: block; white-space: pre; border: 0; word-wrap: normal; word-break: normal; border-radius: 0; border-style: solid; cursor: text; border-width: 0.75em; .terminal-font; .line { display: block; width: 200%; .cursor-a { display: inline-block; } .cursor-b { display: none; } .blink { visibility: hidden; } } &.cursor { .line { .cursor-a { display: none; } .cursor-b { display: inline-block; } } } &.blink { .line { .blink { visibility: visible; } } } .bright { font-weight: bold; } .underline { text-decoration: underline; } .italic { font-style: italic; } .strikethrough { text-decoration: line-through; } &.font-small { font-size: 12px; } &.font-medium { font-size: 18px; } &.font-big { font-size: 24px; } }
.asciinema-terminal { box-sizing: content-box; -moz-box-sizing: content-box; -webkit-box-sizing: content-box; overflow: hidden; padding: 0; margin: 0px; display: block; white-space: pre; border: 0; word-wrap: normal; word-break: normal; border-radius: 0; border-style: solid; cursor: text; border-width: 0.75em; .terminal-font; .line { display: block; width: 200%; .cursor-a { display: inline-block; } .cursor-b { display: none; border-radius: 0.05em; } .blink { visibility: hidden; } } &.cursor { .line { .cursor-a { display: none; } .cursor-b { display: inline-block; } } } &.blink { .line { .blink { visibility: visible; } } } .bright { font-weight: bold; } .underline { text-decoration: underline; } .italic { font-style: italic; } .strikethrough { text-decoration: line-through; } &.font-small { font-size: 12px; } &.font-medium { font-size: 18px; } &.font-big { font-size: 24px; } }
Add tiny border-radius to cursor block
Add tiny border-radius to cursor block
Less
apache-2.0
asciinema/asciinema-player,asciinema/asciinema-player
less
## Code Before: .asciinema-terminal { box-sizing: content-box; -moz-box-sizing: content-box; -webkit-box-sizing: content-box; overflow: hidden; padding: 0; margin: 0px; display: block; white-space: pre; border: 0; word-wrap: normal; word-break: normal; border-radius: 0; border-style: solid; cursor: text; border-width: 0.75em; .terminal-font; .line { display: block; width: 200%; .cursor-a { display: inline-block; } .cursor-b { display: none; } .blink { visibility: hidden; } } &.cursor { .line { .cursor-a { display: none; } .cursor-b { display: inline-block; } } } &.blink { .line { .blink { visibility: visible; } } } .bright { font-weight: bold; } .underline { text-decoration: underline; } .italic { font-style: italic; } .strikethrough { text-decoration: line-through; } &.font-small { font-size: 12px; } &.font-medium { font-size: 18px; } &.font-big { font-size: 24px; } } ## Instruction: Add tiny border-radius to cursor block ## Code After: .asciinema-terminal { box-sizing: content-box; -moz-box-sizing: content-box; -webkit-box-sizing: content-box; overflow: hidden; padding: 0; margin: 0px; display: block; white-space: pre; border: 0; word-wrap: normal; word-break: normal; border-radius: 0; border-style: solid; cursor: text; border-width: 0.75em; .terminal-font; .line { display: block; width: 200%; .cursor-a { display: inline-block; } .cursor-b { display: none; border-radius: 0.05em; } .blink { visibility: hidden; } } &.cursor { .line { .cursor-a { display: none; } .cursor-b { display: inline-block; } } } &.blink { .line { .blink { visibility: visible; } } } .bright { font-weight: bold; } .underline { text-decoration: underline; } .italic { font-style: italic; } .strikethrough { text-decoration: line-through; } &.font-small { font-size: 12px; } &.font-medium { font-size: 18px; } &.font-big { font-size: 24px; } }
911fb12a52b7970e78208048dd7452a505e5077f
tables.sql
tables.sql
USE dmarc; CREATE TABLE report ( serial int(10) unsigned NOT NULL AUTO_INCREMENT, date_begin timestamp NOT NULL DEFAULT '0000-00-00 00:00:00', date_end timestamp NOT NULL DEFAULT '0000-00-00 00:00:00', domain varchar(255) NOT NULL, org varchar(255) NOT NULL, report_id varchar(255) NOT NULL, PRIMARY KEY (serial), UNIQUE KEY domain (domain,report_id) ); CREATE TABLE rptrecord ( serial int(10) unsigned NOT NULL, ip varchar(39) NOT NULL, count int(10) unsigned NOT NULL, disposition enum('none','quarantine','reject'), reason varchar(255), dkim_result enum('none','pass','fail','neutral','policy','temperror','permerror'), spf_result enum('none','neutral','pass','fail','softfail','temperror','permerror'), KEY serial (serial,ip) ); CREATE TABLE rptresult ( serial int(10) unsigned NOT NULL, ip varchar(39) NOT NULL, type enum('dkim','spf'), seq int(10) unsigned NOT NULL, domain varchar(255), result enum('none','pass','fail','neutral','policy','temperror','permerror'), KEY serial (serial,ip,type,seq) );
USE dmarc; CREATE TABLE report ( serial int(10) unsigned NOT NULL AUTO_INCREMENT, date_begin timestamp NOT NULL DEFAULT '0000-00-00 00:00:00', date_end timestamp NOT NULL DEFAULT '0000-00-00 00:00:00', domain varchar(255) NOT NULL, org varchar(255) NOT NULL, report_id varchar(255) NOT NULL, PRIMARY KEY (serial), UNIQUE KEY domain (domain,report_id) ); CREATE TABLE rptrecord ( serial int(10) unsigned NOT NULL, ip varchar(39) NOT NULL, count int(10) unsigned NOT NULL, disposition enum('none','quarantine','reject'), reason varchar(255), dkim_result enum('none','pass','fail','neutral','policy','temperror','permerror'), spf_result enum('none','neutral','pass','fail','softfail','temperror','permerror'), KEY serial (serial,ip) ); CREATE TABLE rptresult ( serial int(10) unsigned NOT NULL, ip varchar(39) NOT NULL, type enum('dkim','spf'), seq int(10) unsigned NOT NULL, domain varchar(255), result enum('none','pass','fail','softfail','neutral','policy','temperror','permerror'), KEY serial (serial,ip,type,seq) );
Add softfail to list of result statuses
Add softfail to list of result statuses
SQL
mit
solarissmoke/php-dmarc
sql
## Code Before: USE dmarc; CREATE TABLE report ( serial int(10) unsigned NOT NULL AUTO_INCREMENT, date_begin timestamp NOT NULL DEFAULT '0000-00-00 00:00:00', date_end timestamp NOT NULL DEFAULT '0000-00-00 00:00:00', domain varchar(255) NOT NULL, org varchar(255) NOT NULL, report_id varchar(255) NOT NULL, PRIMARY KEY (serial), UNIQUE KEY domain (domain,report_id) ); CREATE TABLE rptrecord ( serial int(10) unsigned NOT NULL, ip varchar(39) NOT NULL, count int(10) unsigned NOT NULL, disposition enum('none','quarantine','reject'), reason varchar(255), dkim_result enum('none','pass','fail','neutral','policy','temperror','permerror'), spf_result enum('none','neutral','pass','fail','softfail','temperror','permerror'), KEY serial (serial,ip) ); CREATE TABLE rptresult ( serial int(10) unsigned NOT NULL, ip varchar(39) NOT NULL, type enum('dkim','spf'), seq int(10) unsigned NOT NULL, domain varchar(255), result enum('none','pass','fail','neutral','policy','temperror','permerror'), KEY serial (serial,ip,type,seq) ); ## Instruction: Add softfail to list of result statuses ## Code After: USE dmarc; CREATE TABLE report ( serial int(10) unsigned NOT NULL AUTO_INCREMENT, date_begin timestamp NOT NULL DEFAULT '0000-00-00 00:00:00', date_end timestamp NOT NULL DEFAULT '0000-00-00 00:00:00', domain varchar(255) NOT NULL, org varchar(255) NOT NULL, report_id varchar(255) NOT NULL, PRIMARY KEY (serial), UNIQUE KEY domain (domain,report_id) ); CREATE TABLE rptrecord ( serial int(10) unsigned NOT NULL, ip varchar(39) NOT NULL, count int(10) unsigned NOT NULL, disposition enum('none','quarantine','reject'), reason varchar(255), dkim_result enum('none','pass','fail','neutral','policy','temperror','permerror'), spf_result enum('none','neutral','pass','fail','softfail','temperror','permerror'), KEY serial (serial,ip) ); CREATE TABLE rptresult ( serial int(10) unsigned NOT NULL, ip varchar(39) NOT NULL, type enum('dkim','spf'), seq int(10) unsigned NOT NULL, domain varchar(255), result enum('none','pass','fail','softfail','neutral','policy','temperror','permerror'), KEY serial (serial,ip,type,seq) );
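One thing the tables.sql change above does not handle is a database that was already created from the old script: its `rptresult.result` enum still lacks `softfail`, so reports carrying that value would either be rejected or silently stored as an empty string, depending on the server's SQL mode. A hedged migration sketch for such an install (the connection details and the choice of mysql-connector are assumptions for illustration, not part of php-dmarc):

```python
import mysql.connector

# One-off migration for an existing deployment; host/user/database are
# placeholders and should be replaced with real connection settings.
conn = mysql.connector.connect(host="localhost", user="dmarc", database="dmarc")
cur = conn.cursor()
cur.execute(
    "ALTER TABLE rptresult MODIFY result "
    "enum('none','pass','fail','softfail','neutral','policy',"
    "'temperror','permerror')"
)
cur.close()
conn.close()
```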
cad2867e2de105923d37056d4c2aa4272aa4566f
_Experiment/MyPlayground.playground/Contents.swift
_Experiment/MyPlayground.playground/Contents.swift
//: Playground - noun: a place where people can play import Foundation struct myStruct { } func switchOnAny(any: Any) -> String { println("Dynamic Type == \(any.dynamicType)") switch any { case let array as [Any]: return "Array" case let array as NSArray: return "NSArray" default: return "Default" } } let emptyStringArray : [String] = [] let stringArray : [String] = ["Bob", "Roger"] let intArray = [1, 2, 3] let customStructArray : [myStruct] = [] println("\t\touput : \(switchOnAny([]))") println("\t\touput : \(switchOnAny(emptyStringArray))") println("\t\touput : \(switchOnAny(stringArray))") println("\t\touput : \(switchOnAny(intArray))") println("\t\touput : \(switchOnAny(customStructArray))")
//: Playground - noun: a place where people can play import Foundation //protocol ArrayType {} //extension Array : ArrayType {} //extension NSArray : ArrayType {} struct myStruct { } func switchOnAny(any: Any) -> String { println("Dynamic Type == \(any.dynamicType)") switch any { // case let array as ArrayType: // return "Stack" case let array as [Any]: return "Array" case let array as NSArray: return "NSArray" default: return "Default" } } let emptyStringArray : [String] = [] let stringArray : [String] = ["Bob", "Roger"] let intArray = [1, 2, 3] let customStructArray : [myStruct] = [] println("\t\touput : \(switchOnAny([]))") println("\t\touput : \(switchOnAny(emptyStringArray))") println("\t\touput : \(switchOnAny(stringArray))") println("\t\touput : \(switchOnAny(intArray))") println("\t\touput : \(switchOnAny(customStructArray))")
Add more experimentation in playground
Add more experimentation in playground
Swift
mit
VinceBurn/SwiftyPlist,VinceBurn/SwiftyPlist
swift
## Code Before: //: Playground - noun: a place where people can play import Foundation struct myStruct { } func switchOnAny(any: Any) -> String { println("Dynamic Type == \(any.dynamicType)") switch any { case let array as [Any]: return "Array" case let array as NSArray: return "NSArray" default: return "Default" } } let emptyStringArray : [String] = [] let stringArray : [String] = ["Bob", "Roger"] let intArray = [1, 2, 3] let customStructArray : [myStruct] = [] println("\t\touput : \(switchOnAny([]))") println("\t\touput : \(switchOnAny(emptyStringArray))") println("\t\touput : \(switchOnAny(stringArray))") println("\t\touput : \(switchOnAny(intArray))") println("\t\touput : \(switchOnAny(customStructArray))") ## Instruction: Add more experimentation in playground ## Code After: //: Playground - noun: a place where people can play import Foundation //protocol ArrayType {} //extension Array : ArrayType {} //extension NSArray : ArrayType {} struct myStruct { } func switchOnAny(any: Any) -> String { println("Dynamic Type == \(any.dynamicType)") switch any { // case let array as ArrayType: // return "Stack" case let array as [Any]: return "Array" case let array as NSArray: return "NSArray" default: return "Default" } } let emptyStringArray : [String] = [] let stringArray : [String] = ["Bob", "Roger"] let intArray = [1, 2, 3] let customStructArray : [myStruct] = [] println("\t\touput : \(switchOnAny([]))") println("\t\touput : \(switchOnAny(emptyStringArray))") println("\t\touput : \(switchOnAny(stringArray))") println("\t\touput : \(switchOnAny(intArray))") println("\t\touput : \(switchOnAny(customStructArray))")
c2859bd8da741862ee01a276a1350fb4a5931dbc
data_access.py
data_access.py
import sys import mysql.connector def insert(): cursor = connection.cursor() try: cursor.execute("drop table employees") except: pass cursor.execute("create table employees (id integer primary key, name text)") cursor.close() print("Inserting employees...") for n in xrange(0, 10000): cursor = connection.cursor() cursor.execute("insert into employees (id, name) values (%d, 'Employee_%d')" % (n, n)) connection.commit() cursor.close() def select(): print("Selecting employees...") while True: cursor = connection.cursor() cursor.execute("select * from employees where name like '%1417773'") for row in cursor: pass cursor.close() connection = mysql.connector.connect(host='localhost', database='test') if "insert" in sys.argv: while True: insert() elif "insert_once" in sys.argv: insert() elif "select" in sys.argv: select() else: print("USAGE: data_access.py <insert|insert_once|select>") connection.close()
from random import randint import sys import mysql.connector NUM_EMPLOYEES = 10000 def insert(): cursor = connection.cursor() try: cursor.execute("drop table employees") except: pass cursor.execute("create table employees (id integer primary key, name text)") cursor.close() print("Inserting employees...") for n in xrange(0, NUM_EMPLOYEES): cursor = connection.cursor() cursor.execute("insert into employees (id, name) values (%d, 'Employee_%d')" % (n, n)) connection.commit() cursor.close() def select(): print("Selecting employees...") while True: cursor = connection.cursor() cursor.execute("select * from employees where name like '%%%d'" % randint(0, NUM_EMPLOYEES)) for row in cursor: pass cursor.close() connection = mysql.connector.connect(host='localhost', database='test') if "insert" in sys.argv: while True: insert() elif "insert_once" in sys.argv: insert() elif "select" in sys.argv: select() else: print("USAGE: data_access.py <insert|insert_once|select>") connection.close()
Change data access script to issue SELECTs that actually return a value
Change data access script to issue SELECTs that actually return a value This makes the part about tracing the SQL statements and tracing the number of rows returned a little more interesting.
Python
mit
goldshtn/linux-tracing-workshop,goldshtn/linux-tracing-workshop,goldshtn/linux-tracing-workshop,goldshtn/linux-tracing-workshop,goldshtn/linux-tracing-workshop,goldshtn/linux-tracing-workshop,goldshtn/linux-tracing-workshop,goldshtn/linux-tracing-workshop
python
## Code Before: import sys import mysql.connector def insert(): cursor = connection.cursor() try: cursor.execute("drop table employees") except: pass cursor.execute("create table employees (id integer primary key, name text)") cursor.close() print("Inserting employees...") for n in xrange(0, 10000): cursor = connection.cursor() cursor.execute("insert into employees (id, name) values (%d, 'Employee_%d')" % (n, n)) connection.commit() cursor.close() def select(): print("Selecting employees...") while True: cursor = connection.cursor() cursor.execute("select * from employees where name like '%1417773'") for row in cursor: pass cursor.close() connection = mysql.connector.connect(host='localhost', database='test') if "insert" in sys.argv: while True: insert() elif "insert_once" in sys.argv: insert() elif "select" in sys.argv: select() else: print("USAGE: data_access.py <insert|insert_once|select>") connection.close() ## Instruction: Change data access script to issue SELECTs that actually return a value This makes the part about tracing the SQL statements and tracing the number of rows returned a little more interesting. ## Code After: from random import randint import sys import mysql.connector NUM_EMPLOYEES = 10000 def insert(): cursor = connection.cursor() try: cursor.execute("drop table employees") except: pass cursor.execute("create table employees (id integer primary key, name text)") cursor.close() print("Inserting employees...") for n in xrange(0, NUM_EMPLOYEES): cursor = connection.cursor() cursor.execute("insert into employees (id, name) values (%d, 'Employee_%d')" % (n, n)) connection.commit() cursor.close() def select(): print("Selecting employees...") while True: cursor = connection.cursor() cursor.execute("select * from employees where name like '%%%d'" % randint(0, NUM_EMPLOYEES)) for row in cursor: pass cursor.close() connection = mysql.connector.connect(host='localhost', database='test') if "insert" in sys.argv: while True: insert() elif "insert_once" in sys.argv: insert() elif "select" in sys.argv: select() else: print("USAGE: data_access.py <insert|insert_once|select>") connection.close()
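The rewritten `select()` above interpolates the random number straight into the SQL string, which is harmless for an integer the script generated itself, but mysql-connector also supports bound parameters, which is the safer habit whenever the value could come from outside the script. A possible variation along those lines (illustrative only; the helper name is invented, while the table and column match the record above):

```python
from random import randint

def select_matching(connection, num_employees=10000):
    # Same query as above, but the LIKE pattern is passed as a bound
    # parameter instead of being formatted into the SQL text.
    cursor = connection.cursor()
    pattern = "%{}".format(randint(0, num_employees))
    cursor.execute("select * from employees where name like %s", (pattern,))
    rows = cursor.fetchall()
    cursor.close()
    return rows
```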