commit stringlengths 40 40 | old_file stringlengths 4 184 | new_file stringlengths 4 184 | old_contents stringlengths 1 3.6k | new_contents stringlengths 5 3.38k | subject stringlengths 15 778 | message stringlengths 16 6.74k | lang stringclasses 201 values | license stringclasses 13 values | repos stringlengths 6 116k | config stringclasses 201 values | content stringlengths 137 7.24k | diff stringlengths 26 5.55k | diff_length int64 1 123 | relative_diff_length float64 0.01 89 | n_lines_added int64 0 108 | n_lines_deleted int64 0 106 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
f768c1f1925bfb84d4fdddf6537b749d3776b590 | main.go | main.go | package main
import (
"fmt"
"log"
"net/http"
"path/filepath"
"github.com/docopt/docopt.go"
)
func main() {
arguments, _ := docopt.Parse(usage(), nil, true, "0.1", false)
port := arguments["--port"].(string)
path, _ := filepath.Abs(arguments["<directory>"].(string))
start(path, port)
}
func usage() string {
return `Static Web Server
This tool serves static files in the given directory through http on localhost over the given port number (e.g 5000 by default)
Usage:
staticws <directory> [--port=N]
staticws -h | --help
staticws --version
Options:
-h --help Show this screen.
--version Show version.
--port=N Web server port number [default: 5000].`
}
func start(path, port string) {
log.Println("Serving files from", path)
log.Println("Listening on port", port)
panic(http.ListenAndServe(fmt.Sprintf(":%v", port), http.FileServer(http.Dir(path))))
}
| package main
import (
"fmt"
"log"
"net/http"
"path/filepath"
"github.com/docopt/docopt.go"
)
func main() {
arguments, _ := docopt.Parse(usage(), nil, true, "0.1", false)
port := arguments["--port"].(string)
path, _ := filepath.Abs(arguments["<directory>"].(string))
start(path, port)
}
func usage() string {
return `Static Web Server
This tool serves static files in the given directory through http on localhost over the given port number (e.g 5000 by default)
Usage:
staticws <directory> [--port=N]
staticws -h | --help
staticws --version
Options:
-h --help Show this screen.
--version Show version.
--port=N Web server port number [default: 5000].`
}
func start(path, port string) {
log.Println("Serving files from", path)
log.Println("Listening on port", port)
http.Handle("/", http.FileServer(http.Dir(path)))
panic(http.ListenAndServe(fmt.Sprintf(":%v", port), Log(http.DefaultServeMux)))
}
func Log(handler http.Handler) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
log.Printf("%s %s %s", r.RemoteAddr, r.Method, r.URL)
handler.ServeHTTP(w, r)
})
}
| Add logging of request to stdout | Add logging of request to stdout
| Go | mit | lminaudier/sws | go | ## Code Before:
package main
import (
"fmt"
"log"
"net/http"
"path/filepath"
"github.com/docopt/docopt.go"
)
func main() {
arguments, _ := docopt.Parse(usage(), nil, true, "0.1", false)
port := arguments["--port"].(string)
path, _ := filepath.Abs(arguments["<directory>"].(string))
start(path, port)
}
func usage() string {
return `Static Web Server
This tool serves static files in the given directory through http on localhost over the given port number (e.g 5000 by default)
Usage:
staticws <directory> [--port=N]
staticws -h | --help
staticws --version
Options:
-h --help Show this screen.
--version Show version.
--port=N Web server port number [default: 5000].`
}
func start(path, port string) {
log.Println("Serving files from", path)
log.Println("Listening on port", port)
panic(http.ListenAndServe(fmt.Sprintf(":%v", port), http.FileServer(http.Dir(path))))
}
## Instruction:
Add logging of request to stdout
## Code After:
package main
import (
"fmt"
"log"
"net/http"
"path/filepath"
"github.com/docopt/docopt.go"
)
func main() {
arguments, _ := docopt.Parse(usage(), nil, true, "0.1", false)
port := arguments["--port"].(string)
path, _ := filepath.Abs(arguments["<directory>"].(string))
start(path, port)
}
func usage() string {
return `Static Web Server
This tool serves static files in the given directory through http on localhost over the given port number (e.g 5000 by default)
Usage:
staticws <directory> [--port=N]
staticws -h | --help
staticws --version
Options:
-h --help Show this screen.
--version Show version.
--port=N Web server port number [default: 5000].`
}
func start(path, port string) {
log.Println("Serving files from", path)
log.Println("Listening on port", port)
http.Handle("/", http.FileServer(http.Dir(path)))
panic(http.ListenAndServe(fmt.Sprintf(":%v", port), Log(http.DefaultServeMux)))
}
func Log(handler http.Handler) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
log.Printf("%s %s %s", r.RemoteAddr, r.Method, r.URL)
handler.ServeHTTP(w, r)
})
}
| package main
import (
"fmt"
"log"
"net/http"
"path/filepath"
"github.com/docopt/docopt.go"
)
func main() {
arguments, _ := docopt.Parse(usage(), nil, true, "0.1", false)
port := arguments["--port"].(string)
path, _ := filepath.Abs(arguments["<directory>"].(string))
start(path, port)
}
func usage() string {
return `Static Web Server
This tool serves static files in the given directory through http on localhost over the given port number (e.g 5000 by default)
Usage:
staticws <directory> [--port=N]
staticws -h | --help
staticws --version
Options:
-h --help Show this screen.
--version Show version.
--port=N Web server port number [default: 5000].`
}
func start(path, port string) {
log.Println("Serving files from", path)
log.Println("Listening on port", port)
+
+ http.Handle("/", http.FileServer(http.Dir(path)))
- panic(http.ListenAndServe(fmt.Sprintf(":%v", port), http.FileServer(http.Dir(path))))
? ^^ ^^^^^^^^^^^^^^^ ^ ^^^^^ -
+ panic(http.ListenAndServe(fmt.Sprintf(":%v", port), Log(http.DefaultServeMux)))
? ^ ^^^ ^^^^^^^^ ^^^^^
}
+
+ func Log(handler http.Handler) http.Handler {
+ return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ log.Printf("%s %s %s", r.RemoteAddr, r.Method, r.URL)
+ handler.ServeHTTP(w, r)
+ })
+ }
+ | 12 | 0.292683 | 11 | 1 |
3c984efae90ff1e5a3a38494c7174d5add822fbf | app/views/cms/resources/_form.html.haml | app/views/cms/resources/_form.html.haml | = form_for @entry do |f|
- @attributes_for_form.each do |attribute|
%fieldset.form-group
= f.label @resource.human_attribute_name(attribute.name)
= render attribute.form_partial_path, f: f, entry: @entry, attribute: attribute
%fieldset.form-group
= f.submit 'Submit', class: 'btn btn-primary'
= link_to 'Back', send("#{ @resource.model_name.route_key }_path"),
class: 'btn btn-secondary-outline'
- if @entry.persisted?
= link_to 'Delete', send("#{ @resource.model_name.element }_path", @entry),
class: 'btn btn-danger-outline pull-md-right', method: :delete,
data: { confirm: 'Are you sure?' }
| = form_for @entry do |f|
- @attributes_for_form.each do |attribute|
%fieldset.form-group
= f.label @resource.human_attribute_name(attribute.name)
= render attribute.form_partial_path, f: f, entry: @entry, attribute: attribute
%fieldset.form-group
= f.submit 'Submit', class: 'btn btn-primary'
= link_to 'Back', polymorphic_path(@resource),
class: 'btn btn-secondary-outline'
- if @entry.persisted?
= link_to 'Delete', polymorphic_path(@entry),
class: 'btn btn-danger-outline pull-md-right', method: :delete,
data: { confirm: 'Are you sure?' }
| Fix back and delete links for resources form | Fix back and delete links for resources form
| Haml | mit | droptheplot/adminable,droptheplot/adminable,droptheplot/adminable | haml | ## Code Before:
= form_for @entry do |f|
- @attributes_for_form.each do |attribute|
%fieldset.form-group
= f.label @resource.human_attribute_name(attribute.name)
= render attribute.form_partial_path, f: f, entry: @entry, attribute: attribute
%fieldset.form-group
= f.submit 'Submit', class: 'btn btn-primary'
= link_to 'Back', send("#{ @resource.model_name.route_key }_path"),
class: 'btn btn-secondary-outline'
- if @entry.persisted?
= link_to 'Delete', send("#{ @resource.model_name.element }_path", @entry),
class: 'btn btn-danger-outline pull-md-right', method: :delete,
data: { confirm: 'Are you sure?' }
## Instruction:
Fix back and delete links for resources form
## Code After:
= form_for @entry do |f|
- @attributes_for_form.each do |attribute|
%fieldset.form-group
= f.label @resource.human_attribute_name(attribute.name)
= render attribute.form_partial_path, f: f, entry: @entry, attribute: attribute
%fieldset.form-group
= f.submit 'Submit', class: 'btn btn-primary'
= link_to 'Back', polymorphic_path(@resource),
class: 'btn btn-secondary-outline'
- if @entry.persisted?
= link_to 'Delete', polymorphic_path(@entry),
class: 'btn btn-danger-outline pull-md-right', method: :delete,
data: { confirm: 'Are you sure?' }
| = form_for @entry do |f|
- @attributes_for_form.each do |attribute|
%fieldset.form-group
= f.label @resource.human_attribute_name(attribute.name)
= render attribute.form_partial_path, f: f, entry: @entry, attribute: attribute
%fieldset.form-group
= f.submit 'Submit', class: 'btn btn-primary'
- = link_to 'Back', send("#{ @resource.model_name.route_key }_path"),
+ = link_to 'Back', polymorphic_path(@resource),
class: 'btn btn-secondary-outline'
- if @entry.persisted?
- = link_to 'Delete', send("#{ @resource.model_name.element }_path", @entry),
+ = link_to 'Delete', polymorphic_path(@entry),
class: 'btn btn-danger-outline pull-md-right', method: :delete,
data: { confirm: 'Are you sure?' } | 4 | 0.307692 | 2 | 2 |
f34c27bf5e15cebc46b0a72c6313d9f0a88aebe6 | src/typography/tables.sass | src/typography/tables.sass | @import "bourbon/bourbon"
@import "vars/component"
@import "./fonts"
@import "util/exports"
+exports("typography/tables")
table
+font-caption(delta, 0, 0)
th, td
padding-left: rythm(0.5)
padding-right: rythm(0.5)
th
+font-caption(gamma)
border-bottom: 3px solid #ddd
td:not(:first-child):not(:last-child)
border-left: 1px solid #ddd
border-right: 1px solid #ddd
| @import "bourbon/bourbon"
@import "vars/component"
@import "./fonts"
@import "util/exports"
+exports("typography/tables")
table
+font-caption(delta, 0, 0)
th, td
padding-left: rythm(0.5)
padding-right: rythm(0.5)
th
+font-caption(gamma)
border-bottom: 3px solid #ddd
td
border-right: 1px solid #ddd
td:last-child)
border-right: none
| Fix table borders for when there are only two columns. | Fix table borders for when there are only two columns.
| Sass | agpl-3.0 | JoshuaToenyes/stout-ui,JoshuaToenyes/stout-ui,JoshuaToenyes/stout-ui | sass | ## Code Before:
@import "bourbon/bourbon"
@import "vars/component"
@import "./fonts"
@import "util/exports"
+exports("typography/tables")
table
+font-caption(delta, 0, 0)
th, td
padding-left: rythm(0.5)
padding-right: rythm(0.5)
th
+font-caption(gamma)
border-bottom: 3px solid #ddd
td:not(:first-child):not(:last-child)
border-left: 1px solid #ddd
border-right: 1px solid #ddd
## Instruction:
Fix table borders for when there are only two columns.
## Code After:
@import "bourbon/bourbon"
@import "vars/component"
@import "./fonts"
@import "util/exports"
+exports("typography/tables")
table
+font-caption(delta, 0, 0)
th, td
padding-left: rythm(0.5)
padding-right: rythm(0.5)
th
+font-caption(gamma)
border-bottom: 3px solid #ddd
td
border-right: 1px solid #ddd
td:last-child)
border-right: none
| @import "bourbon/bourbon"
@import "vars/component"
@import "./fonts"
@import "util/exports"
+exports("typography/tables")
table
+font-caption(delta, 0, 0)
th, td
padding-left: rythm(0.5)
padding-right: rythm(0.5)
th
+font-caption(gamma)
border-bottom: 3px solid #ddd
+ td
- td:not(:first-child):not(:last-child)
- border-left: 1px solid #ddd
border-right: 1px solid #ddd
+
+ td:last-child)
+ border-right: none | 6 | 0.26087 | 4 | 2 |
049be916ac36b93cbd76411ac990d15eeb80632d | public/style/mixins.styl | public/style/mixins.styl | clearfix()
zoom 1
&:before, &:after
content '.'
display block
height: 0
overflow hidden
&:after
clear both
invisible()
height 0
line-height 0
font-size 0
color transparent
overflow hidden
vertical-gradient(from, to)
background-image -webkit-gradient(linear, left top, left bottom, color-stop(0, from), color-stop(1, to))
background-image -webkit-linear-gradient(top, from 0%, to 100%)
background-image -moz-linear-gradient(top, from 0%, to 100%)
background-image linear-gradient(top, from 0%, to 100%)
border-radius()
-webkit-border-radius arguments
-moz-border-radius arguments
border-radius arguments
| clearfix()
zoom 1
&:before, &:after
content '.'
display block
height: 0
overflow hidden
&:after
clear both
invisible()
height 0
line-height 0
font-size 0
color transparent
overflow hidden
vertical-gradient(from, to)
background-image -webkit-gradient(linear, left top, left bottom, color-stop(0, from), color-stop(1, to))
background-image -webkit-linear-gradient(top, from 0%, to 100%)
background-image -moz-linear-gradient(top, from 0%, to 100%)
background-image linear-gradient(top, from 0%, to 100%)
box-shadow()
-webkit-box-shadow arguments
-moz-box-shadow arguments
box-shadow arguments
border-radius()
-webkit-border-radius arguments
-moz-border-radius arguments
border-radius arguments
| Add drop shadow to menu. | Add drop shadow to menu.
| Stylus | mit | jared-stilwell/JSComplexity.org,philbooth/JSComplexity.org,jared-stilwell/JSComplexity.org,philbooth/JSComplexity.org,jared-stilwell/JSComplexity.org | stylus | ## Code Before:
clearfix()
zoom 1
&:before, &:after
content '.'
display block
height: 0
overflow hidden
&:after
clear both
invisible()
height 0
line-height 0
font-size 0
color transparent
overflow hidden
vertical-gradient(from, to)
background-image -webkit-gradient(linear, left top, left bottom, color-stop(0, from), color-stop(1, to))
background-image -webkit-linear-gradient(top, from 0%, to 100%)
background-image -moz-linear-gradient(top, from 0%, to 100%)
background-image linear-gradient(top, from 0%, to 100%)
border-radius()
-webkit-border-radius arguments
-moz-border-radius arguments
border-radius arguments
## Instruction:
Add drop shadow to menu.
## Code After:
clearfix()
zoom 1
&:before, &:after
content '.'
display block
height: 0
overflow hidden
&:after
clear both
invisible()
height 0
line-height 0
font-size 0
color transparent
overflow hidden
vertical-gradient(from, to)
background-image -webkit-gradient(linear, left top, left bottom, color-stop(0, from), color-stop(1, to))
background-image -webkit-linear-gradient(top, from 0%, to 100%)
background-image -moz-linear-gradient(top, from 0%, to 100%)
background-image linear-gradient(top, from 0%, to 100%)
box-shadow()
-webkit-box-shadow arguments
-moz-box-shadow arguments
box-shadow arguments
border-radius()
-webkit-border-radius arguments
-moz-border-radius arguments
border-radius arguments
| clearfix()
zoom 1
&:before, &:after
content '.'
display block
height: 0
overflow hidden
&:after
clear both
invisible()
height 0
line-height 0
font-size 0
color transparent
overflow hidden
vertical-gradient(from, to)
background-image -webkit-gradient(linear, left top, left bottom, color-stop(0, from), color-stop(1, to))
background-image -webkit-linear-gradient(top, from 0%, to 100%)
background-image -moz-linear-gradient(top, from 0%, to 100%)
background-image linear-gradient(top, from 0%, to 100%)
+ box-shadow()
+ -webkit-box-shadow arguments
+ -moz-box-shadow arguments
+ box-shadow arguments
+
border-radius()
-webkit-border-radius arguments
-moz-border-radius arguments
border-radius arguments | 5 | 0.185185 | 5 | 0 |
fb083580920b89bda2cbfc0d3be5ad5c2a0ca074 | src/jwt-request.js | src/jwt-request.js | var JWTHelper = require('./jwt-helper');
var JWTConfig = require('./jwt-config');
var JWTRequest = {
setAuthorizationHeader(options, token) {
if (!options.headers) options.headers = {};
options.headers[JWTConfig.authHeader] = `${JWTConfig.authPrefix} ${token}`;
return options;
},
handleUnAuthorizedFetch(url, options) {
return fetch(url, options).then((response) => response.json());
},
handleAuthorizedFetch(url, options) {
return new Promise((resolve, reject) => {
JWTHelper.getToken().then((token) => {
if (token && !JWTHelper.isTokenExpired(token)) {
options = this.setAuthorizationHeader(options, token);
fetch(url, options).then((response) => {
resolve(response.json())
});
} else {
reject('Token is either not valid or has expired.');
}
})
})
},
fetch(url, options, skipAuthorization) {
options = options || {};
if (skipAuthorization) {
return this.handleUnAuthorizedFetch(url, options);
} else {
return this.handleAuthorizedFetch(url, options);
}
}
};
module.exports = JWTRequest; | var JWTHelper = require('./jwt-helper');
var JWTConfig = require('./jwt-config');
var JWTRequest = {
setAuthorizationHeader(options, token) {
if (!options.headers) options.headers = {};
options.headers[JWTConfig.authHeader] = `${JWTConfig.authPrefix} ${token}`;
return options;
},
handleUnAuthorizedFetch(url, options) {
return fetch(url, options).then((response) => response.json());
},
handleAuthorizedFetch(url, options) {
return new Promise((resolve, reject) => {
JWTHelper.getToken().then((token) => {
if (token && !JWTHelper.isTokenExpired(token)) {
options = this.setAuthorizationHeader(options, token);
fetch(url, options).then((response) => {
resolve(response.json())
});
} else {
reject('Token is either not valid or has expired.');
}
})
})
},
fetch(url, options) {
options = options || {};
if (options.skipAuthorization) {
return this.handleUnAuthorizedFetch(url, options);
} else {
return this.handleAuthorizedFetch(url, options);
}
}
};
module.exports = JWTRequest; | Add skipAuthorization to options instead of a variable in our method | Add skipAuthorization to options instead of a variable in our method | JavaScript | mit | iDay/react-native-http,iktw/react-native-http | javascript | ## Code Before:
var JWTHelper = require('./jwt-helper');
var JWTConfig = require('./jwt-config');
var JWTRequest = {
setAuthorizationHeader(options, token) {
if (!options.headers) options.headers = {};
options.headers[JWTConfig.authHeader] = `${JWTConfig.authPrefix} ${token}`;
return options;
},
handleUnAuthorizedFetch(url, options) {
return fetch(url, options).then((response) => response.json());
},
handleAuthorizedFetch(url, options) {
return new Promise((resolve, reject) => {
JWTHelper.getToken().then((token) => {
if (token && !JWTHelper.isTokenExpired(token)) {
options = this.setAuthorizationHeader(options, token);
fetch(url, options).then((response) => {
resolve(response.json())
});
} else {
reject('Token is either not valid or has expired.');
}
})
})
},
fetch(url, options, skipAuthorization) {
options = options || {};
if (skipAuthorization) {
return this.handleUnAuthorizedFetch(url, options);
} else {
return this.handleAuthorizedFetch(url, options);
}
}
};
module.exports = JWTRequest;
## Instruction:
Add skipAuthorization to options instead of a variable in our method
## Code After:
var JWTHelper = require('./jwt-helper');
var JWTConfig = require('./jwt-config');
var JWTRequest = {
setAuthorizationHeader(options, token) {
if (!options.headers) options.headers = {};
options.headers[JWTConfig.authHeader] = `${JWTConfig.authPrefix} ${token}`;
return options;
},
handleUnAuthorizedFetch(url, options) {
return fetch(url, options).then((response) => response.json());
},
handleAuthorizedFetch(url, options) {
return new Promise((resolve, reject) => {
JWTHelper.getToken().then((token) => {
if (token && !JWTHelper.isTokenExpired(token)) {
options = this.setAuthorizationHeader(options, token);
fetch(url, options).then((response) => {
resolve(response.json())
});
} else {
reject('Token is either not valid or has expired.');
}
})
})
},
fetch(url, options) {
options = options || {};
if (options.skipAuthorization) {
return this.handleUnAuthorizedFetch(url, options);
} else {
return this.handleAuthorizedFetch(url, options);
}
}
};
module.exports = JWTRequest; | var JWTHelper = require('./jwt-helper');
var JWTConfig = require('./jwt-config');
var JWTRequest = {
setAuthorizationHeader(options, token) {
if (!options.headers) options.headers = {};
options.headers[JWTConfig.authHeader] = `${JWTConfig.authPrefix} ${token}`;
return options;
},
handleUnAuthorizedFetch(url, options) {
return fetch(url, options).then((response) => response.json());
},
handleAuthorizedFetch(url, options) {
return new Promise((resolve, reject) => {
JWTHelper.getToken().then((token) => {
if (token && !JWTHelper.isTokenExpired(token)) {
options = this.setAuthorizationHeader(options, token);
fetch(url, options).then((response) => {
resolve(response.json())
});
} else {
reject('Token is either not valid or has expired.');
}
})
})
},
- fetch(url, options, skipAuthorization) {
+ fetch(url, options) {
options = options || {};
- if (skipAuthorization) {
+ if (options.skipAuthorization) {
? ++++++++
return this.handleUnAuthorizedFetch(url, options);
} else {
return this.handleAuthorizedFetch(url, options);
}
}
};
module.exports = JWTRequest; | 4 | 0.095238 | 2 | 2 |
e0d2b4014996f0dea7a9cf0fe45888f460e6826a | labs/spring_2016_update.md | labs/spring_2016_update.md | Hi {lab_director_name}!
This past year has been exciting, and it was amazing working together with you!
I know that we've certainly learned a lot.
In the next two months, Hack Club is focusing on growth. It's time to onboard
the 50+ clubs on our waitlist! Because of this, we're focusing on getting the
early workshops right (i.e. workshop 1 and 2 and such). This means that we might
not have curriculum that's targeted for clubs like yours which are already well
past workshops 1 and 2. We are comfortable with doing this because we know
you'll be able to figure it out with each other!
For these new clubs, we're putting together a [Hack Club Guide]
(https://github.com/hackclub/hackclub/blob/master/clubs/README.md).
We'd love your feedback on this guide here:
[here](https://github.com/hackclub/hackclub/commit/f418eea0e904938e0765f73d58216710d936a1f7).
You are Hack Club's inaugural class! It is now time for you to help the next
generation of club leaders. Make them feel welcome---make the Slack feel like
it's their home. They'll be reaching out for questions---go out of your way to
help them out!
Let's make 2016 amazing!
Jonathan, Zach
| Subject: Hack Club Semester 2 Update
Hi everyone!
We've learned and failed a tremendous amount this past semester. All of you
have been incredibly supportive and both of us feel so privileged to have the
opportunity to work with each and every one of you.
In the next two months, Hack Club is focusing on growth. Our goal is to start
50 clubs and increase our weekly attendance by 750 students by the end of
February. As a result of this, we're focusing on rewriting our early workshops
and standardizing the beginning of clubs ([click here][club_guide] for an
example of what we're working on). This means that we won't be building
curriculum for clubs like yours at the pace we did last semester and we may
not have new workshops in time for all of your meetings while we're still
early into this semester.
Your work this past semester has led us to where we are today and we'll only
be able to continue the movement with your continued involvement. As we
onboard new clubs, the best way for you to contribute is to welcome and
support new club leaders. Make the Slack feel like it's their home. We're
looking to pair existing clubs with new clubs to start the mentorship early.
If you are interested in being a mentor to new clubs, please reply directly to
this email to let us know.
Let's make 2016 amazing!
Jonathan & Zach
[club_guide]: https://github.com/hackclub/hackclub/blob/master/clubs/README.md
| Add 2016 spring semester update for club leaders | Add 2016 spring semester update for club leaders
| Markdown | mit | hackedu/meta,ssun098/meta,hackedu/meta,ssun098/meta | markdown | ## Code Before:
Hi {lab_director_name}!
This past year has been exciting, and it was amazing working together with you!
I know that we've certainly learned a lot.
In the next two months, Hack Club is focusing on growth. It's time to onboard
the 50+ clubs on our waitlist! Because of this, we're focusing on getting the
early workshops right (i.e. workshop 1 and 2 and such). This means that we might
not have curriculum that's targeted for clubs like yours which are already well
past workshops 1 and 2. We are comfortable with doing this because we know
you'll be able to figure it out with each other!
For these new clubs, we're putting together a [Hack Club Guide]
(https://github.com/hackclub/hackclub/blob/master/clubs/README.md).
We'd love your feedback on this guide here:
[here](https://github.com/hackclub/hackclub/commit/f418eea0e904938e0765f73d58216710d936a1f7).
You are Hack Club's inaugural class! It is now time for you to help the next
generation of club leaders. Make them feel welcome---make the Slack feel like
it's their home. They'll be reaching out for questions---go out of your way to
help them out!
Let's make 2016 amazing!
Jonathan, Zach
## Instruction:
Add 2016 spring semester update for club leaders
## Code After:
Subject: Hack Club Semester 2 Update
Hi everyone!
We've learned and failed a tremendous amount this past semester. All of you
have been incredibly supportive and both of us feel so privileged to have the
opportunity to work with each and every one of you.
In the next two months, Hack Club is focusing on growth. Our goal is to start
50 clubs and increase our weekly attendance by 750 students by the end of
February. As a result of this, we're focusing on rewriting our early workshops
and standardizing the beginning of clubs ([click here][club_guide] for an
example of what we're working on). This means that we won't be building
curriculum for clubs like yours at the pace we did last semester and we may
not have new workshops in time for all of your meetings while we're still
early into this semester.
Your work this past semester has led us to where we are today and we'll only
be able to continue the movement with your continued involvement. As we
onboard new clubs, the best way for you to contribute is to welcome and
support new club leaders. Make the Slack feel like it's their home. We're
looking to pair existing clubs with new clubs to start the mentorship early.
If you are interested in being a mentor to new clubs, please reply directly to
this email to let us know.
Let's make 2016 amazing!
Jonathan & Zach
[club_guide]: https://github.com/hackclub/hackclub/blob/master/clubs/README.md
| - Hi {lab_director_name}!
+ Subject: Hack Club Semester 2 Update
+ Hi everyone!
- This past year has been exciting, and it was amazing working together with you!
- I know that we've certainly learned a lot.
+ We've learned and failed a tremendous amount this past semester. All of you
+ have been incredibly supportive and both of us feel so privileged to have the
+ opportunity to work with each and every one of you.
- In the next two months, Hack Club is focusing on growth. It's time to onboard
- the 50+ clubs on our waitlist! Because of this, we're focusing on getting the
- early workshops right (i.e. workshop 1 and 2 and such). This means that we might
- not have curriculum that's targeted for clubs like yours which are already well
- past workshops 1 and 2. We are comfortable with doing this because we know
- you'll be able to figure it out with each other!
- For these new clubs, we're putting together a [Hack Club Guide]
- (https://github.com/hackclub/hackclub/blob/master/clubs/README.md).
- We'd love your feedback on this guide here:
- [here](https://github.com/hackclub/hackclub/commit/f418eea0e904938e0765f73d58216710d936a1f7).
+ In the next two months, Hack Club is focusing on growth. Our goal is to start
+ 50 clubs and increase our weekly attendance by 750 students by the end of
+ February. As a result of this, we're focusing on rewriting our early workshops
+ and standardizing the beginning of clubs ([click here][club_guide] for an
+ example of what we're working on). This means that we won't be building
+ curriculum for clubs like yours at the pace we did last semester and we may
+ not have new workshops in time for all of your meetings while we're still
+ early into this semester.
- You are Hack Club's inaugural class! It is now time for you to help the next
- generation of club leaders. Make them feel welcome---make the Slack feel like
- it's their home. They'll be reaching out for questions---go out of your way to
- help them out!
+ Your work this past semester has led us to where we are today and we'll only
+ be able to continue the movement with your continued involvement. As we
+ onboard new clubs, the best way for you to contribute is to welcome and
+ support new club leaders. Make the Slack feel like it's their home. We're
+ looking to pair existing clubs with new clubs to start the mentorship early.
+ If you are interested in being a mentor to new clubs, please reply directly to
+ this email to let us know.
- Let's make 2016 amazing!
+ Let's make 2016 amazing!
? ++
- Jonathan, Zach
? ^
+ Jonathan & Zach
? ^^
+
+ [club_guide]: https://github.com/hackclub/hackclub/blob/master/clubs/README.md | 43 | 1.791667 | 24 | 19 |
a83202db9da5280243f0ade3f0932e9e75972fae | .sh/10-core-commands.sh | .sh/10-core-commands.sh |
if ! command -v realpath >/dev/null 2>&1; then
realpath() {
printf %s "$1" | command grep -q '^/' && printf %s "$1" || printf %s "$PWD/${1#./}"
}
fi
|
if ! which realpath >/dev/null 2>&1; then
realpath() {
printf %s "$1" | command grep -q '^/' && printf %s "$1" || printf %s "$PWD/${1#./}"
}
fi
| Replace realpath only if previously undefined | Shell: Replace realpath only if previously undefined
| Shell | unlicense | nisavid/home,nisavid/home,nisavid/home,nisavid/home | shell | ## Code Before:
if ! command -v realpath >/dev/null 2>&1; then
realpath() {
printf %s "$1" | command grep -q '^/' && printf %s "$1" || printf %s "$PWD/${1#./}"
}
fi
## Instruction:
Shell: Replace realpath only if previously undefined
## Code After:
if ! which realpath >/dev/null 2>&1; then
realpath() {
printf %s "$1" | command grep -q '^/' && printf %s "$1" || printf %s "$PWD/${1#./}"
}
fi
|
- if ! command -v realpath >/dev/null 2>&1; then
? ^^^^^^^^^
+ if ! which realpath >/dev/null 2>&1; then
? +++ ^
realpath() {
printf %s "$1" | command grep -q '^/' && printf %s "$1" || printf %s "$PWD/${1#./}"
}
fi | 2 | 0.333333 | 1 | 1 |
77f0c9dcf7fdc4ebe904c32b64fbcb5c9c1f4d6b | src/main/java/com/googlecode/pngtastic/core/PngChunkInserter.java | src/main/java/com/googlecode/pngtastic/core/PngChunkInserter.java | package com.googlecode.pngtastic.core;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
/**
* Usage:
* <code>
* byte[] bytes = new PngChunkInserter().insert(image, PngChunkInserter.dpi300Chunk);
* final File exported = image.export(toDir + "/name.png", bytes);
* </code>
*
* @author ray
*/
public class PngChunkInserter {
private static final byte[] dpi300 = new byte[] { 0, 0, 46, 35, 0, 0, 46, 35, 1 };
public static final PngChunk dpi300Chunk = new PngChunk(PngChunk.PHYSICAL_PIXEL_DIMENSIONS.getBytes(), dpi300);
public byte[] insert(PngImage image, PngChunk chunk) throws IOException {
// add it after the header chunk
image.getChunks().add(1, chunk);
final ByteArrayOutputStream outputBytes = new ByteArrayOutputStream();
image.writeDataOutputStream(outputBytes);
return outputBytes.toByteArray();
}
}
| package com.googlecode.pngtastic.core;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
/**
* Usage:
* <code>
* byte[] bytes = new PngChunkInserter().insert(image, PngChunkInserter.dpi300Chunk);
* final File exported = image.export(toDir + "/name.png", bytes);
* </code>
*
* @author ray
*/
public class PngChunkInserter {
/**
* Conversion note: one inch is equal to exactly 0.0254 meters.
* 300dpi = 300 / 0.0254 = 11,811.023622 = 11811 = 0x2E23 = new byte[] { 0, 0, 46, 35 }
* http://comments.gmane.org/gmane.comp.graphics.png.general/2425
*/
private static final byte[] dpi300 = new byte[] { 0, 0, 46, 35, 0, 0, 46, 35, 1 };
public static final PngChunk dpi300Chunk = new PngChunk(PngChunk.PHYSICAL_PIXEL_DIMENSIONS.getBytes(), dpi300);
public byte[] insert(PngImage image, PngChunk chunk) throws IOException {
// add it after the header chunk
image.getChunks().add(1, chunk);
final ByteArrayOutputStream outputBytes = new ByteArrayOutputStream();
image.writeDataOutputStream(outputBytes);
return outputBytes.toByteArray();
}
}
| Add comment describing dpi conversion | Add comment describing dpi conversion
| Java | mit | depsypher/pngtastic | java | ## Code Before:
package com.googlecode.pngtastic.core;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
/**
* Usage:
* <code>
* byte[] bytes = new PngChunkInserter().insert(image, PngChunkInserter.dpi300Chunk);
* final File exported = image.export(toDir + "/name.png", bytes);
* </code>
*
* @author ray
*/
public class PngChunkInserter {
private static final byte[] dpi300 = new byte[] { 0, 0, 46, 35, 0, 0, 46, 35, 1 };
public static final PngChunk dpi300Chunk = new PngChunk(PngChunk.PHYSICAL_PIXEL_DIMENSIONS.getBytes(), dpi300);
public byte[] insert(PngImage image, PngChunk chunk) throws IOException {
// add it after the header chunk
image.getChunks().add(1, chunk);
final ByteArrayOutputStream outputBytes = new ByteArrayOutputStream();
image.writeDataOutputStream(outputBytes);
return outputBytes.toByteArray();
}
}
## Instruction:
Add comment describing dpi conversion
## Code After:
package com.googlecode.pngtastic.core;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
/**
* Usage:
* <code>
* byte[] bytes = new PngChunkInserter().insert(image, PngChunkInserter.dpi300Chunk);
* final File exported = image.export(toDir + "/name.png", bytes);
* </code>
*
* @author ray
*/
public class PngChunkInserter {
/**
* Conversion note: one inch is equal to exactly 0.0254 meters.
* 300dpi = 300 / 0.0254 = 11,811.023622 = 11811 = 0x2E23 = new byte[] { 0, 0, 46, 35 }
* http://comments.gmane.org/gmane.comp.graphics.png.general/2425
*/
private static final byte[] dpi300 = new byte[] { 0, 0, 46, 35, 0, 0, 46, 35, 1 };
public static final PngChunk dpi300Chunk = new PngChunk(PngChunk.PHYSICAL_PIXEL_DIMENSIONS.getBytes(), dpi300);
public byte[] insert(PngImage image, PngChunk chunk) throws IOException {
// add it after the header chunk
image.getChunks().add(1, chunk);
final ByteArrayOutputStream outputBytes = new ByteArrayOutputStream();
image.writeDataOutputStream(outputBytes);
return outputBytes.toByteArray();
}
}
| package com.googlecode.pngtastic.core;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
/**
* Usage:
* <code>
* byte[] bytes = new PngChunkInserter().insert(image, PngChunkInserter.dpi300Chunk);
* final File exported = image.export(toDir + "/name.png", bytes);
* </code>
*
* @author ray
*/
public class PngChunkInserter {
+ /**
+ * Conversion note: one inch is equal to exactly 0.0254 meters.
+ * 300dpi = 300 / 0.0254 = 11,811.023622 = 11811 = 0x2E23 = new byte[] { 0, 0, 46, 35 }
+ * http://comments.gmane.org/gmane.comp.graphics.png.general/2425
+ */
private static final byte[] dpi300 = new byte[] { 0, 0, 46, 35, 0, 0, 46, 35, 1 };
public static final PngChunk dpi300Chunk = new PngChunk(PngChunk.PHYSICAL_PIXEL_DIMENSIONS.getBytes(), dpi300);
public byte[] insert(PngImage image, PngChunk chunk) throws IOException {
// add it after the header chunk
image.getChunks().add(1, chunk);
final ByteArrayOutputStream outputBytes = new ByteArrayOutputStream();
image.writeDataOutputStream(outputBytes);
return outputBytes.toByteArray();
}
} | 5 | 0.166667 | 5 | 0 |
ced0ceddd353c389bc4f305d8b635271ed081707 | pkg/analyzer/test/src/diagnostics/private_optional_parameter_test.dart | pkg/analyzer/test/src/diagnostics/private_optional_parameter_test.dart | // Copyright (c) 2019, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
import 'package:analyzer/src/error/codes.dart';
import 'package:test_reflective_loader/test_reflective_loader.dart';
import '../dart/resolution/driver_resolution.dart';
main() {
defineReflectiveSuite(() {
defineReflectiveTests(RecursiveCompileTimeConstantTest);
});
}
@reflectiveTest
class RecursiveCompileTimeConstantTest extends DriverResolutionTest {
test_private() async {
await assertErrorsInCode('''
f({var _p}) {}
''', [
error(CompileTimeErrorCode.PRIVATE_OPTIONAL_PARAMETER, 3, 6),
]);
}
test_fieldFormal() async {
await assertErrorsInCode(r'''
class A {
var _p;
A({this._p: 0});
}
''', [
error(HintCode.UNUSED_FIELD, 16, 2),
error(CompileTimeErrorCode.PRIVATE_OPTIONAL_PARAMETER, 25, 7),
]);
}
test_withDefaultValue() async {
await assertErrorsInCode('''
f({_p : 0}) {}
''', [
error(CompileTimeErrorCode.PRIVATE_OPTIONAL_PARAMETER, 3, 2),
]);
}
}
| // Copyright (c) 2019, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
import 'package:analyzer/src/error/codes.dart';
import 'package:test_reflective_loader/test_reflective_loader.dart';
import '../dart/resolution/driver_resolution.dart';
main() {
defineReflectiveSuite(() {
defineReflectiveTests(PrivateOptionalParameterTest);
});
}
@reflectiveTest
class PrivateOptionalParameterTest extends DriverResolutionTest {
test_private() async {
await assertErrorsInCode('''
f({var _p}) {}
''', [
error(CompileTimeErrorCode.PRIVATE_OPTIONAL_PARAMETER, 3, 6),
]);
}
test_fieldFormal() async {
await assertErrorsInCode(r'''
class A {
var _p;
A({this._p: 0});
}
''', [
error(HintCode.UNUSED_FIELD, 16, 2),
error(CompileTimeErrorCode.PRIVATE_OPTIONAL_PARAMETER, 25, 7),
]);
}
test_withDefaultValue() async {
await assertErrorsInCode('''
f({_p : 0}) {}
''', [
error(CompileTimeErrorCode.PRIVATE_OPTIONAL_PARAMETER, 3, 2),
]);
}
}
| Rename an incorrectly named test class | Rename an incorrectly named test class
Change-Id: I40cdb290cea7beaffa68d5f21f1e340e7086a857
Reviewed-on: https://dart-review.googlesource.com/c/sdk/+/138603
Reviewed-by: Konstantin Shcheglov <09e4d7516628963212bf4aace2f97603d2b706e4@google.com>
Commit-Queue: Brian Wilkerson <1f7641b6b14c52b9163524ab8d9aabff80176f21@google.com>
| Dart | bsd-3-clause | dart-lang/sdk,dart-lang/sdk,dart-lang/sdk,dart-lang/sdk,dart-lang/sdk,dart-lang/sdk,dart-lang/sdk,dart-lang/sdk | dart | ## Code Before:
// Copyright (c) 2019, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
import 'package:analyzer/src/error/codes.dart';
import 'package:test_reflective_loader/test_reflective_loader.dart';
import '../dart/resolution/driver_resolution.dart';
main() {
defineReflectiveSuite(() {
defineReflectiveTests(RecursiveCompileTimeConstantTest);
});
}
@reflectiveTest
class RecursiveCompileTimeConstantTest extends DriverResolutionTest {
test_private() async {
await assertErrorsInCode('''
f({var _p}) {}
''', [
error(CompileTimeErrorCode.PRIVATE_OPTIONAL_PARAMETER, 3, 6),
]);
}
test_fieldFormal() async {
await assertErrorsInCode(r'''
class A {
var _p;
A({this._p: 0});
}
''', [
error(HintCode.UNUSED_FIELD, 16, 2),
error(CompileTimeErrorCode.PRIVATE_OPTIONAL_PARAMETER, 25, 7),
]);
}
test_withDefaultValue() async {
await assertErrorsInCode('''
f({_p : 0}) {}
''', [
error(CompileTimeErrorCode.PRIVATE_OPTIONAL_PARAMETER, 3, 2),
]);
}
}
## Instruction:
Rename an incorrectly named test class
Change-Id: I40cdb290cea7beaffa68d5f21f1e340e7086a857
Reviewed-on: https://dart-review.googlesource.com/c/sdk/+/138603
Reviewed-by: Konstantin Shcheglov <09e4d7516628963212bf4aace2f97603d2b706e4@google.com>
Commit-Queue: Brian Wilkerson <1f7641b6b14c52b9163524ab8d9aabff80176f21@google.com>
## Code After:
// Copyright (c) 2019, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
import 'package:analyzer/src/error/codes.dart';
import 'package:test_reflective_loader/test_reflective_loader.dart';
import '../dart/resolution/driver_resolution.dart';
main() {
defineReflectiveSuite(() {
defineReflectiveTests(PrivateOptionalParameterTest);
});
}
@reflectiveTest
class PrivateOptionalParameterTest extends DriverResolutionTest {
test_private() async {
await assertErrorsInCode('''
f({var _p}) {}
''', [
error(CompileTimeErrorCode.PRIVATE_OPTIONAL_PARAMETER, 3, 6),
]);
}
test_fieldFormal() async {
await assertErrorsInCode(r'''
class A {
var _p;
A({this._p: 0});
}
''', [
error(HintCode.UNUSED_FIELD, 16, 2),
error(CompileTimeErrorCode.PRIVATE_OPTIONAL_PARAMETER, 25, 7),
]);
}
test_withDefaultValue() async {
await assertErrorsInCode('''
f({_p : 0}) {}
''', [
error(CompileTimeErrorCode.PRIVATE_OPTIONAL_PARAMETER, 3, 2),
]);
}
}
| // Copyright (c) 2019, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
import 'package:analyzer/src/error/codes.dart';
import 'package:test_reflective_loader/test_reflective_loader.dart';
import '../dart/resolution/driver_resolution.dart';
main() {
defineReflectiveSuite(() {
- defineReflectiveTests(RecursiveCompileTimeConstantTest);
+ defineReflectiveTests(PrivateOptionalParameterTest);
});
}
@reflectiveTest
- class RecursiveCompileTimeConstantTest extends DriverResolutionTest {
+ class PrivateOptionalParameterTest extends DriverResolutionTest {
test_private() async {
await assertErrorsInCode('''
f({var _p}) {}
''', [
error(CompileTimeErrorCode.PRIVATE_OPTIONAL_PARAMETER, 3, 6),
]);
}
test_fieldFormal() async {
await assertErrorsInCode(r'''
class A {
var _p;
A({this._p: 0});
}
''', [
error(HintCode.UNUSED_FIELD, 16, 2),
error(CompileTimeErrorCode.PRIVATE_OPTIONAL_PARAMETER, 25, 7),
]);
}
test_withDefaultValue() async {
await assertErrorsInCode('''
f({_p : 0}) {}
''', [
error(CompileTimeErrorCode.PRIVATE_OPTIONAL_PARAMETER, 3, 2),
]);
}
} | 4 | 0.088889 | 2 | 2 |
5c9928e4edc5d9ab29821f61fe93e499bf2cf02d | .travis.yml | .travis.yml | language: android
android:
components:
- extra-android-m2repository
licenses:
- 'android-sdk-license-.+'
script:
- ./gradlew --full-stacktrace -q clean assemble cobertura uploadArchives
after_success:
- ./gradlew coveralls
| language: android
android:
components:
- extra-android-m2repository
licenses:
- 'android-sdk-license-.+'
script:
- ./gradlew --full-stacktrace -q clean assemble cobertura uploadArchives
after_success:
- ls -al build/cobertura/
- ./gradlew coveralls --info
| Add option to show info for coveralls task. | Add option to show info for coveralls task.
| YAML | apache-2.0 | ksoichiro/gradle-eclipse-aar-plugin | yaml | ## Code Before:
language: android
android:
components:
- extra-android-m2repository
licenses:
- 'android-sdk-license-.+'
script:
- ./gradlew --full-stacktrace -q clean assemble cobertura uploadArchives
after_success:
- ./gradlew coveralls
## Instruction:
Add option to show info for coveralls task.
## Code After:
language: android
android:
components:
- extra-android-m2repository
licenses:
- 'android-sdk-license-.+'
script:
- ./gradlew --full-stacktrace -q clean assemble cobertura uploadArchives
after_success:
- ls -al build/cobertura/
- ./gradlew coveralls --info
| language: android
android:
components:
- extra-android-m2repository
licenses:
- 'android-sdk-license-.+'
script:
- ./gradlew --full-stacktrace -q clean assemble cobertura uploadArchives
after_success:
+ - ls -al build/cobertura/
- - ./gradlew coveralls
+ - ./gradlew coveralls --info
? +++++++
| 3 | 0.214286 | 2 | 1 |
74fa3f03a18a3133e4b76160acaf2781f3e931b0 | pubspec.yaml | pubspec.yaml | name: dherkin
version: 0.2.1
author: Danil Kornishev <danil.kornishev@gmail.com>
description: Gherkin/Cucumber implementation in dart
homepage: https://github.com/dkornishev/dherkin
dependencies:
log4dart: ">=1.4.10 <1.5.0"
args: ">=0.8.7 < 0.9.0"
dev_dependencies:
| name: dherkin
version: 0.2.1
author: Danil Kornishev <danil.kornishev@gmail.com>
description: Gherkin/Cucumber implementation in dart
homepage: https://github.com/dkornishev/dherkin
dependencies:
log4dart: ">=1.4.10 <1.5.0"
args: ">=0.8.7 < 0.9.0"
worker:
git: https://github.com/Dreckr/Worker.git
#worker: ">=0.3.9 <0.4.0"
dev_dependencies:
| Change worker dependency URI to public. | Change worker dependency URI to public.
| YAML | bsd-2-clause | dkornishev/dherkin | yaml | ## Code Before:
name: dherkin
version: 0.2.1
author: Danil Kornishev <danil.kornishev@gmail.com>
description: Gherkin/Cucumber implementation in dart
homepage: https://github.com/dkornishev/dherkin
dependencies:
log4dart: ">=1.4.10 <1.5.0"
args: ">=0.8.7 < 0.9.0"
dev_dependencies:
## Instruction:
Change worker dependency URI to public.
## Code After:
name: dherkin
version: 0.2.1
author: Danil Kornishev <danil.kornishev@gmail.com>
description: Gherkin/Cucumber implementation in dart
homepage: https://github.com/dkornishev/dherkin
dependencies:
log4dart: ">=1.4.10 <1.5.0"
args: ">=0.8.7 < 0.9.0"
worker:
git: https://github.com/Dreckr/Worker.git
#worker: ">=0.3.9 <0.4.0"
dev_dependencies:
| name: dherkin
version: 0.2.1
author: Danil Kornishev <danil.kornishev@gmail.com>
description: Gherkin/Cucumber implementation in dart
homepage: https://github.com/dkornishev/dherkin
dependencies:
log4dart: ">=1.4.10 <1.5.0"
args: ">=0.8.7 < 0.9.0"
+ worker:
+ git: https://github.com/Dreckr/Worker.git
+ #worker: ">=0.3.9 <0.4.0"
dev_dependencies: | 3 | 0.333333 | 3 | 0 |
c103c74a0753011672c1b2915d82b5a763a403d7 | news/887.feature | news/887.feature | * Lazy load Moment.js locale based on user's current portal language.
[davilima6]
* Upgrade moment.js from version 2.10.6 to 2.24.0
| * Lazy load Moment.js locale based on user's current portal language.
[davilima6]
* Upgrade moment.js from version 2.10.6 to 2.24.0
[davilima6]
| Add author to previous changelog | Add author to previous changelog
| Cucumber | bsd-3-clause | plone/mockup,plone/mockup,plone/mockup | cucumber | ## Code Before:
* Lazy load Moment.js locale based on user's current portal language.
[davilima6]
* Upgrade moment.js from version 2.10.6 to 2.24.0
## Instruction:
Add author to previous changelog
## Code After:
* Lazy load Moment.js locale based on user's current portal language.
[davilima6]
* Upgrade moment.js from version 2.10.6 to 2.24.0
[davilima6]
| * Lazy load Moment.js locale based on user's current portal language.
[davilima6]
* Upgrade moment.js from version 2.10.6 to 2.24.0
+ [davilima6] | 1 | 0.25 | 1 | 0 |
07819596b8bb995336486e1d74672551771d3183 | spec/spec_helper.rb | spec/spec_helper.rb | $LOAD_PATH.unshift(File.join(File.dirname(__FILE__), '..', 'lib'))
$LOAD_PATH.unshift(File.dirname(__FILE__))
require 'spec'
require 'rapper'
require 'fileutils'
Spec::Runner.configure do |config|
# Tear down test case assets folders
config.after :each do
FileUtils.rm_r( Dir[ "tmp/*" ] )
FileUtils.rm_r( Dir[ "spec/fixtures/*/assets" ] )
end
end
def file_names( path )
Dir[path].map do |path|
File.basename( path )
end
end
# Adapted from: https://github.com/wycats/ruby_decorators/blob/master/specs.rb
Spec::Matchers.define :have_stdout do |regex|
regex = /^#{Regexp.escape(regex)}$/ if regex.is_a?(String)
match do |proc|
$stdout = StringIO.new
proc.call
$stdout.rewind
@captured = $stdout.read
$stdout = STDOUT
@captured =~ regex
end
failure_message do |proc|
"Expected #{regex.inspect} but got #{@captured.inspect}"
end
failure_message do |proc|
"Expected #{@captured.inspect} not to match #{regex.inspect}"
end
end
# https://github.com/wycats/ruby_decorators/blob/master/specs.rb
module Kernel
def silence_stdout
$stdout = StringIO.new
yield
$stdout = STDOUT
end
end
| $LOAD_PATH.unshift(File.join(File.dirname(__FILE__), '..', 'lib'))
$LOAD_PATH.unshift(File.dirname(__FILE__))
require 'spec'
require 'rapper'
require 'fileutils'
Spec::Runner.configure do |config|
config.before :suite do
`mkdir tmp/`
end
# Tear down test case assets folders
config.after :each do
FileUtils.rm_r( Dir[ "tmp/*" ] )
FileUtils.rm_r( Dir[ "spec/fixtures/*/assets" ] )
end
end
def file_names( path )
Dir[path].map do |path|
File.basename( path )
end
end
# Adapted from: https://github.com/wycats/ruby_decorators/blob/master/specs.rb
Spec::Matchers.define :have_stdout do |regex|
regex = /^#{Regexp.escape(regex)}$/ if regex.is_a?(String)
match do |proc|
$stdout = StringIO.new
proc.call
$stdout.rewind
@captured = $stdout.read
$stdout = STDOUT
@captured =~ regex
end
failure_message do |proc|
"Expected #{regex.inspect} but got #{@captured.inspect}"
end
failure_message do |proc|
"Expected #{@captured.inspect} not to match #{regex.inspect}"
end
end
# https://github.com/wycats/ruby_decorators/blob/master/specs.rb
module Kernel
def silence_stdout
$stdout = StringIO.new
yield
$stdout = STDOUT
end
end
| Set up tmp directory before specs. | Set up tmp directory before specs.
| Ruby | mit | tysontate/rapper,tysontate/rapper | ruby | ## Code Before:
$LOAD_PATH.unshift(File.join(File.dirname(__FILE__), '..', 'lib'))
$LOAD_PATH.unshift(File.dirname(__FILE__))
require 'spec'
require 'rapper'
require 'fileutils'
Spec::Runner.configure do |config|
# Tear down test case assets folders
config.after :each do
FileUtils.rm_r( Dir[ "tmp/*" ] )
FileUtils.rm_r( Dir[ "spec/fixtures/*/assets" ] )
end
end
def file_names( path )
Dir[path].map do |path|
File.basename( path )
end
end
# Adapted from: https://github.com/wycats/ruby_decorators/blob/master/specs.rb
Spec::Matchers.define :have_stdout do |regex|
regex = /^#{Regexp.escape(regex)}$/ if regex.is_a?(String)
match do |proc|
$stdout = StringIO.new
proc.call
$stdout.rewind
@captured = $stdout.read
$stdout = STDOUT
@captured =~ regex
end
failure_message do |proc|
"Expected #{regex.inspect} but got #{@captured.inspect}"
end
failure_message do |proc|
"Expected #{@captured.inspect} not to match #{regex.inspect}"
end
end
# https://github.com/wycats/ruby_decorators/blob/master/specs.rb
module Kernel
def silence_stdout
$stdout = StringIO.new
yield
$stdout = STDOUT
end
end
## Instruction:
Set up tmp directory before specs.
## Code After:
$LOAD_PATH.unshift(File.join(File.dirname(__FILE__), '..', 'lib'))
$LOAD_PATH.unshift(File.dirname(__FILE__))
require 'spec'
require 'rapper'
require 'fileutils'
Spec::Runner.configure do |config|
config.before :suite do
`mkdir tmp/`
end
# Tear down test case assets folders
config.after :each do
FileUtils.rm_r( Dir[ "tmp/*" ] )
FileUtils.rm_r( Dir[ "spec/fixtures/*/assets" ] )
end
end
def file_names( path )
Dir[path].map do |path|
File.basename( path )
end
end
# Adapted from: https://github.com/wycats/ruby_decorators/blob/master/specs.rb
Spec::Matchers.define :have_stdout do |regex|
regex = /^#{Regexp.escape(regex)}$/ if regex.is_a?(String)
match do |proc|
$stdout = StringIO.new
proc.call
$stdout.rewind
@captured = $stdout.read
$stdout = STDOUT
@captured =~ regex
end
failure_message do |proc|
"Expected #{regex.inspect} but got #{@captured.inspect}"
end
failure_message do |proc|
"Expected #{@captured.inspect} not to match #{regex.inspect}"
end
end
# https://github.com/wycats/ruby_decorators/blob/master/specs.rb
module Kernel
def silence_stdout
$stdout = StringIO.new
yield
$stdout = STDOUT
end
end
| $LOAD_PATH.unshift(File.join(File.dirname(__FILE__), '..', 'lib'))
$LOAD_PATH.unshift(File.dirname(__FILE__))
require 'spec'
require 'rapper'
require 'fileutils'
Spec::Runner.configure do |config|
+ config.before :suite do
+ `mkdir tmp/`
+ end
+
# Tear down test case assets folders
config.after :each do
FileUtils.rm_r( Dir[ "tmp/*" ] )
FileUtils.rm_r( Dir[ "spec/fixtures/*/assets" ] )
end
end
def file_names( path )
Dir[path].map do |path|
File.basename( path )
end
end
# Adapted from: https://github.com/wycats/ruby_decorators/blob/master/specs.rb
Spec::Matchers.define :have_stdout do |regex|
regex = /^#{Regexp.escape(regex)}$/ if regex.is_a?(String)
match do |proc|
$stdout = StringIO.new
proc.call
$stdout.rewind
@captured = $stdout.read
$stdout = STDOUT
@captured =~ regex
end
failure_message do |proc|
"Expected #{regex.inspect} but got #{@captured.inspect}"
end
failure_message do |proc|
"Expected #{@captured.inspect} not to match #{regex.inspect}"
end
end
# https://github.com/wycats/ruby_decorators/blob/master/specs.rb
module Kernel
def silence_stdout
$stdout = StringIO.new
yield
$stdout = STDOUT
end
end | 4 | 0.076923 | 4 | 0 |
f0222f19b86d65eae6f8f5e54c18d9c3555506c2 | tests/Constraint/TypeTest.php | tests/Constraint/TypeTest.php | <?php
namespace League\JsonGuard\Test\Constraint;
use League\JsonGuard\Constraint\DraftFour\Type;
use League\JsonGuard\ValidationError;
use League\JsonGuard\Validator;
class TypeTest extends \PHPUnit_Framework_TestCase
{
function test_numeric_string_is_not_a_number()
{
$type = new Type();
$error = $type->validate('1', 'number', new Validator([], new \stdClass()));
$this->assertInstanceOf(ValidationError::class, $error);
}
}
| <?php
namespace League\JsonGuard\Test\Constraint;
use League\JsonGuard\Constraint\DraftFour\Type;
use League\JsonGuard\ValidationError;
use League\JsonGuard\Validator;
class TypeTest extends \PHPUnit_Framework_TestCase
{
function test_numeric_string_is_not_a_number()
{
$type = new Type();
$error = $type->validate('1', 'number', new Validator([], new \stdClass()));
$this->assertInstanceOf(ValidationError::class, $error);
}
function test_numeric_string_is_a_string()
{
$type = new Type();
$error = $type->validate('9223372036854775999', 'string', new Validator([], new \stdClass()));
$this->assertNull($error);
}
}
| Add test for numeric strings | Add test for numeric strings
| PHP | mit | thephpleague/json-guard,machete-php/validation,yuloh/json-guard,yuloh/json-guard,machete-php/validation | php | ## Code Before:
<?php
namespace League\JsonGuard\Test\Constraint;
use League\JsonGuard\Constraint\DraftFour\Type;
use League\JsonGuard\ValidationError;
use League\JsonGuard\Validator;
class TypeTest extends \PHPUnit_Framework_TestCase
{
function test_numeric_string_is_not_a_number()
{
$type = new Type();
$error = $type->validate('1', 'number', new Validator([], new \stdClass()));
$this->assertInstanceOf(ValidationError::class, $error);
}
}
## Instruction:
Add test for numeric strings
## Code After:
<?php
namespace League\JsonGuard\Test\Constraint;
use League\JsonGuard\Constraint\DraftFour\Type;
use League\JsonGuard\ValidationError;
use League\JsonGuard\Validator;
class TypeTest extends \PHPUnit_Framework_TestCase
{
function test_numeric_string_is_not_a_number()
{
$type = new Type();
$error = $type->validate('1', 'number', new Validator([], new \stdClass()));
$this->assertInstanceOf(ValidationError::class, $error);
}
function test_numeric_string_is_a_string()
{
$type = new Type();
$error = $type->validate('9223372036854775999', 'string', new Validator([], new \stdClass()));
$this->assertNull($error);
}
}
| <?php
namespace League\JsonGuard\Test\Constraint;
use League\JsonGuard\Constraint\DraftFour\Type;
use League\JsonGuard\ValidationError;
use League\JsonGuard\Validator;
class TypeTest extends \PHPUnit_Framework_TestCase
{
function test_numeric_string_is_not_a_number()
{
$type = new Type();
$error = $type->validate('1', 'number', new Validator([], new \stdClass()));
$this->assertInstanceOf(ValidationError::class, $error);
}
+
+ function test_numeric_string_is_a_string()
+ {
+ $type = new Type();
+
+ $error = $type->validate('9223372036854775999', 'string', new Validator([], new \stdClass()));
+
+ $this->assertNull($error);
+ }
} | 9 | 0.473684 | 9 | 0 |
c6c2e150726c1bd32fba2b91897030228c99dd4e | flake.nix | flake.nix | {
description = "A flake for building ush";
inputs.nixpkgs.url = github:NixOS/nixpkgs/nixos-unstable;
outputs = { self, nixpkgs }: {
defaultPackage.x86_64-linux =
with import nixpkgs { system = "x86_64-linux"; };
stdenv.mkDerivation rec {
pname = "ush";
version = "0.72";
src = self;
nativeBuildInputs = [
meson
ninja
pkg-config
];
buildInputs = [ libedit ];
meta = with lib; {
description = "small personal shell";
homepage = "https://qirus.net";
license = licenses.bsd3;
maintainers = with maintainers; [ hjones2199 ];
};
};
};
}
| {
description = "A flake for building ush";
inputs.nixpkgs.url = github:NixOS/nixpkgs/nixos-unstable;
outputs = { self, nixpkgs }: {
defaultPackage.x86_64-linux =
with import nixpkgs { system = "x86_64-linux"; };
stdenv.mkDerivation rec {
pname = "ush";
version = "0.72";
src = self;
nativeBuildInputs = [
meson
ninja
pkg-config
];
buildInputs = [ libedit ];
meta = with lib; {
description = "small personal shell";
homepage = "https://qirus.net";
maintainers = with maintainers; [ hjones2199 ];
license = licenses.bsd3;
platforms = platforms.linux;
};
};
riscvPackage =
with import nixpkgs { system = "x86_64-linux"; crossSystem = nixpkgs.lib.systems.examples.riscv64; };
stdenv.mkDerivation rec {
pname = "ush";
version = "0.72";
src = self;
nativeBuildInputs = [
meson
ninja
pkg-config
];
buildInputs = [ libedit ];
meta = with lib; {
description = "small personal shell";
homepage = "https://qirus.net";
maintainers = with maintainers; [ hjones2199 ];
license = licenses.bsd3;
platforms = platforms.linux;
};
};
};
}
| Add RISCV cross-compilation via nix | Add RISCV cross-compilation via nix
| Nix | bsd-3-clause | hjones2199/ush | nix | ## Code Before:
{
description = "A flake for building ush";
inputs.nixpkgs.url = github:NixOS/nixpkgs/nixos-unstable;
outputs = { self, nixpkgs }: {
defaultPackage.x86_64-linux =
with import nixpkgs { system = "x86_64-linux"; };
stdenv.mkDerivation rec {
pname = "ush";
version = "0.72";
src = self;
nativeBuildInputs = [
meson
ninja
pkg-config
];
buildInputs = [ libedit ];
meta = with lib; {
description = "small personal shell";
homepage = "https://qirus.net";
license = licenses.bsd3;
maintainers = with maintainers; [ hjones2199 ];
};
};
};
}
## Instruction:
Add RISCV cross-compilation via nix
## Code After:
{
description = "A flake for building ush";
inputs.nixpkgs.url = github:NixOS/nixpkgs/nixos-unstable;
outputs = { self, nixpkgs }: {
defaultPackage.x86_64-linux =
with import nixpkgs { system = "x86_64-linux"; };
stdenv.mkDerivation rec {
pname = "ush";
version = "0.72";
src = self;
nativeBuildInputs = [
meson
ninja
pkg-config
];
buildInputs = [ libedit ];
meta = with lib; {
description = "small personal shell";
homepage = "https://qirus.net";
maintainers = with maintainers; [ hjones2199 ];
license = licenses.bsd3;
platforms = platforms.linux;
};
};
riscvPackage =
with import nixpkgs { system = "x86_64-linux"; crossSystem = nixpkgs.lib.systems.examples.riscv64; };
stdenv.mkDerivation rec {
pname = "ush";
version = "0.72";
src = self;
nativeBuildInputs = [
meson
ninja
pkg-config
];
buildInputs = [ libedit ];
meta = with lib; {
description = "small personal shell";
homepage = "https://qirus.net";
maintainers = with maintainers; [ hjones2199 ];
license = licenses.bsd3;
platforms = platforms.linux;
};
};
};
}
| {
description = "A flake for building ush";
inputs.nixpkgs.url = github:NixOS/nixpkgs/nixos-unstable;
outputs = { self, nixpkgs }: {
defaultPackage.x86_64-linux =
with import nixpkgs { system = "x86_64-linux"; };
- stdenv.mkDerivation rec {
+ stdenv.mkDerivation rec {
? ++
- pname = "ush";
+ pname = "ush";
? ++
- version = "0.72";
+ version = "0.72";
? ++
- src = self;
+ src = self;
? ++
- nativeBuildInputs = [
+ nativeBuildInputs = [
? ++
- meson
+ meson
? ++
- ninja
+ ninja
? ++
- pkg-config
+ pkg-config
? ++
- ];
+ ];
? ++
- buildInputs = [ libedit ];
+ buildInputs = [ libedit ];
? ++
- meta = with lib; {
+ meta = with lib; {
? ++
- description = "small personal shell";
+ description = "small personal shell";
? ++
- homepage = "https://qirus.net";
+ homepage = "https://qirus.net";
? ++
- license = licenses.bsd3;
- maintainers = with maintainers; [ hjones2199 ];
+ maintainers = with maintainers; [ hjones2199 ];
? ++
+ license = licenses.bsd3;
+ platforms = platforms.linux;
+ };
};
+ riscvPackage =
+ with import nixpkgs { system = "x86_64-linux"; crossSystem = nixpkgs.lib.systems.examples.riscv64; };
+ stdenv.mkDerivation rec {
+ pname = "ush";
+ version = "0.72";
+
+ src = self;
+
+ nativeBuildInputs = [
+ meson
+ ninja
+ pkg-config
+ ];
+
+ buildInputs = [ libedit ];
+
+ meta = with lib; {
+ description = "small personal shell";
+ homepage = "https://qirus.net";
+ maintainers = with maintainers; [ hjones2199 ];
+ license = licenses.bsd3;
+ platforms = platforms.linux;
+ };
- };
+ };
? ++
};
} | 57 | 1.83871 | 41 | 16 |
ddbef13134b9609f8833e2118dcd88387c9ce581 | addressbook-web-tests/src/test/java/ru/stqua/pft/addressbook/tests/ContactModificationTests.java | addressbook-web-tests/src/test/java/ru/stqua/pft/addressbook/tests/ContactModificationTests.java | package ru.stqua.pft.addressbook.tests;
import org.testng.annotations.Test;
import ru.stqua.pft.addressbook.model.ContactData;
/**
* Created by Alexander Gorny on 1/29/2017.
*/
public class ContactModificationTests extends TestBase {
@Test
public void testContactModification(){
app.getNavigationHelper().gotoHomePage();
if (! app.getContactHelper().isThereAContact()){
// create new contact
app.getContactHelper().createContact(new ContactData("Alex", "Gorny", "Cool Woker", "Mr", "GE", "New Orleans", "444-555-6666", "cool@mail.com", "woker@mail.com", "www.homepage.com", "test1"), true);
app.getNavigationHelper().gotoHomePage();
// modify created contact
app.getContactHelper().modifyContact(new ContactData("Chuck", "Norris", "Walker", "Mr", "Texas Ranger", "Ryan", "444-555-6666", "cool@mail.com", "woker@mail.com", "www.homepage.com", null), 1);
app.getNavigationHelper().gotoHomePage();
// delete previously created contact
app.getContactHelper().selectContactFromTheList(1);
app.getContactHelper().deleteSelectedContant();
} else {
app.getContactHelper().modifyContact(new ContactData("Chuck", "Norris", "Walker", "Mr", "Texas Ranger", "Ryan", "444-555-6666", "cool@mail.com", "woker@mail.com", "www.homepage.com", null), 1);
}
app.getNavigationHelper().gotoHomePage();
}
}
| package ru.stqua.pft.addressbook.tests;
import org.testng.annotations.Test;
import ru.stqua.pft.addressbook.model.ContactData;
/**
* Created by Alexander Gorny on 1/29/2017.
*/
public class ContactModificationTests extends TestBase {
@Test
public void testContactModification(){
app.getNavigationHelper().gotoHomePage();
if (! app.getContactHelper().isThereAContact()){
// create new contact
app.getContactHelper().createContact(new ContactData("Alex", "Gorny", "Cool Woker", "Mr", "GE", "New Orleans", "444-555-6666", "cool@mail.com", "woker@mail.com", "www.homepage.com", "test1"), true);
app.getNavigationHelper().gotoHomePage();
}
app.getContactHelper().modifyContact(new ContactData("Chuck", "Norris", "Walker", "Mr", "Texas Ranger", "Ryan", "444-555-6666", "cool@mail.com", "woker@mail.com", "www.homepage.com", null), 1);
app.getNavigationHelper().gotoHomePage();
}
}
| Remove Contact Deletion from ContactModification | Remove Contact Deletion from ContactModification
| Java | apache-2.0 | GornyAlex/pdt_37,GornyAlex/pdt_37 | java | ## Code Before:
package ru.stqua.pft.addressbook.tests;
import org.testng.annotations.Test;
import ru.stqua.pft.addressbook.model.ContactData;
/**
* Created by Alexander Gorny on 1/29/2017.
*/
public class ContactModificationTests extends TestBase {
@Test
public void testContactModification(){
app.getNavigationHelper().gotoHomePage();
if (! app.getContactHelper().isThereAContact()){
// create new contact
app.getContactHelper().createContact(new ContactData("Alex", "Gorny", "Cool Woker", "Mr", "GE", "New Orleans", "444-555-6666", "cool@mail.com", "woker@mail.com", "www.homepage.com", "test1"), true);
app.getNavigationHelper().gotoHomePage();
// modify created contact
app.getContactHelper().modifyContact(new ContactData("Chuck", "Norris", "Walker", "Mr", "Texas Ranger", "Ryan", "444-555-6666", "cool@mail.com", "woker@mail.com", "www.homepage.com", null), 1);
app.getNavigationHelper().gotoHomePage();
// delete previously created contact
app.getContactHelper().selectContactFromTheList(1);
app.getContactHelper().deleteSelectedContant();
} else {
app.getContactHelper().modifyContact(new ContactData("Chuck", "Norris", "Walker", "Mr", "Texas Ranger", "Ryan", "444-555-6666", "cool@mail.com", "woker@mail.com", "www.homepage.com", null), 1);
}
app.getNavigationHelper().gotoHomePage();
}
}
## Instruction:
Remove Contact Deletion from ContactModification
## Code After:
package ru.stqua.pft.addressbook.tests;
import org.testng.annotations.Test;
import ru.stqua.pft.addressbook.model.ContactData;
/**
* Created by Alexander Gorny on 1/29/2017.
*/
public class ContactModificationTests extends TestBase {
@Test
public void testContactModification(){
app.getNavigationHelper().gotoHomePage();
if (! app.getContactHelper().isThereAContact()){
// create new contact
app.getContactHelper().createContact(new ContactData("Alex", "Gorny", "Cool Woker", "Mr", "GE", "New Orleans", "444-555-6666", "cool@mail.com", "woker@mail.com", "www.homepage.com", "test1"), true);
app.getNavigationHelper().gotoHomePage();
}
app.getContactHelper().modifyContact(new ContactData("Chuck", "Norris", "Walker", "Mr", "Texas Ranger", "Ryan", "444-555-6666", "cool@mail.com", "woker@mail.com", "www.homepage.com", null), 1);
app.getNavigationHelper().gotoHomePage();
}
}
| package ru.stqua.pft.addressbook.tests;
import org.testng.annotations.Test;
import ru.stqua.pft.addressbook.model.ContactData;
/**
* Created by Alexander Gorny on 1/29/2017.
*/
public class ContactModificationTests extends TestBase {
@Test
public void testContactModification(){
app.getNavigationHelper().gotoHomePage();
if (! app.getContactHelper().isThereAContact()){
// create new contact
app.getContactHelper().createContact(new ContactData("Alex", "Gorny", "Cool Woker", "Mr", "GE", "New Orleans", "444-555-6666", "cool@mail.com", "woker@mail.com", "www.homepage.com", "test1"), true);
app.getNavigationHelper().gotoHomePage();
- // modify created contact
- app.getContactHelper().modifyContact(new ContactData("Chuck", "Norris", "Walker", "Mr", "Texas Ranger", "Ryan", "444-555-6666", "cool@mail.com", "woker@mail.com", "www.homepage.com", null), 1);
- app.getNavigationHelper().gotoHomePage();
- // delete previously created contact
- app.getContactHelper().selectContactFromTheList(1);
- app.getContactHelper().deleteSelectedContant();
- } else {
- app.getContactHelper().modifyContact(new ContactData("Chuck", "Norris", "Walker", "Mr", "Texas Ranger", "Ryan", "444-555-6666", "cool@mail.com", "woker@mail.com", "www.homepage.com", null), 1);
}
+ app.getContactHelper().modifyContact(new ContactData("Chuck", "Norris", "Walker", "Mr", "Texas Ranger", "Ryan", "444-555-6666", "cool@mail.com", "woker@mail.com", "www.homepage.com", null), 1);
app.getNavigationHelper().gotoHomePage();
-
}
} | 10 | 0.294118 | 1 | 9 |
7b7690a5f693517292d7ed5d635a7b0e49af64cc | lib/fivemat/minitest/unit.rb | lib/fivemat/minitest/unit.rb | require 'minitest/unit'
module Fivemat
module MiniTest
class Unit < ::MiniTest::Unit
def _run_suites(suites, type)
offset = 0
suites.reject do |suite|
filter = options[:filter] || '/./'
filter = Regexp.new $1 if filter =~ /\/(.*)\//
suite.send("#{type}_methods").grep(filter).empty?
end.map do |suite|
print "#{suite} "
result = _run_suite suite, type
puts
report.each_with_index do |msg, i|
puts "%3d) %s" % [offset + i + 1, msg.gsub(/\n/, "\n ")]
end
offset += report.size
report.clear
result
end
end
end
end
end
| require 'minitest/unit'
require 'fivemat/elapsed_time'
module Fivemat
module MiniTest
class Unit < ::MiniTest::Unit
include ElapsedTime
def _run_suites(suites, type)
offset = 0
suites.reject do |suite|
filter = options[:filter] || '/./'
filter = Regexp.new $1 if filter =~ /\/(.*)\//
suite.send("#{type}_methods").grep(filter).empty?
end.map do |suite|
print "#{suite} "
start_time = Time.now
result = _run_suite suite, type
print_elapsed_time $stdout, start_time
puts
report.each_with_index do |msg, i|
puts "%3d) %s" % [offset + i + 1, msg.gsub(/\n/, "\n ")]
end
offset += report.size
report.clear
result
end
end
end
end
end
| Support elapsed time in MiniTest formatter. | Support elapsed time in MiniTest formatter.
| Ruby | mit | tpope/fivemat | ruby | ## Code Before:
require 'minitest/unit'
module Fivemat
module MiniTest
class Unit < ::MiniTest::Unit
def _run_suites(suites, type)
offset = 0
suites.reject do |suite|
filter = options[:filter] || '/./'
filter = Regexp.new $1 if filter =~ /\/(.*)\//
suite.send("#{type}_methods").grep(filter).empty?
end.map do |suite|
print "#{suite} "
result = _run_suite suite, type
puts
report.each_with_index do |msg, i|
puts "%3d) %s" % [offset + i + 1, msg.gsub(/\n/, "\n ")]
end
offset += report.size
report.clear
result
end
end
end
end
end
## Instruction:
Support elapsed time in MiniTest formatter.
## Code After:
require 'minitest/unit'
require 'fivemat/elapsed_time'
module Fivemat
module MiniTest
class Unit < ::MiniTest::Unit
include ElapsedTime
def _run_suites(suites, type)
offset = 0
suites.reject do |suite|
filter = options[:filter] || '/./'
filter = Regexp.new $1 if filter =~ /\/(.*)\//
suite.send("#{type}_methods").grep(filter).empty?
end.map do |suite|
print "#{suite} "
start_time = Time.now
result = _run_suite suite, type
print_elapsed_time $stdout, start_time
puts
report.each_with_index do |msg, i|
puts "%3d) %s" % [offset + i + 1, msg.gsub(/\n/, "\n ")]
end
offset += report.size
report.clear
result
end
end
end
end
end
| require 'minitest/unit'
+ require 'fivemat/elapsed_time'
module Fivemat
module MiniTest
class Unit < ::MiniTest::Unit
+ include ElapsedTime
+
def _run_suites(suites, type)
offset = 0
suites.reject do |suite|
filter = options[:filter] || '/./'
filter = Regexp.new $1 if filter =~ /\/(.*)\//
suite.send("#{type}_methods").grep(filter).empty?
end.map do |suite|
print "#{suite} "
+ start_time = Time.now
result = _run_suite suite, type
+ print_elapsed_time $stdout, start_time
puts
report.each_with_index do |msg, i|
puts "%3d) %s" % [offset + i + 1, msg.gsub(/\n/, "\n ")]
end
offset += report.size
report.clear
result
end
end
end
end
end | 5 | 0.192308 | 5 | 0 |
b41bb694d7b2cc3be083e84f5108689fb75e502e | app/views/projects/index.html.haml | app/views/projects/index.html.haml | - provide :title, 'Projects'
.page-header
%h1= yield :title
- if @projects.any?
%table.table.table-striped
%thead
%tr
%th Name
%th Viewports
%th
= succeed 's' do
%abbr.initialism{ title: 'Uniform Resource Locators' } URL
%tbody
- @projects.each do |project|
%tr
%td= link_to project.name, project, class: 'url-multiline'
%td= project.viewports.map(&:to_s).join(', ')
%td= project.urls.count
= link_to new_project_path, class: 'btn btn-primary' do
%span.glyphicon.glyphicon-plus
Add new Project
| - provide :title, 'Projects'
.page-header
%h1
= yield :title
= link_to new_project_path, class: 'btn btn-primary' do
%span.glyphicon.glyphicon-plus
Add new Project
- if @projects.any?
%table.table.table-striped
%thead
%tr
%th Name
%th Viewports
%th
= succeed 's' do
%abbr.initialism{ title: 'Uniform Resource Locators' } URL
%tbody
- @projects.each do |project|
%tr
%td= link_to project.name, project, class: 'url-multiline'
%td= project.viewports.map(&:to_s).join(', ')
%td= project.urls.count
| Move "Add new Project" button to header | Move "Add new Project" button to header
Since we are not paginating the full list of projects, the button being
below the list of projects could potentially become a problem when you
have a large list of projects. Moving it to the top header resolves
that.
| Haml | mit | kalw/diffux,diffux/diffux,kalw/diffux,kalw/diffux,diffux/diffux,diffux/diffux | haml | ## Code Before:
- provide :title, 'Projects'
.page-header
%h1= yield :title
- if @projects.any?
%table.table.table-striped
%thead
%tr
%th Name
%th Viewports
%th
= succeed 's' do
%abbr.initialism{ title: 'Uniform Resource Locators' } URL
%tbody
- @projects.each do |project|
%tr
%td= link_to project.name, project, class: 'url-multiline'
%td= project.viewports.map(&:to_s).join(', ')
%td= project.urls.count
= link_to new_project_path, class: 'btn btn-primary' do
%span.glyphicon.glyphicon-plus
Add new Project
## Instruction:
Move "Add new Project" button to header
Since we are not paginating the full list of projects, the button being
below the list of projects could potentially become a problem when you
have a large list of projects. Moving it to the top header resolves
that.
## Code After:
- provide :title, 'Projects'
.page-header
%h1
= yield :title
= link_to new_project_path, class: 'btn btn-primary' do
%span.glyphicon.glyphicon-plus
Add new Project
- if @projects.any?
%table.table.table-striped
%thead
%tr
%th Name
%th Viewports
%th
= succeed 's' do
%abbr.initialism{ title: 'Uniform Resource Locators' } URL
%tbody
- @projects.each do |project|
%tr
%td= link_to project.name, project, class: 'url-multiline'
%td= project.viewports.map(&:to_s).join(', ')
%td= project.urls.count
| - provide :title, 'Projects'
.page-header
+ %h1
- %h1= yield :title
? ^^^
+ = yield :title
? ^^
+ = link_to new_project_path, class: 'btn btn-primary' do
+ %span.glyphicon.glyphicon-plus
+ Add new Project
- if @projects.any?
%table.table.table-striped
%thead
%tr
%th Name
%th Viewports
%th
= succeed 's' do
%abbr.initialism{ title: 'Uniform Resource Locators' } URL
%tbody
- @projects.each do |project|
%tr
%td= link_to project.name, project, class: 'url-multiline'
%td= project.viewports.map(&:to_s).join(', ')
%td= project.urls.count
-
- = link_to new_project_path, class: 'btn btn-primary' do
- %span.glyphicon.glyphicon-plus
- Add new Project
- | 11 | 0.423077 | 5 | 6 |
210b5b241ea7ad7759400fd8afadd872a8148a2e | integration/shared_undo_integration_examples.rb | integration/shared_undo_integration_examples.rb | shared_examples "undo integration" do
subject { Undo }
let(:object) { Hash.new hello: :world }
it "stores and restores object" do
uuid = subject.store object
expect(subject.restore uuid).to eq object
end
it "deletes stored object" do
uuid = subject.store object
subject.delete uuid
expect { subject.restore uuid }.to raise_error(KeyError)
end
describe "special cases" do
it "stores and restores nil" do
uuid = subject.store nil
expect(subject.restore uuid).to eq nil
end
it "stores and restores array" do
uuid = subject.store [:foo]
expect(subject.restore uuid).to eq [:foo]
end
end
end
| shared_examples "undo integration" do
subject { Undo }
let(:object) { Hash.new hello: :world }
it "stores and restores object" do
uuid = subject.store object
expect(subject.restore uuid).to eq object
end
it "deletes stored object" do
uuid = subject.store object
subject.delete uuid
expect { subject.restore uuid }.to raise_error(KeyError)
end
it "delete an unexisted key does not raise an error" do
expect { subject.delete "does not exist" }.not_to raise_error
end
describe "special cases" do
it "stores and restores nil" do
uuid = subject.store nil
expect(subject.restore uuid).to eq nil
end
it "stores and restores array" do
uuid = subject.store [1,2,3]
expect(subject.restore uuid).to eq [1,2,3]
end
end
end
| Delete an unexisted key does not raise an error | Delete an unexisted key does not raise an error
| Ruby | mit | AlexParamonov/undo | ruby | ## Code Before:
shared_examples "undo integration" do
subject { Undo }
let(:object) { Hash.new hello: :world }
it "stores and restores object" do
uuid = subject.store object
expect(subject.restore uuid).to eq object
end
it "deletes stored object" do
uuid = subject.store object
subject.delete uuid
expect { subject.restore uuid }.to raise_error(KeyError)
end
describe "special cases" do
it "stores and restores nil" do
uuid = subject.store nil
expect(subject.restore uuid).to eq nil
end
it "stores and restores array" do
uuid = subject.store [:foo]
expect(subject.restore uuid).to eq [:foo]
end
end
end
## Instruction:
Delete an unexisted key does not raise an error
## Code After:
shared_examples "undo integration" do
subject { Undo }
let(:object) { Hash.new hello: :world }
it "stores and restores object" do
uuid = subject.store object
expect(subject.restore uuid).to eq object
end
it "deletes stored object" do
uuid = subject.store object
subject.delete uuid
expect { subject.restore uuid }.to raise_error(KeyError)
end
it "delete an unexisted key does not raise an error" do
expect { subject.delete "does not exist" }.not_to raise_error
end
describe "special cases" do
it "stores and restores nil" do
uuid = subject.store nil
expect(subject.restore uuid).to eq nil
end
it "stores and restores array" do
uuid = subject.store [1,2,3]
expect(subject.restore uuid).to eq [1,2,3]
end
end
end
| shared_examples "undo integration" do
subject { Undo }
let(:object) { Hash.new hello: :world }
it "stores and restores object" do
uuid = subject.store object
expect(subject.restore uuid).to eq object
end
it "deletes stored object" do
uuid = subject.store object
subject.delete uuid
expect { subject.restore uuid }.to raise_error(KeyError)
end
+ it "delete an unexisted key does not raise an error" do
+ expect { subject.delete "does not exist" }.not_to raise_error
+ end
+
describe "special cases" do
it "stores and restores nil" do
uuid = subject.store nil
expect(subject.restore uuid).to eq nil
end
it "stores and restores array" do
- uuid = subject.store [:foo]
? ^^^^
+ uuid = subject.store [1,2,3]
? ^^^^^
- expect(subject.restore uuid).to eq [:foo]
? ^^^^
+ expect(subject.restore uuid).to eq [1,2,3]
? ^^^^^
end
end
end | 8 | 0.296296 | 6 | 2 |
f5700a430d0be3420cd5be437ef0a2fbce8a3211 | README.md | README.md | Gomaotsu otomeList Visualizer will visualize your otome list.

## How to use this application
### Requirements
This application requires that your computer is connected to the internet.
I have tested this application with Java 8 (1.8.0_40) on MacOSX.
### Download and setup
#### Clone this project
```sh
cd ~/git/
git clone https://github.com/funasoul/gomaotsu.git
```
#### Launch Eclipse and import as Maven project.
1. [File] -> [Import] -> [Maven] -> [Existing Maven Project] -> [Next]
2. Navigate to ```~/git/gomaotsu``` -> [Next]
3. Select ```/pom.xml``` which you will see in the dialog.
4. Press [Next], then Eclipse will create a new project.
That's it!
| Gomaotsu otomeList Visualizer will visualize your otome list.

## How to use this application
### Requirements
This application requires that your computer is connected to the internet.
I have tested this application with Java 8 (1.8.0_40) on MacOSX.
### Download and setup
#### Clone this project
```sh
cd ~/git/
git clone https://github.com/funasoul/gomaotsu.git
```
#### Build from command line
```sh
sudo port install maven32 # if your machine doesn't have Maven
sudo port select maven maven32 # if your machine doesn't have Maven
export JAVA_HOME=`/usr/libexec/java_home -v 1.8`
mvn clean package
```
#### Launch from command line
```sh
java -jar target/Gomaotsu-{$version}-SNAPSHOT-jar-with-dependencies.jar
```
Or, you can import this project in Eclipse as follows:
#### Launch Eclipse and import as Maven project.
1. [File] -> [Import] -> [Maven] -> [Existing Maven Project] -> [Next]
2. Navigate to ```~/git/gomaotsu``` -> [Next]
3. Select ```/pom.xml``` which you will see in the dialog.
4. Press [Next], then Eclipse will create a new project.
That's it!
| Add description on how to compile and run | Add description on how to compile and run
| Markdown | apache-2.0 | funasoul/gomaotsu,funasoul/gomaotsu | markdown | ## Code Before:
Gomaotsu otomeList Visualizer will visualize your otome list.

## How to use this application
### Requirements
This application requires that your computer is connected to the internet.
I have tested this application with Java 8 (1.8.0_40) on MacOSX.
### Download and setup
#### Clone this project
```sh
cd ~/git/
git clone https://github.com/funasoul/gomaotsu.git
```
#### Launch Eclipse and import as Maven project.
1. [File] -> [Import] -> [Maven] -> [Existing Maven Project] -> [Next]
2. Navigate to ```~/git/gomaotsu``` -> [Next]
3. Select ```/pom.xml``` which you will see in the dialog.
4. Press [Next], then Eclipse will create a new project.
That's it!
## Instruction:
Add description on how to compile and run
## Code After:
Gomaotsu otomeList Visualizer will visualize your otome list.

## How to use this application
### Requirements
This application requires that your computer is connected to the internet.
I have tested this application with Java 8 (1.8.0_40) on MacOSX.
### Download and setup
#### Clone this project
```sh
cd ~/git/
git clone https://github.com/funasoul/gomaotsu.git
```
#### Build from command line
```sh
sudo port install maven32 # if your machine doesn't have Maven
sudo port select maven maven32 # if your machine doesn't have Maven
export JAVA_HOME=`/usr/libexec/java_home -v 1.8`
mvn clean package
```
#### Launch from command line
```sh
java -jar target/Gomaotsu-{$version}-SNAPSHOT-jar-with-dependencies.jar
```
Or, you can import this project in Eclipse as follows:
#### Launch Eclipse and import as Maven project.
1. [File] -> [Import] -> [Maven] -> [Existing Maven Project] -> [Next]
2. Navigate to ```~/git/gomaotsu``` -> [Next]
3. Select ```/pom.xml``` which you will see in the dialog.
4. Press [Next], then Eclipse will create a new project.
That's it!
| Gomaotsu otomeList Visualizer will visualize your otome list.

## How to use this application
### Requirements
This application requires that your computer is connected to the internet.
I have tested this application with Java 8 (1.8.0_40) on MacOSX.
### Download and setup
#### Clone this project
```sh
cd ~/git/
git clone https://github.com/funasoul/gomaotsu.git
```
+
+ #### Build from command line
+ ```sh
+ sudo port install maven32 # if your machine doesn't have Maven
+ sudo port select maven maven32 # if your machine doesn't have Maven
+ export JAVA_HOME=`/usr/libexec/java_home -v 1.8`
+ mvn clean package
+ ```
+
+ #### Launch from command line
+ ```sh
+ java -jar target/Gomaotsu-{$version}-SNAPSHOT-jar-with-dependencies.jar
+ ```
+
+ Or, you can import this project in Eclipse as follows:
#### Launch Eclipse and import as Maven project.
1. [File] -> [Import] -> [Maven] -> [Existing Maven Project] -> [Next]
2. Navigate to ```~/git/gomaotsu``` -> [Next]
3. Select ```/pom.xml``` which you will see in the dialog.
4. Press [Next], then Eclipse will create a new project.
That's it! | 15 | 0.714286 | 15 | 0 |
9c94432a4831319e474bd3f14ce2bedd19013bb5 | app/observers/update_search_observer.rb | app/observers/update_search_observer.rb | class UpdateSearchObserver < Mongoid::Observer
observe :artefact
def after_save(artefact)
rummageable_artefact = RummageableArtefact.new(artefact)
rummageable_artefact.submit if rummageable_artefact.should_be_indexed?
# Relying on current behaviour where this does not raise errors
# if done more than once, or done on artefacts never put live
rummageable_artefact.delete if artefact.archived?
end
end
| class UpdateSearchObserver < Mongoid::Observer
observe :artefact
def after_save(artefact)
rummageable_artefact = RummageableArtefact.new(artefact)
rummageable_artefact.submit if rummageable_artefact.should_be_indexed?
if artefact.live? && becoming_nonindexed_kind?(artefact)
rummageable_artefact.delete
end
# Relying on current behaviour where this does not raise errors
# if done more than once, or done on artefacts never put live
rummageable_artefact.delete if artefact.archived?
end
def becoming_nonindexed_kind?(artefact)
old_kind = artefact.kind_was
new_kind = artefact.kind
not_a_new_record = ! old_kind.nil?
not_a_new_record &&
(! RummageableArtefact::FORMATS_NOT_TO_INDEX.include?(old_kind)) &&
RummageableArtefact::FORMATS_NOT_TO_INDEX.include?(new_kind)
end
end
| Delete artefacts when changed to nonindexed kinds | Delete artefacts when changed to nonindexed kinds
We have had some artefacts created (incorrectly) as Answers when they should
have been CompletedTransactions. These were not removed automatically. This
change would ensure that they are in future.
| Ruby | mit | alphagov/panopticon,theodi/panopticon,alphagov/panopticon,theodi/panopticon,theodi/panopticon,alphagov/panopticon,alphagov/panopticon,theodi/panopticon | ruby | ## Code Before:
class UpdateSearchObserver < Mongoid::Observer
observe :artefact
def after_save(artefact)
rummageable_artefact = RummageableArtefact.new(artefact)
rummageable_artefact.submit if rummageable_artefact.should_be_indexed?
# Relying on current behaviour where this does not raise errors
# if done more than once, or done on artefacts never put live
rummageable_artefact.delete if artefact.archived?
end
end
## Instruction:
Delete artefacts when changed to nonindexed kinds
We have had some artefacts created (incorrectly) as Answers when they should
have been CompletedTransactions. These were not removed automatically. This
change would ensure that they are in future.
## Code After:
class UpdateSearchObserver < Mongoid::Observer
observe :artefact
def after_save(artefact)
rummageable_artefact = RummageableArtefact.new(artefact)
rummageable_artefact.submit if rummageable_artefact.should_be_indexed?
if artefact.live? && becoming_nonindexed_kind?(artefact)
rummageable_artefact.delete
end
# Relying on current behaviour where this does not raise errors
# if done more than once, or done on artefacts never put live
rummageable_artefact.delete if artefact.archived?
end
def becoming_nonindexed_kind?(artefact)
old_kind = artefact.kind_was
new_kind = artefact.kind
not_a_new_record = ! old_kind.nil?
not_a_new_record &&
(! RummageableArtefact::FORMATS_NOT_TO_INDEX.include?(old_kind)) &&
RummageableArtefact::FORMATS_NOT_TO_INDEX.include?(new_kind)
end
end
| class UpdateSearchObserver < Mongoid::Observer
observe :artefact
def after_save(artefact)
rummageable_artefact = RummageableArtefact.new(artefact)
+
rummageable_artefact.submit if rummageable_artefact.should_be_indexed?
+
+ if artefact.live? && becoming_nonindexed_kind?(artefact)
+ rummageable_artefact.delete
+ end
+
# Relying on current behaviour where this does not raise errors
# if done more than once, or done on artefacts never put live
rummageable_artefact.delete if artefact.archived?
end
+
+ def becoming_nonindexed_kind?(artefact)
+ old_kind = artefact.kind_was
+ new_kind = artefact.kind
+
+ not_a_new_record = ! old_kind.nil?
+ not_a_new_record &&
+ (! RummageableArtefact::FORMATS_NOT_TO_INDEX.include?(old_kind)) &&
+ RummageableArtefact::FORMATS_NOT_TO_INDEX.include?(new_kind)
+ end
end | 16 | 1.454545 | 16 | 0 |
a3d6f2d8a3032452177501d4e5524b768e5256c7 | lib/ext/sawyer/relation.rb | lib/ext/sawyer/relation.rb | require 'sawyer'
module Patch
def href(options=nil)
# see: octokit/octokit.rb#727
name.to_s == "ssh" ? @href : super
end
end
Sawyer::Relation.send(:prepend, Patch)
| require 'sawyer'
module Patch
def href(options=nil)
# Temporary workaround for: https://github.com/octokit/octokit.rb/issues/727
name.to_s == "ssh" ? @href : super
end
end
Sawyer::Relation.send(:prepend, Patch)
| Add proper link to issue | Add proper link to issue
| Ruby | mit | benbalter/octokit.rb,octokit/octokit.rb,pvdb/octokit.rb,LizzHale/octokit.rb,octokit/octokit.rb,JuanitoFatas/octokit.rb,pvdb/octokit.rb,JuanitoFatas/octokit.rb,benbalter/octokit.rb,LizzHale/octokit.rb | ruby | ## Code Before:
require 'sawyer'
module Patch
def href(options=nil)
# see: octokit/octokit.rb#727
name.to_s == "ssh" ? @href : super
end
end
Sawyer::Relation.send(:prepend, Patch)
## Instruction:
Add proper link to issue
## Code After:
require 'sawyer'
module Patch
def href(options=nil)
# Temporary workaround for: https://github.com/octokit/octokit.rb/issues/727
name.to_s == "ssh" ? @href : super
end
end
Sawyer::Relation.send(:prepend, Patch)
| require 'sawyer'
module Patch
def href(options=nil)
- # see: octokit/octokit.rb#727
+ # Temporary workaround for: https://github.com/octokit/octokit.rb/issues/727
name.to_s == "ssh" ? @href : super
end
end
Sawyer::Relation.send(:prepend, Patch)
| 2 | 0.181818 | 1 | 1 |
6eca5fdd20323c6e2edfede29b2e947b1917dcfb | lib/dizby/distributed/array.rb | lib/dizby/distributed/array.rb |
require 'dizby/distributed/object'
require 'dizby/distributed/undumpable'
module Dizby
class DistributedArray
def initialize(ary, server)
@ary =
ary.map do |obj|
if obj.is_a? UndumpableObject
DistributedObject.new(obj, server)
else
self.class.distribute_if_necessary(obj)
end
end
end
def self.distribute_if_necessary(obj)
Marshal.dump(obj)
rescue
DistributedObject.new(obj, server)
else
obj
end
def self._load(str)
Marshal.load(str)
end
def _dump(_)
Marshal.dump(@ary)
end
end
end
|
require 'dizby/distributed/object'
require 'dizby/distributed/undumpable'
module Dizby
class DistributedArray
def initialize(ary, server)
@ary = ary.map { |obj| self.class.distribute_if_necessary(obj, server) }
end
def self.distribute_if_necessary(obj, server)
Marshal.dump(obj)
rescue
server.make_distributed(obj, false)
else
obj
end
private_class_method :distribute_if_necessary
def self._load(str)
Marshal.load(str)
end
def _dump(_)
Marshal.dump(@ary)
end
end
end
| Fix a runtime error and simplify logic in DistributedArray | Fix a runtime error and simplify logic in DistributedArray
| Ruby | mpl-2.0 | rideliner/dizby | ruby | ## Code Before:
require 'dizby/distributed/object'
require 'dizby/distributed/undumpable'
module Dizby
class DistributedArray
def initialize(ary, server)
@ary =
ary.map do |obj|
if obj.is_a? UndumpableObject
DistributedObject.new(obj, server)
else
self.class.distribute_if_necessary(obj)
end
end
end
def self.distribute_if_necessary(obj)
Marshal.dump(obj)
rescue
DistributedObject.new(obj, server)
else
obj
end
def self._load(str)
Marshal.load(str)
end
def _dump(_)
Marshal.dump(@ary)
end
end
end
## Instruction:
Fix a runtime error and simplify logic in DistributedArray
## Code After:
require 'dizby/distributed/object'
require 'dizby/distributed/undumpable'
module Dizby
class DistributedArray
def initialize(ary, server)
@ary = ary.map { |obj| self.class.distribute_if_necessary(obj, server) }
end
def self.distribute_if_necessary(obj, server)
Marshal.dump(obj)
rescue
server.make_distributed(obj, false)
else
obj
end
private_class_method :distribute_if_necessary
def self._load(str)
Marshal.load(str)
end
def _dump(_)
Marshal.dump(@ary)
end
end
end
|
require 'dizby/distributed/object'
require 'dizby/distributed/undumpable'
module Dizby
class DistributedArray
def initialize(ary, server)
- @ary =
- ary.map do |obj|
- if obj.is_a? UndumpableObject
- DistributedObject.new(obj, server)
- else
- self.class.distribute_if_necessary(obj)
? ^
+ @ary = ary.map { |obj| self.class.distribute_if_necessary(obj, server) }
? ++++ + +++++++ + ^^^^^ ++++++++ ++
- end
- end
end
- def self.distribute_if_necessary(obj)
+ def self.distribute_if_necessary(obj, server)
? ++++++++
Marshal.dump(obj)
rescue
- DistributedObject.new(obj, server)
+ server.make_distributed(obj, false)
else
obj
end
+ private_class_method :distribute_if_necessary
def self._load(str)
Marshal.load(str)
end
def _dump(_)
Marshal.dump(@ary)
end
end
end | 14 | 0.411765 | 4 | 10 |
250101a2c7c566f1921ff7f06bef85ca90a1c2f8 | src/braid/search/ui/search_page_styles.cljs | src/braid/search/ui/search_page_styles.cljs | (ns braid.search.ui.search-page-styles
(:require
[garden.units :refer [rem em px]]
[braid.core.client.ui.styles.mixins :as mixins]
[braid.core.client.ui.styles.vars :as vars]))
(def avatar-size (rem 4))
(def card-style
[:>.card
{:margin-bottom "50%"
:max-width (rem 25)}
[:>.header
{:overflow "auto"}
[:>.pill.off
:>.pill.on
{:color [["white" "!important"]]}]
[:>.status
{:display "inline-block"
:margin-left (em 0.5)}
(mixins/mini-text)]
[:>.badges
{:display "inline-block"
:margin [[0 (em 0.5)]]}
[:>.admin::before
{:display "inline-block"
:-webkit-font-smoothing "antialiased"}
(mixins/fontawesome \uf0e3)]]
[:>img.avatar
{:margin-top (px 2)
:border-radius (px 3)
:width avatar-size
:height avatar-size
:background "white"
:float "left"}]]
[:>.local-time
[:&::after
(mixins/fontawesome \uf017)
{:margin-left (em 0.25)}]]])
(def >search-page
[:>.page.search
[:>.threads
card-style]
[:>.content
card-style]])
| (ns braid.search.ui.search-page-styles)
(def >search-page
[:>.page.search])
| Remove unnecessary search page styles | Remove unnecessary search page styles
| Clojure | agpl-3.0 | braidchat/braid,rafd/braid,rafd/braid,braidchat/braid | clojure | ## Code Before:
(ns braid.search.ui.search-page-styles
(:require
[garden.units :refer [rem em px]]
[braid.core.client.ui.styles.mixins :as mixins]
[braid.core.client.ui.styles.vars :as vars]))
(def avatar-size (rem 4))
(def card-style
[:>.card
{:margin-bottom "50%"
:max-width (rem 25)}
[:>.header
{:overflow "auto"}
[:>.pill.off
:>.pill.on
{:color [["white" "!important"]]}]
[:>.status
{:display "inline-block"
:margin-left (em 0.5)}
(mixins/mini-text)]
[:>.badges
{:display "inline-block"
:margin [[0 (em 0.5)]]}
[:>.admin::before
{:display "inline-block"
:-webkit-font-smoothing "antialiased"}
(mixins/fontawesome \uf0e3)]]
[:>img.avatar
{:margin-top (px 2)
:border-radius (px 3)
:width avatar-size
:height avatar-size
:background "white"
:float "left"}]]
[:>.local-time
[:&::after
(mixins/fontawesome \uf017)
{:margin-left (em 0.25)}]]])
(def >search-page
[:>.page.search
[:>.threads
card-style]
[:>.content
card-style]])
## Instruction:
Remove unnecessary search page styles
## Code After:
(ns braid.search.ui.search-page-styles)
(def >search-page
[:>.page.search])
| - (ns braid.search.ui.search-page-styles
+ (ns braid.search.ui.search-page-styles)
? +
- (:require
- [garden.units :refer [rem em px]]
- [braid.core.client.ui.styles.mixins :as mixins]
- [braid.core.client.ui.styles.vars :as vars]))
-
- (def avatar-size (rem 4))
-
- (def card-style
- [:>.card
- {:margin-bottom "50%"
- :max-width (rem 25)}
-
- [:>.header
- {:overflow "auto"}
-
- [:>.pill.off
- :>.pill.on
- {:color [["white" "!important"]]}]
-
- [:>.status
- {:display "inline-block"
- :margin-left (em 0.5)}
- (mixins/mini-text)]
-
- [:>.badges
- {:display "inline-block"
- :margin [[0 (em 0.5)]]}
-
- [:>.admin::before
- {:display "inline-block"
- :-webkit-font-smoothing "antialiased"}
- (mixins/fontawesome \uf0e3)]]
-
- [:>img.avatar
- {:margin-top (px 2)
- :border-radius (px 3)
- :width avatar-size
- :height avatar-size
- :background "white"
- :float "left"}]]
-
- [:>.local-time
-
- [:&::after
- (mixins/fontawesome \uf017)
- {:margin-left (em 0.25)}]]])
(def >search-page
- [:>.page.search
+ [:>.page.search])
? ++
-
- [:>.threads
- card-style]
-
- [:>.content
- card-style]]) | 56 | 1 | 2 | 54 |
40a6f113dab6c3ce63acd776b6c891a1678342ac | lib/accounts-config.js | lib/accounts-config.js | orion.users.configure({
// Allow users to create accounts, but don't give them any admin permissions
forbidClientAccountCreation: false,
defaultPermissions: ['files.upload', 'entity.dreams.personal', 'entity.recipes.personal', 'entity.activities.personal'],
// Set home route path to root (user is redirected here after sign out)
homeRoutePath: '/'
})
// Configure sign in route to use default sign in template (without this, the Orion Admin login template would be used)
AccountsTemplates.configureRoute('signIn');
| orion.users.configure({
// Allow users to create accounts, but don't give them any admin permissions
forbidClientAccountCreation: false,
defaultPermissions: ['files.upload', 'entity.dreams.personal', 'entity.recipes.personal', 'entity.activities.personal'],
// Set home route path to root (user is redirected here after sign out)
homeRoutePath: '/'
})
// Configure sign in route to use default sign in template (without this, the Orion Admin login template would be used)
AccountsTemplates.configureRoute('signIn', {
// Unfortunately, AccountsTemplates routes are not added to the Router object before our manual routes in router.js, so we have to use a path that does not conflict with the slug routing for content pages defined there
path: '/users/accounts/sign-in'
});
| Fix routing conflict between sign-in and slug-based content pages | Fix routing conflict between sign-in and slug-based content pages
| JavaScript | apache-2.0 | GeoMakers/geomakers.org,GeoMakers/geomakers.org | javascript | ## Code Before:
orion.users.configure({
// Allow users to create accounts, but don't give them any admin permissions
forbidClientAccountCreation: false,
defaultPermissions: ['files.upload', 'entity.dreams.personal', 'entity.recipes.personal', 'entity.activities.personal'],
// Set home route path to root (user is redirected here after sign out)
homeRoutePath: '/'
})
// Configure sign in route to use default sign in template (without this, the Orion Admin login template would be used)
AccountsTemplates.configureRoute('signIn');
## Instruction:
Fix routing conflict between sign-in and slug-based content pages
## Code After:
orion.users.configure({
// Allow users to create accounts, but don't give them any admin permissions
forbidClientAccountCreation: false,
defaultPermissions: ['files.upload', 'entity.dreams.personal', 'entity.recipes.personal', 'entity.activities.personal'],
// Set home route path to root (user is redirected here after sign out)
homeRoutePath: '/'
})
// Configure sign in route to use default sign in template (without this, the Orion Admin login template would be used)
AccountsTemplates.configureRoute('signIn', {
// Unfortunately, AccountsTemplates routes are not added to the Router object before our manual routes in router.js, so we have to use a path that does not conflict with the slug routing for content pages defined there
path: '/users/accounts/sign-in'
});
| orion.users.configure({
// Allow users to create accounts, but don't give them any admin permissions
forbidClientAccountCreation: false,
defaultPermissions: ['files.upload', 'entity.dreams.personal', 'entity.recipes.personal', 'entity.activities.personal'],
// Set home route path to root (user is redirected here after sign out)
homeRoutePath: '/'
})
// Configure sign in route to use default sign in template (without this, the Orion Admin login template would be used)
- AccountsTemplates.configureRoute('signIn');
? ^^
+ AccountsTemplates.configureRoute('signIn', {
? ^^^
+
+ // Unfortunately, AccountsTemplates routes are not added to the Router object before our manual routes in router.js, so we have to use a path that does not conflict with the slug routing for content pages defined there
+ path: '/users/accounts/sign-in'
+ }); | 6 | 0.545455 | 5 | 1 |
b5fd5ac764fc7a17b892d57c468ba9e30f0566ce | package.json | package.json | {
"name": "pension_guidance",
"dependencies": {
"bower": "*",
"travis-notify-geckoboard": "*"
},
"scripts": {
"postinstall": "./node_modules/bower/bin/bower install"
}
}
| {
"name": "pension_guidance",
"dependencies": {
"bower": "*",
"travis-notify-geckoboard": "git://github.com/guidance-guarantee-programme/travis-notify-geckoboard#item-must-be-array"
},
"scripts": {
"postinstall": "./node_modules/bower/bin/bower install"
}
}
| Use our fork of travis-notify-geckoboard | Use our fork of travis-notify-geckoboard
This currently broken but we've a pull request open to fix it:
https://github.com/contentful/travis-notify-geckoboard/pull/1
| JSON | mit | guidance-guarantee-programme/pension_guidance,guidance-guarantee-programme/pension_guidance,guidance-guarantee-programme/pension_guidance | json | ## Code Before:
{
"name": "pension_guidance",
"dependencies": {
"bower": "*",
"travis-notify-geckoboard": "*"
},
"scripts": {
"postinstall": "./node_modules/bower/bin/bower install"
}
}
## Instruction:
Use our fork of travis-notify-geckoboard
This currently broken but we've a pull request open to fix it:
https://github.com/contentful/travis-notify-geckoboard/pull/1
## Code After:
{
"name": "pension_guidance",
"dependencies": {
"bower": "*",
"travis-notify-geckoboard": "git://github.com/guidance-guarantee-programme/travis-notify-geckoboard#item-must-be-array"
},
"scripts": {
"postinstall": "./node_modules/bower/bin/bower install"
}
}
| {
"name": "pension_guidance",
"dependencies": {
"bower": "*",
- "travis-notify-geckoboard": "*"
+ "travis-notify-geckoboard": "git://github.com/guidance-guarantee-programme/travis-notify-geckoboard#item-must-be-array"
},
"scripts": {
"postinstall": "./node_modules/bower/bin/bower install"
}
} | 2 | 0.2 | 1 | 1 |
76201053f41651f4ac290567f3d20a2170260b06 | docs/adr/adr-logging-support.md | docs/adr/adr-logging-support.md |
The library move away from the idea to use `System.out.println("Your message")`
because it is a bad practice. You can search on the network many articles about
this fact.
The library follow some Best practices in the software industry and use
the dependency SLF4J (Simple Logging Facade for Java).
The Simple Logging Facade for Java (SLF4J) serves as
a simple facade or abstraction for various logging frameworks
(e.g. java.util.logging, logback, log4j) allowing
the end user to plug in the desired logging framework at deployment time.
The library uses SLF4J in the whole project and later the user has to
choose the final implementation. In the example, the development use
Logback but any user could use any logging framework.
For testing purposes, enable traces is a good practice but for
production projects try to disable some levels.
Generate many traces in your logs impact in your performance.
## Links
https://javarevisited.blogspot.com/2016/06/why-use-log4j-logging-vs.html
https://www.slf4j.org/
https://logback.qos.ch/ |
The library move away from the idea to use `System.out.println("Your message")`
because using `stdout` is not the same as using `stderr` or a logging interface.
The reason you shouldn't use `System.out` is that we depend on output redirection
provided by ev3dev's brickrun. `stderr` is redirected via SSH to the user
when running remotely. If you use `System.out`, the message will be
displayed on the brick display only. On the other hand, this might
be intentional, in which case you should use `System.out`.
The library follow some Best practices in the software industry and use
the dependency SLF4J (Simple Logging Facade for Java).
The Simple Logging Facade for Java (SLF4J) serves as
a simple facade or abstraction for various logging frameworks
(e.g. java.util.logging, logback, log4j) allowing
the end user to plug in the desired logging framework at deployment time.
The library uses SLF4J in the whole project and later the user has to
choose the final implementation. In the example, the development use
Logback but any user could use any logging framework.
For testing purposes, enable traces is a good practice but for
production projects try to disable some levels.
Generate many traces in your logs impact in your performance.
## Links
https://javarevisited.blogspot.com/2016/06/why-use-log4j-logging-vs.html
https://www.slf4j.org/
https://logback.qos.ch/
| Add better explanation to Logging ADR | Add better explanation to Logging ADR
| Markdown | mit | jabrena/ev3dev-lang-java,ev3dev-lang-java/ev3dev-lang-java | markdown | ## Code Before:
The library move away from the idea to use `System.out.println("Your message")`
because it is a bad practice. You can search on the network many articles about
this fact.
The library follow some Best practices in the software industry and use
the dependency SLF4J (Simple Logging Facade for Java).
The Simple Logging Facade for Java (SLF4J) serves as
a simple facade or abstraction for various logging frameworks
(e.g. java.util.logging, logback, log4j) allowing
the end user to plug in the desired logging framework at deployment time.
The library uses SLF4J in the whole project and later the user has to
choose the final implementation. In the example, the development use
Logback but any user could use any logging framework.
For testing purposes, enable traces is a good practice but for
production projects try to disable some levels.
Generate many traces in your logs impact in your performance.
## Links
https://javarevisited.blogspot.com/2016/06/why-use-log4j-logging-vs.html
https://www.slf4j.org/
https://logback.qos.ch/
## Instruction:
Add better explanation to Logging ADR
## Code After:
The library move away from the idea to use `System.out.println("Your message")`
because using `stdout` is not the same as using `stderr` or a logging interface.
The reason you shouldn't use `System.out` is that we depend on output redirection
provided by ev3dev's brickrun. `stderr` is redirected via SSH to the user
when running remotely. If you use `System.out`, the message will be
displayed on the brick display only. On the other hand, this might
be intentional, in which case you should use `System.out`.
The library follow some Best practices in the software industry and use
the dependency SLF4J (Simple Logging Facade for Java).
The Simple Logging Facade for Java (SLF4J) serves as
a simple facade or abstraction for various logging frameworks
(e.g. java.util.logging, logback, log4j) allowing
the end user to plug in the desired logging framework at deployment time.
The library uses SLF4J in the whole project and later the user has to
choose the final implementation. In the example, the development use
Logback but any user could use any logging framework.
For testing purposes, enable traces is a good practice but for
production projects try to disable some levels.
Generate many traces in your logs impact in your performance.
## Links
https://javarevisited.blogspot.com/2016/06/why-use-log4j-logging-vs.html
https://www.slf4j.org/
https://logback.qos.ch/
|
The library move away from the idea to use `System.out.println("Your message")`
- because it is a bad practice. You can search on the network many articles about
- this fact.
+ because using `stdout` is not the same as using `stderr` or a logging interface.
+
+ The reason you shouldn't use `System.out` is that we depend on output redirection
+ provided by ev3dev's brickrun. `stderr` is redirected via SSH to the user
+ when running remotely. If you use `System.out`, the message will be
+ displayed on the brick display only. On the other hand, this might
+ be intentional, in which case you should use `System.out`.
The library follow some Best practices in the software industry and use
the dependency SLF4J (Simple Logging Facade for Java).
- The Simple Logging Facade for Java (SLF4J) serves as
? -
+ The Simple Logging Facade for Java (SLF4J) serves as
- a simple facade or abstraction for various logging frameworks
? -
+ a simple facade or abstraction for various logging frameworks
- (e.g. java.util.logging, logback, log4j) allowing
? -
+ (e.g. java.util.logging, logback, log4j) allowing
- the end user to plug in the desired logging framework at deployment time.
? -
+ the end user to plug in the desired logging framework at deployment time.
The library uses SLF4J in the whole project and later the user has to
- choose the final implementation. In the example, the development use
? -
+ choose the final implementation. In the example, the development use
Logback but any user could use any logging framework.
- For testing purposes, enable traces is a good practice but for
? -
+ For testing purposes, enable traces is a good practice but for
- production projects try to disable some levels.
? -
+ production projects try to disable some levels.
Generate many traces in your logs impact in your performance.
## Links
https://javarevisited.blogspot.com/2016/06/why-use-log4j-logging-vs.html
https://www.slf4j.org/
https://logback.qos.ch/ | 23 | 0.884615 | 14 | 9 |
3909ff306bd2949fbda42e7a1933123e9e62eba3 | routes/api.js | routes/api.js | var Weather = require('../modules/openweathermap');
var express = require('express');
var util = require('util');
var router = express.Router();
//new Weather API using the key defaulted from the environment
var weather = new Weather();
/* GET home page. */
router.post('/weather', function(req, res, next) {
var zip = req.body.text;
var fahrenheit = !req.body.command.includes('c');
if (!zip || !isValidUSZip(zip.trim())) {
res.json({
text: "I'm sorry I didn't understand. Please use a US zip"
});
}
weather.getWeatherByZip(zip.trim(), function handleResponse(err, result) {
if (err) {
res.json({
text: "I'm sorry I couldn't process your request. Please try again later"
});
} else {
res.json({
response_type: "in_channel",
text: util.format('Here\'s the weather in %s\n%s°%s %s',
result.city,
fahrenheit ? result.tempF.toFixed(1) : result.tempC.toFixed(1),
fahrenheit ? 'F' : 'C',
result.description.join(', '))
});
}
});
});
function isValidUSZip(zip) {
return /^\d{5}(-\d{4})?$/.test(zip);
}
module.exports = router; | var Weather = require('../modules/openweathermap');
var express = require('express');
var util = require('util');
var router = express.Router();
//new Weather API using the key defaulted from the environment
var weather = new Weather();
/* GET home page. */
router.post('/weather', function(req, res, next) {
var zip = req.body.text;
var fahrenheit = req.body.command.indexOf('c') == -1;
if (!zip || !isValidUSZip(zip.trim())) {
res.json({
text: "I'm sorry I didn't understand. Please use a US zip"
});
}
weather.getWeatherByZip(zip.trim(), function handleResponse(err, result) {
if (err) {
res.json({
text: "I'm sorry I couldn't process your request. Please try again later"
});
} else {
res.json({
response_type: "in_channel",
text: util.format('Here\'s the weather in %s\n%s°%s %s',
result.city,
fahrenheit ? result.tempF.toFixed(1) : result.tempC.toFixed(1),
fahrenheit ? 'F' : 'C',
result.description.join(', '))
});
}
});
});
function isValidUSZip(zip) {
return /^\d{5}(-\d{4})?$/.test(zip);
}
module.exports = router;
| Fix to use indexOf whihc is more commonly available than includes | Fix to use indexOf whihc is more commonly available than includes
| JavaScript | mit | tschaible/slack-weather,tschaible/slack-weather | javascript | ## Code Before:
var Weather = require('../modules/openweathermap');
var express = require('express');
var util = require('util');
var router = express.Router();
//new Weather API using the key defaulted from the environment
var weather = new Weather();
/* GET home page. */
router.post('/weather', function(req, res, next) {
var zip = req.body.text;
var fahrenheit = !req.body.command.includes('c');
if (!zip || !isValidUSZip(zip.trim())) {
res.json({
text: "I'm sorry I didn't understand. Please use a US zip"
});
}
weather.getWeatherByZip(zip.trim(), function handleResponse(err, result) {
if (err) {
res.json({
text: "I'm sorry I couldn't process your request. Please try again later"
});
} else {
res.json({
response_type: "in_channel",
text: util.format('Here\'s the weather in %s\n%s°%s %s',
result.city,
fahrenheit ? result.tempF.toFixed(1) : result.tempC.toFixed(1),
fahrenheit ? 'F' : 'C',
result.description.join(', '))
});
}
});
});
function isValidUSZip(zip) {
return /^\d{5}(-\d{4})?$/.test(zip);
}
module.exports = router;
## Instruction:
Fix to use indexOf whihc is more commonly available than includes
## Code After:
var Weather = require('../modules/openweathermap');
var express = require('express');
var util = require('util');
var router = express.Router();
//new Weather API using the key defaulted from the environment
var weather = new Weather();
/* GET home page. */
router.post('/weather', function(req, res, next) {
var zip = req.body.text;
var fahrenheit = req.body.command.indexOf('c') == -1;
if (!zip || !isValidUSZip(zip.trim())) {
res.json({
text: "I'm sorry I didn't understand. Please use a US zip"
});
}
weather.getWeatherByZip(zip.trim(), function handleResponse(err, result) {
if (err) {
res.json({
text: "I'm sorry I couldn't process your request. Please try again later"
});
} else {
res.json({
response_type: "in_channel",
text: util.format('Here\'s the weather in %s\n%s°%s %s',
result.city,
fahrenheit ? result.tempF.toFixed(1) : result.tempC.toFixed(1),
fahrenheit ? 'F' : 'C',
result.description.join(', '))
});
}
});
});
function isValidUSZip(zip) {
return /^\d{5}(-\d{4})?$/.test(zip);
}
module.exports = router;
| var Weather = require('../modules/openweathermap');
var express = require('express');
var util = require('util');
var router = express.Router();
//new Weather API using the key defaulted from the environment
var weather = new Weather();
/* GET home page. */
router.post('/weather', function(req, res, next) {
var zip = req.body.text;
- var fahrenheit = !req.body.command.includes('c');
? - --- ^
+ var fahrenheit = req.body.command.indexOf('c') == -1;
? ^^^ ++++++
if (!zip || !isValidUSZip(zip.trim())) {
res.json({
text: "I'm sorry I didn't understand. Please use a US zip"
});
}
weather.getWeatherByZip(zip.trim(), function handleResponse(err, result) {
if (err) {
res.json({
text: "I'm sorry I couldn't process your request. Please try again later"
});
} else {
res.json({
response_type: "in_channel",
text: util.format('Here\'s the weather in %s\n%s°%s %s',
result.city,
fahrenheit ? result.tempF.toFixed(1) : result.tempC.toFixed(1),
fahrenheit ? 'F' : 'C',
result.description.join(', '))
});
}
});
});
function isValidUSZip(zip) {
return /^\d{5}(-\d{4})?$/.test(zip);
}
module.exports = router; | 2 | 0.047619 | 1 | 1 |
1c0ea1a102ed91342ce0d609733426b8a07cd67d | easy_thumbnails/tests/apps.py | easy_thumbnails/tests/apps.py | from django.apps import AppConfig
class EasyThumbnailsTestConfig(AppConfig):
name = 'easy_thumbnails.tests'
label = 'easy_thumbnails_tests'
| try:
from django.apps import AppConfig
except ImportError:
# Early Django versions import everything in test, avoid the failure due to
# AppConfig only existing in 1.7+
AppConfig = object
class EasyThumbnailsTestConfig(AppConfig):
name = 'easy_thumbnails.tests'
label = 'easy_thumbnails_tests'
| Fix an import error for old django versions | Fix an import error for old django versions
Fixes #371
| Python | bsd-3-clause | SmileyChris/easy-thumbnails | python | ## Code Before:
from django.apps import AppConfig
class EasyThumbnailsTestConfig(AppConfig):
name = 'easy_thumbnails.tests'
label = 'easy_thumbnails_tests'
## Instruction:
Fix an import error for old django versions
Fixes #371
## Code After:
try:
from django.apps import AppConfig
except ImportError:
# Early Django versions import everything in test, avoid the failure due to
# AppConfig only existing in 1.7+
AppConfig = object
class EasyThumbnailsTestConfig(AppConfig):
name = 'easy_thumbnails.tests'
label = 'easy_thumbnails_tests'
| + try:
- from django.apps import AppConfig
+ from django.apps import AppConfig
? ++++
+ except ImportError:
+ # Early Django versions import everything in test, avoid the failure due to
+ # AppConfig only existing in 1.7+
+ AppConfig = object
class EasyThumbnailsTestConfig(AppConfig):
name = 'easy_thumbnails.tests'
label = 'easy_thumbnails_tests' | 7 | 1.166667 | 6 | 1 |
63b38bdb6b304d495ed17ae2defa1f7512491c5c | aritgeo/src/aritgeo.js | aritgeo/src/aritgeo.js | 'use strict'
module.exports = {
aritGeo: function(numlist) {
if (!Array.isArray(numlist)) {
return null;
}
if (numlist.length === 0) {
return 0;
}
if (numlist.length === 1 || numlist.length === 2) {
return -1;
}
}
} | 'use strict'
module.exports = {
aritGeo: function(numlist) {
if (!Array.isArray(numlist)) {
return null;
}
if (numlist.length === 0) {
return 0;
}
if (numlist.length === 1 || numlist.length === 2) {
return -1;
}
if (module.compute.isArithmetic(numlist.slice(1), numlist[1] - numlist[0])) {
return 'Arithmetic';
}
}
}
module.compute = {
isArithmetic: function(numlist, diff) {
if (numlist[1] - numlist[0] === diff) {
if (numlist.length === 2) {
return true;
} else return true && this.isArithmetic(numlist.slice(1), numlist[1] - numlist[0]);
} else return false;
}
} | Implement case for arithmetic sequences | Implement case for arithmetic sequences
| JavaScript | mit | princess-essien/andela-bootcamp-slc | javascript | ## Code Before:
'use strict'
module.exports = {
aritGeo: function(numlist) {
if (!Array.isArray(numlist)) {
return null;
}
if (numlist.length === 0) {
return 0;
}
if (numlist.length === 1 || numlist.length === 2) {
return -1;
}
}
}
## Instruction:
Implement case for arithmetic sequences
## Code After:
'use strict'
module.exports = {
aritGeo: function(numlist) {
if (!Array.isArray(numlist)) {
return null;
}
if (numlist.length === 0) {
return 0;
}
if (numlist.length === 1 || numlist.length === 2) {
return -1;
}
if (module.compute.isArithmetic(numlist.slice(1), numlist[1] - numlist[0])) {
return 'Arithmetic';
}
}
}
module.compute = {
isArithmetic: function(numlist, diff) {
if (numlist[1] - numlist[0] === diff) {
if (numlist.length === 2) {
return true;
} else return true && this.isArithmetic(numlist.slice(1), numlist[1] - numlist[0]);
} else return false;
}
} | 'use strict'
module.exports = {
aritGeo: function(numlist) {
if (!Array.isArray(numlist)) {
return null;
}
if (numlist.length === 0) {
return 0;
}
if (numlist.length === 1 || numlist.length === 2) {
return -1;
}
+
+ if (module.compute.isArithmetic(numlist.slice(1), numlist[1] - numlist[0])) {
+ return 'Arithmetic';
+ }
}
}
+
+ module.compute = {
+ isArithmetic: function(numlist, diff) {
+ if (numlist[1] - numlist[0] === diff) {
+ if (numlist.length === 2) {
+ return true;
+ } else return true && this.isArithmetic(numlist.slice(1), numlist[1] - numlist[0]);
+ } else return false;
+ }
+ } | 14 | 0.823529 | 14 | 0 |
36bce08da51e86dd8a6fb9860ae6c618121d40e3 | inkweaver/app/login/login.component.ts | inkweaver/app/login/login.component.ts | import { Component, OnInit } from '@angular/core';
import { Router } from '@angular/router';
import { LoginService } from './login.service';
import { UserService } from '../user/user.service';
import { ApiService } from '../shared/api.service';
import { ParserService } from '../shared/parser.service';
@Component({
selector: 'login',
templateUrl: './app/login/login.component.html'
})
export class LoginComponent {
private data: any;
private login: any;
constructor(
private router: Router,
private loginService: LoginService,
private userService: UserService,
private apiService: ApiService,
private parserService: ParserService) { }
ngOnInit() {
this.data = this.apiService.data;
this.data.menuItems = [
{ label: 'About', routerLink: ['/about'] }
];
this.login = {
username: '',
password: ''
};
}
public signIn() {
this.loginService.login(this.login.username, this.login.password)
.subscribe(response => {
let cookie: string = response.headers.get('Set-Cookie');
console.log(cookie);
document.cookie = cookie;
this.apiService.connect();
this.userService.getUserPreferences();
this.userService.getUserStories();
this.userService.getUserWikis();
this.router.navigate(['/user']);
});
return false;
}
}
| import { Component, OnInit } from '@angular/core';
import { Router } from '@angular/router';
import { LoginService } from './login.service';
import { UserService } from '../user/user.service';
import { ApiService } from '../shared/api.service';
import { ParserService } from '../shared/parser.service';
@Component({
selector: 'login',
templateUrl: './app/login/login.component.html'
})
export class LoginComponent {
private data: any;
private login: any;
constructor(
private router: Router,
private loginService: LoginService,
private userService: UserService,
private apiService: ApiService,
private parserService: ParserService) { }
ngOnInit() {
this.data = this.apiService.data;
this.data.menuItems = [
{ label: 'About', routerLink: ['/about'] }
];
this.login = {
username: '',
password: ''
};
}
public signIn() {
this.loginService.login(this.login.username, this.login.password)
.subscribe(response => {
console.log(document.cookie);
this.apiService.connect();
this.userService.getUserPreferences();
this.userService.getUserStories();
this.userService.getUserWikis();
this.router.navigate(['/user']);
});
return false;
}
}
| Document cookie hopefully not null | Document cookie hopefully not null
| TypeScript | mpl-2.0 | Plotypus/InkWeaver-Front,Plotypus/InkWeaver-Front,Plotypus/InkWeaver-Front,Plotypus/InkWeaver-Front | typescript | ## Code Before:
import { Component, OnInit } from '@angular/core';
import { Router } from '@angular/router';
import { LoginService } from './login.service';
import { UserService } from '../user/user.service';
import { ApiService } from '../shared/api.service';
import { ParserService } from '../shared/parser.service';
@Component({
selector: 'login',
templateUrl: './app/login/login.component.html'
})
export class LoginComponent {
private data: any;
private login: any;
constructor(
private router: Router,
private loginService: LoginService,
private userService: UserService,
private apiService: ApiService,
private parserService: ParserService) { }
ngOnInit() {
this.data = this.apiService.data;
this.data.menuItems = [
{ label: 'About', routerLink: ['/about'] }
];
this.login = {
username: '',
password: ''
};
}
public signIn() {
this.loginService.login(this.login.username, this.login.password)
.subscribe(response => {
let cookie: string = response.headers.get('Set-Cookie');
console.log(cookie);
document.cookie = cookie;
this.apiService.connect();
this.userService.getUserPreferences();
this.userService.getUserStories();
this.userService.getUserWikis();
this.router.navigate(['/user']);
});
return false;
}
}
## Instruction:
Document cookie hopefully not null
## Code After:
import { Component, OnInit } from '@angular/core';
import { Router } from '@angular/router';
import { LoginService } from './login.service';
import { UserService } from '../user/user.service';
import { ApiService } from '../shared/api.service';
import { ParserService } from '../shared/parser.service';
@Component({
selector: 'login',
templateUrl: './app/login/login.component.html'
})
export class LoginComponent {
private data: any;
private login: any;
constructor(
private router: Router,
private loginService: LoginService,
private userService: UserService,
private apiService: ApiService,
private parserService: ParserService) { }
ngOnInit() {
this.data = this.apiService.data;
this.data.menuItems = [
{ label: 'About', routerLink: ['/about'] }
];
this.login = {
username: '',
password: ''
};
}
public signIn() {
this.loginService.login(this.login.username, this.login.password)
.subscribe(response => {
console.log(document.cookie);
this.apiService.connect();
this.userService.getUserPreferences();
this.userService.getUserStories();
this.userService.getUserWikis();
this.router.navigate(['/user']);
});
return false;
}
}
| import { Component, OnInit } from '@angular/core';
import { Router } from '@angular/router';
import { LoginService } from './login.service';
import { UserService } from '../user/user.service';
import { ApiService } from '../shared/api.service';
import { ParserService } from '../shared/parser.service';
@Component({
selector: 'login',
templateUrl: './app/login/login.component.html'
})
export class LoginComponent {
private data: any;
private login: any;
constructor(
private router: Router,
private loginService: LoginService,
private userService: UserService,
private apiService: ApiService,
private parserService: ParserService) { }
ngOnInit() {
this.data = this.apiService.data;
this.data.menuItems = [
{ label: 'About', routerLink: ['/about'] }
];
this.login = {
username: '',
password: ''
};
}
public signIn() {
this.loginService.login(this.login.username, this.login.password)
.subscribe(response => {
- let cookie: string = response.headers.get('Set-Cookie');
- console.log(cookie);
+ console.log(document.cookie);
? +++++++++
- document.cookie = cookie;
this.apiService.connect();
this.userService.getUserPreferences();
this.userService.getUserStories();
this.userService.getUserWikis();
this.router.navigate(['/user']);
});
return false;
}
} | 4 | 0.078431 | 1 | 3 |
4c1a62028dc0dddaa47600f4bffe484a76398a6e | setup.sh | setup.sh | set -e
# Initialize project dependency directories and the shrinkwrap file location.
init(){
NODE_DIR=node_modules
SHRINKWRAP=npm-shrinkwrap.json
echo 'npm components directory:' $NODE_DIR
}
# Clean project dependencies.
clean(){
# If the node directory already exist,
# clear them so we know we're working with a clean
# slate of the dependencies listed in package.json.
if [ -d $NODE_DIR ]; then
echo 'Removing project dependency directories...'
rm -rf $NODE_DIR
fi
echo 'Project dependencies have been removed.'
}
# Install project dependencies.
install(){
echo 'Installing project dependencies...'
npm install
npm install --save --save-exact box-sizing-polyfill@0.1.0 jquery.easing@1.3.2 normalize-css@2.3.1 jquery@1.11.3 normalize-legacy-addon@0.1.0
if [ -f $SHRINKWRAP ]; then
echo 'Removing project dependency directories...'
rm -f $SHRINKWRAP
fi
echo 'Shrinkwrapping project dependencies...'
npm shrinkwrap
}
# Run tasks to build the project for distribution.
build(){
echo 'Building project...'
gulp clean
gulp build
}
init
clean
install
build
| set -e
# Initialize project dependency directories and the shrinkwrap file location.
init(){
NODE_DIR=node_modules
SHRINKWRAP=npm-shrinkwrap.json
echo 'npm components directory:' $NODE_DIR
}
# Clean project dependencies.
clean(){
# If the node directory already exist,
# clear them so we know we're working with a clean
# slate of the dependencies listed in package.json.
if [ -d $NODE_DIR ]; then
echo 'Removing project dependency directories...'
rm -rf $NODE_DIR
fi
echo 'Project dependencies have been removed.'
}
# Install project dependencies.
install(){
echo 'Installing project dependencies...'
npm install
if [ -f $SHRINKWRAP ]; then
echo 'Removing project dependency directories...'
rm -f $SHRINKWRAP
fi
echo 'Shrinkwrapping project dependencies...'
npm shrinkwrap
}
# Run tasks to build the project for distribution.
build(){
echo 'Building project...'
gulp clean
gulp build
}
init
clean
install
build
| Remove unneeded NPM install line | Remove unneeded NPM install line
| Shell | cc0-1.0 | mistergone/college-costs,mistergone/college-costs,mistergone/college-costs,mistergone/college-costs | shell | ## Code Before:
set -e
# Initialize project dependency directories and the shrinkwrap file location.
init(){
NODE_DIR=node_modules
SHRINKWRAP=npm-shrinkwrap.json
echo 'npm components directory:' $NODE_DIR
}
# Clean project dependencies.
clean(){
# If the node directory already exist,
# clear them so we know we're working with a clean
# slate of the dependencies listed in package.json.
if [ -d $NODE_DIR ]; then
echo 'Removing project dependency directories...'
rm -rf $NODE_DIR
fi
echo 'Project dependencies have been removed.'
}
# Install project dependencies.
install(){
echo 'Installing project dependencies...'
npm install
npm install --save --save-exact box-sizing-polyfill@0.1.0 jquery.easing@1.3.2 normalize-css@2.3.1 jquery@1.11.3 normalize-legacy-addon@0.1.0
if [ -f $SHRINKWRAP ]; then
echo 'Removing project dependency directories...'
rm -f $SHRINKWRAP
fi
echo 'Shrinkwrapping project dependencies...'
npm shrinkwrap
}
# Run tasks to build the project for distribution.
build(){
echo 'Building project...'
gulp clean
gulp build
}
init
clean
install
build
## Instruction:
Remove unneeded NPM install line
## Code After:
set -e
# Initialize project dependency directories and the shrinkwrap file location.
init(){
NODE_DIR=node_modules
SHRINKWRAP=npm-shrinkwrap.json
echo 'npm components directory:' $NODE_DIR
}
# Clean project dependencies.
clean(){
# If the node directory already exist,
# clear them so we know we're working with a clean
# slate of the dependencies listed in package.json.
if [ -d $NODE_DIR ]; then
echo 'Removing project dependency directories...'
rm -rf $NODE_DIR
fi
echo 'Project dependencies have been removed.'
}
# Install project dependencies.
install(){
echo 'Installing project dependencies...'
npm install
if [ -f $SHRINKWRAP ]; then
echo 'Removing project dependency directories...'
rm -f $SHRINKWRAP
fi
echo 'Shrinkwrapping project dependencies...'
npm shrinkwrap
}
# Run tasks to build the project for distribution.
build(){
echo 'Building project...'
gulp clean
gulp build
}
init
clean
install
build
| set -e
# Initialize project dependency directories and the shrinkwrap file location.
init(){
NODE_DIR=node_modules
SHRINKWRAP=npm-shrinkwrap.json
echo 'npm components directory:' $NODE_DIR
}
# Clean project dependencies.
clean(){
# If the node directory already exist,
# clear them so we know we're working with a clean
# slate of the dependencies listed in package.json.
if [ -d $NODE_DIR ]; then
echo 'Removing project dependency directories...'
rm -rf $NODE_DIR
fi
echo 'Project dependencies have been removed.'
}
# Install project dependencies.
install(){
echo 'Installing project dependencies...'
npm install
- npm install --save --save-exact box-sizing-polyfill@0.1.0 jquery.easing@1.3.2 normalize-css@2.3.1 jquery@1.11.3 normalize-legacy-addon@0.1.0
if [ -f $SHRINKWRAP ]; then
echo 'Removing project dependency directories...'
rm -f $SHRINKWRAP
fi
echo 'Shrinkwrapping project dependencies...'
npm shrinkwrap
}
# Run tasks to build the project for distribution.
build(){
echo 'Building project...'
gulp clean
gulp build
}
init
clean
install
build | 1 | 0.021739 | 0 | 1 |
e845f2dd24c03d11baaed494c49d39abe8d45927 | src/components/video_detail.js | src/components/video_detail.js | import React from 'react';
// import VideoListItem from './video_list_item';
const VideoDetail = ({video}) => {
const videoId = video.id.videoId;
const url = `https://www.youtube.com/embed/${videoId}`;
return (
<div className="video-detail col-md-8">
<div className="embed-responsive embed-responsive-16by9">
<iframe className="embed-responsive-item" src={url}></iframe>
</div>
<div className="details">
<div>{video.snippet.title}</div>
<div>{video.snippet.description}</div>
</div>
</div>
);
};
export default VideoDetail;
| import React from 'react';
// import VideoListItem from './video_list_item';
const VideoDetail = ({video}) => {
if (!video) {
return <div>Loading...</div>
}
const videoId = video.id.videoId;
const url = `https://www.youtube.com/embed/${videoId}`;
return (
<div className="video-detail col-md-8">
<div className="embed-responsive embed-responsive-16by9">
<iframe className="embed-responsive-item" src={url}></iframe>
</div>
<div className="details">
<div>{video.snippet.title}</div>
<div>{video.snippet.description}</div>
</div>
</div>
);
};
export default VideoDetail;
| Add loading when videos are still loading | Add loading when videos are still loading
| JavaScript | mit | izabelka/redux-simple-starter,izabelka/redux-simple-starter | javascript | ## Code Before:
import React from 'react';
// import VideoListItem from './video_list_item';
const VideoDetail = ({video}) => {
const videoId = video.id.videoId;
const url = `https://www.youtube.com/embed/${videoId}`;
return (
<div className="video-detail col-md-8">
<div className="embed-responsive embed-responsive-16by9">
<iframe className="embed-responsive-item" src={url}></iframe>
</div>
<div className="details">
<div>{video.snippet.title}</div>
<div>{video.snippet.description}</div>
</div>
</div>
);
};
export default VideoDetail;
## Instruction:
Add loading when videos are still loading
## Code After:
import React from 'react';
// import VideoListItem from './video_list_item';
const VideoDetail = ({video}) => {
if (!video) {
return <div>Loading...</div>
}
const videoId = video.id.videoId;
const url = `https://www.youtube.com/embed/${videoId}`;
return (
<div className="video-detail col-md-8">
<div className="embed-responsive embed-responsive-16by9">
<iframe className="embed-responsive-item" src={url}></iframe>
</div>
<div className="details">
<div>{video.snippet.title}</div>
<div>{video.snippet.description}</div>
</div>
</div>
);
};
export default VideoDetail;
| import React from 'react';
// import VideoListItem from './video_list_item';
const VideoDetail = ({video}) => {
+ if (!video) {
+ return <div>Loading...</div>
+ }
+
const videoId = video.id.videoId;
const url = `https://www.youtube.com/embed/${videoId}`;
return (
<div className="video-detail col-md-8">
<div className="embed-responsive embed-responsive-16by9">
<iframe className="embed-responsive-item" src={url}></iframe>
</div>
<div className="details">
<div>{video.snippet.title}</div>
<div>{video.snippet.description}</div>
</div>
</div>
);
};
export default VideoDetail; | 4 | 0.2 | 4 | 0 |
b7c967ad0f45cc1144a8713c6513bae5bca89242 | LiSE/LiSE/test_proxy.py | LiSE/LiSE/test_proxy.py | from LiSE.proxy import EngineProcessManager
import allegedb.test
class ProxyTest(allegedb.test.AllegedTest):
def setUp(self):
self.manager = EngineProcessManager()
self.engine = self.manager.start('sqlite:///:memory:')
self.graphmakers = (self.engine.new_character,)
def tearDown(self):
self.manager.shutdown()
class ProxyGraphTest(allegedb.test.AbstractGraphTest, ProxyTest):
pass
class BranchLineageTest(ProxyGraphTest, allegedb.test.AbstractBranchLineageTest):
pass
class DictStorageTest(ProxyTest, allegedb.test.DictStorageTest):
pass
class ListStorageTest(ProxyTest, allegedb.test.ListStorageTest):
pass
class SetStorageTest(ProxyTest, allegedb.test.SetStorageTest):
pass
| from LiSE.proxy import EngineProcessManager
import allegedb.test
class ProxyTest(allegedb.test.AllegedTest):
def setUp(self):
self.manager = EngineProcessManager()
self.engine = self.manager.start('sqlite:///:memory:')
self.graphmakers = (self.engine.new_character,)
def tearDown(self):
self.manager.shutdown()
class ProxyGraphTest(allegedb.test.AbstractGraphTest, ProxyTest):
pass
class DictStorageTest(ProxyTest, allegedb.test.DictStorageTest):
pass
class ListStorageTest(ProxyTest, allegedb.test.ListStorageTest):
pass
class SetStorageTest(ProxyTest, allegedb.test.SetStorageTest):
pass
| Delete BranchLineageTest, which assumes bidirectional graphs exist | Delete BranchLineageTest, which assumes bidirectional graphs exist
| Python | agpl-3.0 | LogicalDash/LiSE,LogicalDash/LiSE | python | ## Code Before:
from LiSE.proxy import EngineProcessManager
import allegedb.test
class ProxyTest(allegedb.test.AllegedTest):
def setUp(self):
self.manager = EngineProcessManager()
self.engine = self.manager.start('sqlite:///:memory:')
self.graphmakers = (self.engine.new_character,)
def tearDown(self):
self.manager.shutdown()
class ProxyGraphTest(allegedb.test.AbstractGraphTest, ProxyTest):
pass
class BranchLineageTest(ProxyGraphTest, allegedb.test.AbstractBranchLineageTest):
pass
class DictStorageTest(ProxyTest, allegedb.test.DictStorageTest):
pass
class ListStorageTest(ProxyTest, allegedb.test.ListStorageTest):
pass
class SetStorageTest(ProxyTest, allegedb.test.SetStorageTest):
pass
## Instruction:
Delete BranchLineageTest, which assumes bidirectional graphs exist
## Code After:
from LiSE.proxy import EngineProcessManager
import allegedb.test
class ProxyTest(allegedb.test.AllegedTest):
def setUp(self):
self.manager = EngineProcessManager()
self.engine = self.manager.start('sqlite:///:memory:')
self.graphmakers = (self.engine.new_character,)
def tearDown(self):
self.manager.shutdown()
class ProxyGraphTest(allegedb.test.AbstractGraphTest, ProxyTest):
pass
class DictStorageTest(ProxyTest, allegedb.test.DictStorageTest):
pass
class ListStorageTest(ProxyTest, allegedb.test.ListStorageTest):
pass
class SetStorageTest(ProxyTest, allegedb.test.SetStorageTest):
pass
| from LiSE.proxy import EngineProcessManager
import allegedb.test
class ProxyTest(allegedb.test.AllegedTest):
def setUp(self):
self.manager = EngineProcessManager()
self.engine = self.manager.start('sqlite:///:memory:')
self.graphmakers = (self.engine.new_character,)
def tearDown(self):
self.manager.shutdown()
class ProxyGraphTest(allegedb.test.AbstractGraphTest, ProxyTest):
pass
- class BranchLineageTest(ProxyGraphTest, allegedb.test.AbstractBranchLineageTest):
- pass
-
-
class DictStorageTest(ProxyTest, allegedb.test.DictStorageTest):
pass
class ListStorageTest(ProxyTest, allegedb.test.ListStorageTest):
pass
class SetStorageTest(ProxyTest, allegedb.test.SetStorageTest):
pass | 4 | 0.125 | 0 | 4 |
fab9f55fecc8e022a746da431696d25eaa0a75a8 | README.md | README.md |
Poking around with canvas and trying to make stuff move and explode.. maybe with some zombies
Although I chose to arrange the code in a completely different way, all these tutorials helped along the way:
* [How to make a simple HTML5 Canvas game](http://www.lostdecadegames.com/how-to-make-a-simple-html5-canvas-game/)
* [Making Sprite-based Games with Canvas](http://jlongster.com/Making-Sprite-based-Games-with-Canvas)
* [Optimizing your JavaScript game for Firefox OS](https://hacks.mozilla.org/2013/05/optimizing-your-javascript-game-for-firefox-os/)
And the sprites were shamelessly stolen from:
* [Spriters Resource](http://www.spriters-resource.com/)
| Poking around with canvas and trying to make stuff move and explode.. maybe with some zombies
##Play it
Game is now live! [play it here!](http://brunops.org/projects/zombies)
##Read about it
I wrote a post about it, [read it here](http://brunops.org/projects)
##Credits
Although I chose to arrange the code in a completely different way, all these tutorials helped along the way:
* [How to make a simple HTML5 Canvas game](http://www.lostdecadegames.com/how-to-make-a-simple-html5-canvas-game/)
* [Making Sprite-based Games with Canvas](http://jlongster.com/Making-Sprite-based-Games-with-Canvas)
* [Optimizing your JavaScript game for Firefox OS](https://hacks.mozilla.org/2013/05/optimizing-your-javascript-game-for-firefox-os/)
And the sprites were shamelessly stolen from:
* [Spriters Resource](http://www.spriters-resource.com/)
| Add links to live game and post about it | Add links to live game and post about it
| Markdown | mit | handrus/zombies-game,brunops/zombies-game,brunops/zombies-game | markdown | ## Code Before:
Poking around with canvas and trying to make stuff move and explode.. maybe with some zombies
Although I chose to arrange the code in a completely different way, all these tutorials helped along the way:
* [How to make a simple HTML5 Canvas game](http://www.lostdecadegames.com/how-to-make-a-simple-html5-canvas-game/)
* [Making Sprite-based Games with Canvas](http://jlongster.com/Making-Sprite-based-Games-with-Canvas)
* [Optimizing your JavaScript game for Firefox OS](https://hacks.mozilla.org/2013/05/optimizing-your-javascript-game-for-firefox-os/)
And the sprites were shamelessly stolen from:
* [Spriters Resource](http://www.spriters-resource.com/)
## Instruction:
Add links to live game and post about it
## Code After:
Poking around with canvas and trying to make stuff move and explode.. maybe with some zombies
##Play it
Game is now live! [play it here!](http://brunops.org/projects/zombies)
##Read about it
I wrote a post about it, [read it here](http://brunops.org/projects)
##Credits
Although I chose to arrange the code in a completely different way, all these tutorials helped along the way:
* [How to make a simple HTML5 Canvas game](http://www.lostdecadegames.com/how-to-make-a-simple-html5-canvas-game/)
* [Making Sprite-based Games with Canvas](http://jlongster.com/Making-Sprite-based-Games-with-Canvas)
* [Optimizing your JavaScript game for Firefox OS](https://hacks.mozilla.org/2013/05/optimizing-your-javascript-game-for-firefox-os/)
And the sprites were shamelessly stolen from:
* [Spriters Resource](http://www.spriters-resource.com/)
| -
Poking around with canvas and trying to make stuff move and explode.. maybe with some zombies
+ ##Play it
+ Game is now live! [play it here!](http://brunops.org/projects/zombies)
+
+ ##Read about it
+ I wrote a post about it, [read it here](http://brunops.org/projects)
+
+ ##Credits
Although I chose to arrange the code in a completely different way, all these tutorials helped along the way:
* [How to make a simple HTML5 Canvas game](http://www.lostdecadegames.com/how-to-make-a-simple-html5-canvas-game/)
* [Making Sprite-based Games with Canvas](http://jlongster.com/Making-Sprite-based-Games-with-Canvas)
* [Optimizing your JavaScript game for Firefox OS](https://hacks.mozilla.org/2013/05/optimizing-your-javascript-game-for-firefox-os/)
And the sprites were shamelessly stolen from:
* [Spriters Resource](http://www.spriters-resource.com/) | 8 | 0.8 | 7 | 1 |
aa92cdf493a29ea4d8bf79aadc2a538f35397b6b | docs/LocalizationMoment.md | docs/LocalizationMoment.md |
To localize the calendar with [moment.js](http://www.momentjs.com):
1. make sure [moment](https://www.npmjs.com/package/moment) is included in your dependencies
2. make sure the required moment's locale data is loaded
3. import `LocaleUtils` from `react-day-picker/moment` and pass it to the `localeUtils` props
4. use the `locale` prop to pass the current locale
### Example
The following component shows four day pickers: english, japanese, arabic and italian.
```jsx
import React from "react";
import DayPicker from "react-day-picker";
// Use this util to format the calendar values according to the
// selected locale with moment.js
import LocaleUtils from "react-day-picker/moment";
// Make sure moment.js has the required locale data
import "moment/locale/ja";
import "moment/locale/ar";
import "moment/locale/it";
function LocalizedExample() {
return (
<div>
<p>English</p>
<DayPicker localeUtils={ LocaleUtils } locale="en" />
<p>Japanese</p>
<DayPicker localeUtils={ LocaleUtils } locale="jp" />
<p>Arabic</p>
<DayPicker localeUtils={ LocaleUtils } locale="ar" />
<p>Italian</p>
<DayPicker localeUtils={ LocaleUtils } locale="it" />
</div>
);
}
```
|
To localize the calendar with [moment.js](http://www.momentjs.com):
1. make sure [moment](https://www.npmjs.com/package/moment) is included in your dependencies
2. make sure the required moment's locale data is available when rendering the day picker
3. import `LocaleUtils` from `react-day-picker/moment` and pass it to the `localeUtils` props
4. use the `locale` prop to pass the current locale
[See an example](http://www.gpbl.org/react-day-picker/examples/#localized).
### Code
The following component shows four day pickers: english, japanese, arabic and italian.
```jsx
import React from "react";
import DayPicker from "react-day-picker";
// Use this util to format the calendar values according to the
// selected locale with moment.js
import LocaleUtils from "react-day-picker/moment";
// Make sure moment.js has the required locale data
import "moment/locale/ja";
import "moment/locale/ar";
import "moment/locale/it";
function LocalizedExample() {
return (
<div>
<p>English</p>
<DayPicker localeUtils={ LocaleUtils } locale="en" />
<p>Japanese</p>
<DayPicker localeUtils={ LocaleUtils } locale="jp" />
<p>Arabic</p>
<DayPicker localeUtils={ LocaleUtils } locale="ar" />
<p>Italian</p>
<DayPicker localeUtils={ LocaleUtils } locale="it" />
</div>
);
}
```
| Add link to localization example | Add link to localization example
| Markdown | mit | gpbl/react-day-picker,saenglert/react-day-picker,gpbl/react-day-picker,gpbl/react-day-picker | markdown | ## Code Before:
To localize the calendar with [moment.js](http://www.momentjs.com):
1. make sure [moment](https://www.npmjs.com/package/moment) is included in your dependencies
2. make sure the required moment's locale data is loaded
3. import `LocaleUtils` from `react-day-picker/moment` and pass it to the `localeUtils` props
4. use the `locale` prop to pass the current locale
### Example
The following component shows four day pickers: english, japanese, arabic and italian.
```jsx
import React from "react";
import DayPicker from "react-day-picker";
// Use this util to format the calendar values according to the
// selected locale with moment.js
import LocaleUtils from "react-day-picker/moment";
// Make sure moment.js has the required locale data
import "moment/locale/ja";
import "moment/locale/ar";
import "moment/locale/it";
function LocalizedExample() {
return (
<div>
<p>English</p>
<DayPicker localeUtils={ LocaleUtils } locale="en" />
<p>Japanese</p>
<DayPicker localeUtils={ LocaleUtils } locale="jp" />
<p>Arabic</p>
<DayPicker localeUtils={ LocaleUtils } locale="ar" />
<p>Italian</p>
<DayPicker localeUtils={ LocaleUtils } locale="it" />
</div>
);
}
```
## Instruction:
Add link to localization example
## Code After:
To localize the calendar with [moment.js](http://www.momentjs.com):
1. make sure [moment](https://www.npmjs.com/package/moment) is included in your dependencies
2. make sure the required moment's locale data is available when rendering the day picker
3. import `LocaleUtils` from `react-day-picker/moment` and pass it to the `localeUtils` props
4. use the `locale` prop to pass the current locale
[See an example](http://www.gpbl.org/react-day-picker/examples/#localized).
### Code
The following component shows four day pickers: english, japanese, arabic and italian.
```jsx
import React from "react";
import DayPicker from "react-day-picker";
// Use this util to format the calendar values according to the
// selected locale with moment.js
import LocaleUtils from "react-day-picker/moment";
// Make sure moment.js has the required locale data
import "moment/locale/ja";
import "moment/locale/ar";
import "moment/locale/it";
function LocalizedExample() {
return (
<div>
<p>English</p>
<DayPicker localeUtils={ LocaleUtils } locale="en" />
<p>Japanese</p>
<DayPicker localeUtils={ LocaleUtils } locale="jp" />
<p>Arabic</p>
<DayPicker localeUtils={ LocaleUtils } locale="ar" />
<p>Italian</p>
<DayPicker localeUtils={ LocaleUtils } locale="it" />
</div>
);
}
```
|
To localize the calendar with [moment.js](http://www.momentjs.com):
1. make sure [moment](https://www.npmjs.com/package/moment) is included in your dependencies
- 2. make sure the required moment's locale data is loaded
? -
+ 2. make sure the required moment's locale data is available when rendering the day picker
? ++++ ++++++++++++ +++++++++ +++++++++
3. import `LocaleUtils` from `react-day-picker/moment` and pass it to the `localeUtils` props
4. use the `locale` prop to pass the current locale
- ### Example
+ [See an example](http://www.gpbl.org/react-day-picker/examples/#localized).
+
+ ### Code
The following component shows four day pickers: english, japanese, arabic and italian.
```jsx
import React from "react";
import DayPicker from "react-day-picker";
// Use this util to format the calendar values according to the
// selected locale with moment.js
import LocaleUtils from "react-day-picker/moment";
// Make sure moment.js has the required locale data
import "moment/locale/ja";
import "moment/locale/ar";
import "moment/locale/it";
function LocalizedExample() {
return (
<div>
<p>English</p>
<DayPicker localeUtils={ LocaleUtils } locale="en" />
<p>Japanese</p>
<DayPicker localeUtils={ LocaleUtils } locale="jp" />
<p>Arabic</p>
<DayPicker localeUtils={ LocaleUtils } locale="ar" />
<p>Italian</p>
<DayPicker localeUtils={ LocaleUtils } locale="it" />
</div>
);
}
``` | 6 | 0.130435 | 4 | 2 |
e5ea9e32346a14e1d4794ceb97b555c9e2249d00 | src/style/swagger-ui.scss | src/style/swagger-ui.scss | .url, .topbar {
display: none;
}
// The authorize button in the white bar near the top is not inside the white
// bar by default, so this improves the situation.
.swagger-ui .scheme-container {
margin-left: 40px !important;
padding-right: 40px !important;
}
| .url, .topbar {
display: none;
}
// The authorize button in the white bar near the top is not inside the white
// bar by default, so this improves the situation.
.swagger-ui .scheme-container {
background-color: inherit !important;
box-shadow: none !important;
-webkit-box-shadow: none !important;
}
| Remove background color and box shadow from schemes | Remove background color and box shadow from schemes
| SCSS | mit | apinf/open-api-designer,apinf/openapi-designer,apinf/open-api-designer,apinf/openapi-designer | scss | ## Code Before:
.url, .topbar {
display: none;
}
// The authorize button in the white bar near the top is not inside the white
// bar by default, so this improves the situation.
.swagger-ui .scheme-container {
margin-left: 40px !important;
padding-right: 40px !important;
}
## Instruction:
Remove background color and box shadow from schemes
## Code After:
.url, .topbar {
display: none;
}
// The authorize button in the white bar near the top is not inside the white
// bar by default, so this improves the situation.
.swagger-ui .scheme-container {
background-color: inherit !important;
box-shadow: none !important;
-webkit-box-shadow: none !important;
}
| .url, .topbar {
display: none;
}
// The authorize button in the white bar near the top is not inside the white
// bar by default, so this improves the situation.
.swagger-ui .scheme-container {
- margin-left: 40px !important;
- padding-right: 40px !important;
+ background-color: inherit !important;
+ box-shadow: none !important;
+ -webkit-box-shadow: none !important;
} | 5 | 0.5 | 3 | 2 |
ee92d9500948ccd1ddd55a8feb31a2f97672b099 | scenario/stages_without_rollback/3_get_and_transfer_images.yaml | scenario/stages_without_rollback/3_get_and_transfer_images.yaml |
preparation:
- create_snapshot: True
process:
- act_get_filter: True
- act_check_filter: True
- act_identity_trans: True
- act_get_info_images: True
- act_deploy_images: True
|
preparation:
- create_snapshot: True
process:
- act_get_filter: True
- act_check_filter: True
- act_get_info_images: True
- act_deploy_images: True
| Remove identity from glance with no rollback stage | Remove identity from glance with no rollback stage
| YAML | apache-2.0 | mgrygoriev/CloudFerry,MirantisWorkloadMobility/CloudFerry,MirantisWorkloadMobility/CloudFerry,MirantisWorkloadMobility/CloudFerry,mgrygoriev/CloudFerry,SVilgelm/CloudFerry,mgrygoriev/CloudFerry,mgrygoriev/CloudFerry,SVilgelm/CloudFerry,SVilgelm/CloudFerry,MirantisWorkloadMobility/CloudFerry,SVilgelm/CloudFerry | yaml | ## Code Before:
preparation:
- create_snapshot: True
process:
- act_get_filter: True
- act_check_filter: True
- act_identity_trans: True
- act_get_info_images: True
- act_deploy_images: True
## Instruction:
Remove identity from glance with no rollback stage
## Code After:
preparation:
- create_snapshot: True
process:
- act_get_filter: True
- act_check_filter: True
- act_get_info_images: True
- act_deploy_images: True
|
preparation:
- create_snapshot: True
process:
- act_get_filter: True
- act_check_filter: True
- - act_identity_trans: True
- act_get_info_images: True
- act_deploy_images: True | 1 | 0.1 | 0 | 1 |
1e2bcfd07626a1fdfeae2bc64f2daf5c9decbc17 | app/assets/javascripts/views/front/headerView.js | app/assets/javascripts/views/front/headerView.js | ((function (App) {
'use strict';
App.View.HeaderView = Backbone.View.extend({
events: {
'click .js-mobile-menu': 'toggleDrawer',
'click .js-search-button': 'toggleSearch',
},
initialize: function () {
this.drawer = this.el.querySelector('.js-mobile-drawer');
this.searchContainer = this.el.querySelector('.js-search');
this.el.classList.add('initialized');
this._setListeners();
},
/**
* Set the listeners not attached to any DOM element of this.el
*/
_setListeners: function () {
document.body.addEventListener('click', function (e) {
if ($(e.target).closest(this.searchContainer).length) return;
this.toggleSearch(false);
}.bind(this));
},
toggleDrawer: function () {
var opened = this.drawer.classList.toggle('-opened');
var overflow = 'auto';
if (opened) overflow = 'hidden';
document.querySelector('body').style.overflow = overflow;
},
/**
* Toggle the visibility of the search container
* @param {boolean} [show] Force the search to expand or contract
*/
toggleSearch: function(show) {
this.searchContainer.classList.toggle('-expanded', show);
}
});
})(this.App));
| ((function (App) {
'use strict';
App.View.HeaderView = Backbone.View.extend({
events: {
'click .js-mobile-menu': 'toggleDrawer',
'click .js-search-button': 'onClickSearchButton',
},
initialize: function () {
this.drawer = this.el.querySelector('.js-mobile-drawer');
this.searchContainer = this.el.querySelector('.js-search');
this.el.classList.add('initialized');
this._setListeners();
},
/**
* Set the listeners not attached to any DOM element of this.el
*/
_setListeners: function () {
document.body.addEventListener('click', function (e) {
if ($(e.target).closest(this.searchContainer).length) return;
this.toggleSearch(false);
}.bind(this));
},
onClickSearchButton: function () {
this.toggleSearch();
},
toggleDrawer: function () {
var opened = this.drawer.classList.toggle('-opened');
var overflow = 'auto';
if (opened) overflow = 'hidden';
document.querySelector('body').style.overflow = overflow;
},
/**
* Toggle the visibility of the search container
* @param {boolean} [show] Force the search to expand or contract
*/
toggleSearch: function(show) {
this.searchContainer.classList.toggle('-expanded', show);
}
});
})(this.App));
| Fix a bug where the search dropdown wouldn't toggle | Fix a bug where the search dropdown wouldn't toggle
When clicking on the magnifying glass again, the dropdown wouldn't hide.
| JavaScript | mit | Vizzuality/forest-atlas-landscape-cms,Vizzuality/forest-atlas-landscape-cms,Vizzuality/forest-atlas-landscape-cms,Vizzuality/forest-atlas-landscape-cms | javascript | ## Code Before:
((function (App) {
'use strict';
App.View.HeaderView = Backbone.View.extend({
events: {
'click .js-mobile-menu': 'toggleDrawer',
'click .js-search-button': 'toggleSearch',
},
initialize: function () {
this.drawer = this.el.querySelector('.js-mobile-drawer');
this.searchContainer = this.el.querySelector('.js-search');
this.el.classList.add('initialized');
this._setListeners();
},
/**
* Set the listeners not attached to any DOM element of this.el
*/
_setListeners: function () {
document.body.addEventListener('click', function (e) {
if ($(e.target).closest(this.searchContainer).length) return;
this.toggleSearch(false);
}.bind(this));
},
toggleDrawer: function () {
var opened = this.drawer.classList.toggle('-opened');
var overflow = 'auto';
if (opened) overflow = 'hidden';
document.querySelector('body').style.overflow = overflow;
},
/**
* Toggle the visibility of the search container
* @param {boolean} [show] Force the search to expand or contract
*/
toggleSearch: function(show) {
this.searchContainer.classList.toggle('-expanded', show);
}
});
})(this.App));
## Instruction:
Fix a bug where the search dropdown wouldn't toggle
When clicking on the magnifying glass again, the dropdown wouldn't hide.
## Code After:
((function (App) {
'use strict';
App.View.HeaderView = Backbone.View.extend({
events: {
'click .js-mobile-menu': 'toggleDrawer',
'click .js-search-button': 'onClickSearchButton',
},
initialize: function () {
this.drawer = this.el.querySelector('.js-mobile-drawer');
this.searchContainer = this.el.querySelector('.js-search');
this.el.classList.add('initialized');
this._setListeners();
},
/**
* Set the listeners not attached to any DOM element of this.el
*/
_setListeners: function () {
document.body.addEventListener('click', function (e) {
if ($(e.target).closest(this.searchContainer).length) return;
this.toggleSearch(false);
}.bind(this));
},
onClickSearchButton: function () {
this.toggleSearch();
},
toggleDrawer: function () {
var opened = this.drawer.classList.toggle('-opened');
var overflow = 'auto';
if (opened) overflow = 'hidden';
document.querySelector('body').style.overflow = overflow;
},
/**
* Toggle the visibility of the search container
* @param {boolean} [show] Force the search to expand or contract
*/
toggleSearch: function(show) {
this.searchContainer.classList.toggle('-expanded', show);
}
});
})(this.App));
| ((function (App) {
'use strict';
App.View.HeaderView = Backbone.View.extend({
events: {
'click .js-mobile-menu': 'toggleDrawer',
- 'click .js-search-button': 'toggleSearch',
? - ^^ ^
+ 'click .js-search-button': 'onClickSearchButton',
? ^^ ^^^ ++++++
},
initialize: function () {
this.drawer = this.el.querySelector('.js-mobile-drawer');
this.searchContainer = this.el.querySelector('.js-search');
this.el.classList.add('initialized');
this._setListeners();
},
/**
* Set the listeners not attached to any DOM element of this.el
*/
_setListeners: function () {
document.body.addEventListener('click', function (e) {
if ($(e.target).closest(this.searchContainer).length) return;
this.toggleSearch(false);
}.bind(this));
},
+ onClickSearchButton: function () {
+ this.toggleSearch();
+ },
+
toggleDrawer: function () {
var opened = this.drawer.classList.toggle('-opened');
var overflow = 'auto';
if (opened) overflow = 'hidden';
document.querySelector('body').style.overflow = overflow;
},
/**
* Toggle the visibility of the search container
* @param {boolean} [show] Force the search to expand or contract
*/
toggleSearch: function(show) {
this.searchContainer.classList.toggle('-expanded', show);
}
});
})(this.App)); | 6 | 0.136364 | 5 | 1 |
e0ad36d5a409134a28e70e3a8133d178ba1ced24 | recipes/ipyevents/meta.yaml | recipes/ipyevents/meta.yaml | {% set name = "ipyevents" %}
{% set version = "0.0.1" %}
{% set file_ext = "tar.gz" %}
{% set hash_type = "sha256" %}
{% set hash_value = "c389e5444fae86cd947563c1f680e98833624378abb2596fdda2deb08e435f72" %}
package:
name: '{{ name|lower }}'
version: '{{ version }}'
source:
fn: '{{ name }}-{{ version }}.{{ file_ext }}'
url: https://pypi.io/packages/source/{{ name[0] }}/{{ name }}/{{ name }}-{{ version }}.{{ file_ext }}
'{{ hash_type }}': '{{ hash_value }}'
build:
number: 0
script: python setup.py install --single-version-externally-managed --record=record.txt
requirements:
build:
- python
- setuptools
run:
- python
- ipywidgets >=7.0.0
test:
imports:
- ipyevents
about:
home: https://github.com/mwcraig/ipyevents
license: BSD 3-clause
license_family: BSD
license_file: 'LICENSE.md'
summary: A custom widget for returning mouse and keyboard events to Python
description: A custom widget for returning mouse and keyboard events to Python
dev_url: 'https://github/com/mwcraig/ipyevents'
extra:
recipe-maintainers:
- mwcraig
| {% set name = "ipyevents" %}
{% set version = "0.0.1" %}
{% set file_ext = "tar.gz" %}
{% set hash_type = "sha256" %}
{% set hash_value = "c389e5444fae86cd947563c1f680e98833624378abb2596fdda2deb08e435f72" %}
package:
name: '{{ name|lower }}'
version: '{{ version }}'
source:
fn: '{{ name }}-{{ version }}.{{ file_ext }}'
url: https://pypi.io/packages/source/{{ name[0] }}/{{ name }}/{{ name }}-{{ version }}.{{ file_ext }}
'{{ hash_type }}': '{{ hash_value }}'
build:
number: 0
noarch: python
script: python setup.py install --single-version-externally-managed --record=record.txt
requirements:
build:
- python
- setuptools
run:
- python
- ipywidgets >=7.0.0
test:
imports:
- ipyevents
about:
home: https://github.com/mwcraig/ipyevents
license: BSD 3-clause
license_family: BSD
license_file: 'LICENSE.md'
summary: A custom widget for returning mouse and keyboard events to Python
description: A custom widget for returning mouse and keyboard events to Python
dev_url: 'https://github/com/mwcraig/ipyevents'
extra:
recipe-maintainers:
- mwcraig
| Make this a noarch package | Make this a noarch package
| YAML | bsd-3-clause | johanneskoester/staged-recipes,petrushy/staged-recipes,petrushy/staged-recipes,ocefpaf/staged-recipes,conda-forge/staged-recipes,chohner/staged-recipes,jjhelmus/staged-recipes,jochym/staged-recipes,dschreij/staged-recipes,Cashalow/staged-recipes,jochym/staged-recipes,hadim/staged-recipes,jakirkham/staged-recipes,ceholden/staged-recipes,sodre/staged-recipes,mcs07/staged-recipes,goanpeca/staged-recipes,cpaulik/staged-recipes,chrisburr/staged-recipes,shadowwalkersb/staged-recipes,birdsarah/staged-recipes,ocefpaf/staged-recipes,scopatz/staged-recipes,guillochon/staged-recipes,hadim/staged-recipes,chrisburr/staged-recipes,cpaulik/staged-recipes,Juanlu001/staged-recipes,basnijholt/staged-recipes,pmlandwehr/staged-recipes,goanpeca/staged-recipes,SylvainCorlay/staged-recipes,rvalieris/staged-recipes,jakirkham/staged-recipes,rvalieris/staged-recipes,scopatz/staged-recipes,glemaitre/staged-recipes,conda-forge/staged-recipes,igortg/staged-recipes,johanneskoester/staged-recipes,rmcgibbo/staged-recipes,birdsarah/staged-recipes,asmeurer/staged-recipes,patricksnape/staged-recipes,guillochon/staged-recipes,isuruf/staged-recipes,sannykr/staged-recipes,isuruf/staged-recipes,shadowwalkersb/staged-recipes,stuertz/staged-recipes,Cashalow/staged-recipes,Juanlu001/staged-recipes,barkls/staged-recipes,sodre/staged-recipes,basnijholt/staged-recipes,ceholden/staged-recipes,jjhelmus/staged-recipes,rmcgibbo/staged-recipes,ReimarBauer/staged-recipes,barkls/staged-recipes,ReimarBauer/staged-recipes,sodre/staged-recipes,mariusvniekerk/staged-recipes,synapticarbors/staged-recipes,igortg/staged-recipes,dschreij/staged-recipes,mariusvniekerk/staged-recipes,sannykr/staged-recipes,NOAA-ORR-ERD/staged-recipes,chohner/staged-recipes,pmlandwehr/staged-recipes,kwilcox/staged-recipes,synapticarbors/staged-recipes,asmeurer/staged-recipes,NOAA-ORR-ERD/staged-recipes,kwilcox/staged-recipes,SylvainCorlay/staged-recipes,patricksnape/staged-recipes,mcs07/staged-recipes,stuertz/staged-reci
pes,glemaitre/staged-recipes | yaml | ## Code Before:
{% set name = "ipyevents" %}
{% set version = "0.0.1" %}
{% set file_ext = "tar.gz" %}
{% set hash_type = "sha256" %}
{% set hash_value = "c389e5444fae86cd947563c1f680e98833624378abb2596fdda2deb08e435f72" %}
package:
name: '{{ name|lower }}'
version: '{{ version }}'
source:
fn: '{{ name }}-{{ version }}.{{ file_ext }}'
url: https://pypi.io/packages/source/{{ name[0] }}/{{ name }}/{{ name }}-{{ version }}.{{ file_ext }}
'{{ hash_type }}': '{{ hash_value }}'
build:
number: 0
script: python setup.py install --single-version-externally-managed --record=record.txt
requirements:
build:
- python
- setuptools
run:
- python
- ipywidgets >=7.0.0
test:
imports:
- ipyevents
about:
home: https://github.com/mwcraig/ipyevents
license: BSD 3-clause
license_family: BSD
license_file: 'LICENSE.md'
summary: A custom widget for returning mouse and keyboard events to Python
description: A custom widget for returning mouse and keyboard events to Python
dev_url: 'https://github/com/mwcraig/ipyevents'
extra:
recipe-maintainers:
- mwcraig
## Instruction:
Make this a noarch package
## Code After:
{% set name = "ipyevents" %}
{% set version = "0.0.1" %}
{% set file_ext = "tar.gz" %}
{% set hash_type = "sha256" %}
{% set hash_value = "c389e5444fae86cd947563c1f680e98833624378abb2596fdda2deb08e435f72" %}
package:
name: '{{ name|lower }}'
version: '{{ version }}'
source:
fn: '{{ name }}-{{ version }}.{{ file_ext }}'
url: https://pypi.io/packages/source/{{ name[0] }}/{{ name }}/{{ name }}-{{ version }}.{{ file_ext }}
'{{ hash_type }}': '{{ hash_value }}'
build:
number: 0
noarch: python
script: python setup.py install --single-version-externally-managed --record=record.txt
requirements:
build:
- python
- setuptools
run:
- python
- ipywidgets >=7.0.0
test:
imports:
- ipyevents
about:
home: https://github.com/mwcraig/ipyevents
license: BSD 3-clause
license_family: BSD
license_file: 'LICENSE.md'
summary: A custom widget for returning mouse and keyboard events to Python
description: A custom widget for returning mouse and keyboard events to Python
dev_url: 'https://github/com/mwcraig/ipyevents'
extra:
recipe-maintainers:
- mwcraig
| {% set name = "ipyevents" %}
{% set version = "0.0.1" %}
{% set file_ext = "tar.gz" %}
{% set hash_type = "sha256" %}
{% set hash_value = "c389e5444fae86cd947563c1f680e98833624378abb2596fdda2deb08e435f72" %}
package:
name: '{{ name|lower }}'
version: '{{ version }}'
source:
fn: '{{ name }}-{{ version }}.{{ file_ext }}'
url: https://pypi.io/packages/source/{{ name[0] }}/{{ name }}/{{ name }}-{{ version }}.{{ file_ext }}
'{{ hash_type }}': '{{ hash_value }}'
build:
number: 0
+ noarch: python
script: python setup.py install --single-version-externally-managed --record=record.txt
requirements:
build:
- python
- setuptools
run:
- python
- ipywidgets >=7.0.0
test:
imports:
- ipyevents
about:
home: https://github.com/mwcraig/ipyevents
license: BSD 3-clause
license_family: BSD
license_file: 'LICENSE.md'
summary: A custom widget for returning mouse and keyboard events to Python
description: A custom widget for returning mouse and keyboard events to Python
dev_url: 'https://github/com/mwcraig/ipyevents'
extra:
recipe-maintainers:
- mwcraig | 1 | 0.023256 | 1 | 0 |
c0cb23dc8ac498fc02217091c9302559e1d1d7f8 | metadata/org.ligi.ipfsdroid.txt | metadata/org.ligi.ipfsdroid.txt | Categories:System
License:GPLv3
Web Site:https://github.com/ligi/IPFSDroid/blob/HEAD/README.md
Source Code:https://github.com/ligi/IPFSDroid
Issue Tracker:https://github.com/ligi/IPFSDroid/issues
Auto Name:IPFSDroid
Summary:IPFS Tool
Description:
This App does not represent a full IPFS node yet! Think of it as a dependency
injection for IPFS on android. Currently we use the centralized service ipfs.io
so you can use ipfs on android - later this will be exchanged for a full-node
for decentralisation but all things that use this app don't have to change/care.
.
Repo Type:git
Repo:https://github.com/ligi/IPFSDroid
Build:0.1,1
commit=0.1
subdir=app
gradle=yes
prebuild=sed -i -e '/android-sdk-manager/d' build.gradle
Build:0.5,5
commit=0.5
subdir=app
gradle=yes
prebuild=sed -i -e '/android-sdk-manager/d' build.gradle
Auto Update Mode:None
Update Check Mode:Tags
Current Version:0.7
Current Version Code:7
| Categories:System
License:GPLv3
Web Site:https://github.com/ligi/IPFSDroid/blob/HEAD/README.md
Source Code:https://github.com/ligi/IPFSDroid
Issue Tracker:https://github.com/ligi/IPFSDroid/issues
Auto Name:IPFSDroid
Summary:IPFS Tool
Description:
This App does not represent a full IPFS node yet! Think of it as a dependency
injection for IPFS on android. Currently we use the centralized service ipfs.io
so you can use ipfs on android - later this will be exchanged for a full-node
for decentralisation but all things that use this app don't have to change/care.
.
Repo Type:git
Repo:https://github.com/ligi/IPFSDroid
Build:0.1,1
commit=0.1
subdir=app
gradle=yes
prebuild=sed -i -e '/android-sdk-manager/d' build.gradle
Build:0.5,5
commit=0.5
subdir=app
gradle=yes
prebuild=sed -i -e '/android-sdk-manager/d' build.gradle
Build:0.7,7
commit=0.7
subdir=app
gradle=yes
prebuild=sed -i -e '/android-sdk-manager/d' build.gradle
Auto Update Mode:None
Update Check Mode:Tags
Current Version:0.7
Current Version Code:7
| Update IPFSDroid to 0.7 (7) | Update IPFSDroid to 0.7 (7)
| Text | agpl-3.0 | f-droid/fdroiddata,f-droid/fdroiddata,f-droid/fdroid-data | text | ## Code Before:
Categories:System
License:GPLv3
Web Site:https://github.com/ligi/IPFSDroid/blob/HEAD/README.md
Source Code:https://github.com/ligi/IPFSDroid
Issue Tracker:https://github.com/ligi/IPFSDroid/issues
Auto Name:IPFSDroid
Summary:IPFS Tool
Description:
This App does not represent a full IPFS node yet! Think of it as a dependency
injection for IPFS on android. Currently we use the centralized service ipfs.io
so you can use ipfs on android - later this will be exchanged for a full-node
for decentralisation but all things that use this app don't have to change/care.
.
Repo Type:git
Repo:https://github.com/ligi/IPFSDroid
Build:0.1,1
commit=0.1
subdir=app
gradle=yes
prebuild=sed -i -e '/android-sdk-manager/d' build.gradle
Build:0.5,5
commit=0.5
subdir=app
gradle=yes
prebuild=sed -i -e '/android-sdk-manager/d' build.gradle
Auto Update Mode:None
Update Check Mode:Tags
Current Version:0.7
Current Version Code:7
## Instruction:
Update IPFSDroid to 0.7 (7)
## Code After:
Categories:System
License:GPLv3
Web Site:https://github.com/ligi/IPFSDroid/blob/HEAD/README.md
Source Code:https://github.com/ligi/IPFSDroid
Issue Tracker:https://github.com/ligi/IPFSDroid/issues
Auto Name:IPFSDroid
Summary:IPFS Tool
Description:
This App does not represent a full IPFS node yet! Think of it as a dependency
injection for IPFS on android. Currently we use the centralized service ipfs.io
so you can use ipfs on android - later this will be exchanged for a full-node
for decentralisation but all things that use this app don't have to change/care.
.
Repo Type:git
Repo:https://github.com/ligi/IPFSDroid
Build:0.1,1
commit=0.1
subdir=app
gradle=yes
prebuild=sed -i -e '/android-sdk-manager/d' build.gradle
Build:0.5,5
commit=0.5
subdir=app
gradle=yes
prebuild=sed -i -e '/android-sdk-manager/d' build.gradle
Build:0.7,7
commit=0.7
subdir=app
gradle=yes
prebuild=sed -i -e '/android-sdk-manager/d' build.gradle
Auto Update Mode:None
Update Check Mode:Tags
Current Version:0.7
Current Version Code:7
| Categories:System
License:GPLv3
Web Site:https://github.com/ligi/IPFSDroid/blob/HEAD/README.md
Source Code:https://github.com/ligi/IPFSDroid
Issue Tracker:https://github.com/ligi/IPFSDroid/issues
Auto Name:IPFSDroid
Summary:IPFS Tool
Description:
This App does not represent a full IPFS node yet! Think of it as a dependency
injection for IPFS on android. Currently we use the centralized service ipfs.io
so you can use ipfs on android - later this will be exchanged for a full-node
for decentralisation but all things that use this app don't have to change/care.
.
Repo Type:git
Repo:https://github.com/ligi/IPFSDroid
Build:0.1,1
commit=0.1
subdir=app
gradle=yes
prebuild=sed -i -e '/android-sdk-manager/d' build.gradle
Build:0.5,5
commit=0.5
subdir=app
gradle=yes
prebuild=sed -i -e '/android-sdk-manager/d' build.gradle
+ Build:0.7,7
+ commit=0.7
+ subdir=app
+ gradle=yes
+ prebuild=sed -i -e '/android-sdk-manager/d' build.gradle
+
Auto Update Mode:None
Update Check Mode:Tags
Current Version:0.7
Current Version Code:7 | 6 | 0.176471 | 6 | 0 |
d7f2102a24005ade5dd52c7af90b829e50fdd8a8 | data/templates/error_norootpath.html | data/templates/error_norootpath.html | <%inherit file="error.html"/>
<p>
No root path was provided.
</p>
| <%inherit file="error.html"/>
<p>
No root path was provided. Try to set the <code>ASS2M_ROOT</code> environment variable.
</p>
| Make the "no root path" error more explicit | Make the "no root path" error more explicit
| HTML | agpl-3.0 | laurentb/assnet,laurentb/assnet | html | ## Code Before:
<%inherit file="error.html"/>
<p>
No root path was provided.
</p>
## Instruction:
Make the "no root path" error more explicit
## Code After:
<%inherit file="error.html"/>
<p>
No root path was provided. Try to set the <code>ASS2M_ROOT</code> environment variable.
</p>
| <%inherit file="error.html"/>
<p>
- No root path was provided.
+ No root path was provided. Try to set the <code>ASS2M_ROOT</code> environment variable.
</p> | 2 | 0.4 | 1 | 1 |
a9df0088bfa9713486f7de64d3ca03457a9e0d59 | app/serializers/endpoint_serializer.rb | app/serializers/endpoint_serializer.rb | class EndpointSerializer < ActiveModel::Serializer
attributes :name, :public, :certificate_id
end
| class EndpointSerializer < ActiveModel::Serializer
attributes :name, :public, :certificate_id
belongs_to :district
end
| Add district association to endpoint serializer | Add district association to endpoint serializer
| Ruby | mit | degica/barcelona,degica/barcelona,degica/barcelona,degica/barcelona,degica/barcelona,degica/barcelona | ruby | ## Code Before:
class EndpointSerializer < ActiveModel::Serializer
attributes :name, :public, :certificate_id
end
## Instruction:
Add district association to endpoint serializer
## Code After:
class EndpointSerializer < ActiveModel::Serializer
attributes :name, :public, :certificate_id
belongs_to :district
end
| class EndpointSerializer < ActiveModel::Serializer
attributes :name, :public, :certificate_id
+ belongs_to :district
end | 1 | 0.333333 | 1 | 0 |
726524a916d350f2a75ebe12b125482a97106aef | .travis.yml | .travis.yml | sudo: false
language: cpp
env:
- NODE_VERSION=0.10
- NODE_VERSION=0.12
os:
- linux
- osx
matrix:
allow_failures:
- os: osx
install:
- export CXX="g++-4.8" CC="gcc-4.8"
- rm -rf ~/.nvm
- wget -qO- https://raw.githubusercontent.com/creationix/nvm/v0.26.1/install.sh | bash
- source ~/.nvm/nvm.sh
- nvm install "${NODE_VERSION}"
- npm install -g npm@^2.14.2
- test "x${TRAVIS_OS_NAME}x" = "xosxx" && brew install scons || true
- npm install -g grunt-cli
addons:
apt:
sources:
- ubuntu-toolchain-r-test
packages:
- libboost-all-dev
- libcurl4-openssl-dev
- uuid-dev
- g++-4.8
script: npm run-script ci
| sudo: false
language: cpp
matrix:
include:
- env: NODE_VERSION=0.10 BUILD_ONLY=false
os: linux
- env: NODE_VERSION=0.12 BUILD_ONLY=false
os: linux
- env: NODE_VERSION=0.12 BUILD_ONLY=true
os: osx
# Allowed failures must be specified twice - once in "include" to have them
# run, and again in "allowed_failures" to not have them break the build.
# Below are the allowed failures:
- env: NODE_VERSION=0.10 BUILD_ONLY=false
os: osx
- env: NODE_VERSION=0.12 BUILD_ONLY=false
os: osx
allow_failures:
- env: NODE_VERSION=0.10 BUILD_ONLY=false
os: osx
- env: NODE_VERSION=0.12 BUILD_ONLY=false
os: osx
install:
- if test "x${TRAVIS_OS_NAME}x" = "xlinuxx"; then export CXX="g++-4.8" CC="gcc-4.8"; fi
- rm -rf ~/.nvm
- wget -qO- https://raw.githubusercontent.com/creationix/nvm/v0.26.1/install.sh | bash
- source ~/.nvm/nvm.sh
- nvm install "${NODE_VERSION}"
- npm install -g npm@^2.14.2
- if test "x${TRAVIS_OS_NAME}x" = "xosxx"; then brew install scons; fi
- npm install -g grunt-cli
addons:
apt:
sources:
- ubuntu-toolchain-r-test
packages:
- libboost-all-dev
- libcurl4-openssl-dev
- uuid-dev
- g++-4.8
script: if test "x${BUILD_ONLY}x" = "xtruex"; then npm install; else npm run-script ci; fi
| Use default compiler (clang) on OSX and force OSX build-only to pass | CI: Use default compiler (clang) on OSX and force OSX build-only to pass
This also introduces a more flexible way of defining travis jobs.
| YAML | mit | zqzhang/iotivity-node,zolkis/iotivity-node,zolkis/iotivity-node,zolkis/iotivity-node,zolkis/iotivity-node,zqzhang/iotivity-node,zqzhang/iotivity-node,zqzhang/iotivity-node,zolkis/iotivity-node,zqzhang/iotivity-node | yaml | ## Code Before:
sudo: false
language: cpp
env:
- NODE_VERSION=0.10
- NODE_VERSION=0.12
os:
- linux
- osx
matrix:
allow_failures:
- os: osx
install:
- export CXX="g++-4.8" CC="gcc-4.8"
- rm -rf ~/.nvm
- wget -qO- https://raw.githubusercontent.com/creationix/nvm/v0.26.1/install.sh | bash
- source ~/.nvm/nvm.sh
- nvm install "${NODE_VERSION}"
- npm install -g npm@^2.14.2
- test "x${TRAVIS_OS_NAME}x" = "xosxx" && brew install scons || true
- npm install -g grunt-cli
addons:
apt:
sources:
- ubuntu-toolchain-r-test
packages:
- libboost-all-dev
- libcurl4-openssl-dev
- uuid-dev
- g++-4.8
script: npm run-script ci
## Instruction:
CI: Use default compiler (clang) on OSX and force OSX build-only to pass
This also introduces a more flexible way of defining travis jobs.
## Code After:
sudo: false
language: cpp
matrix:
include:
- env: NODE_VERSION=0.10 BUILD_ONLY=false
os: linux
- env: NODE_VERSION=0.12 BUILD_ONLY=false
os: linux
- env: NODE_VERSION=0.12 BUILD_ONLY=true
os: osx
# Allowed failures must be specified twice - once in "include" to have them
# run, and again in "allowed_failures" to not have them break the build.
# Below are the allowed failures:
- env: NODE_VERSION=0.10 BUILD_ONLY=false
os: osx
- env: NODE_VERSION=0.12 BUILD_ONLY=false
os: osx
allow_failures:
- env: NODE_VERSION=0.10 BUILD_ONLY=false
os: osx
- env: NODE_VERSION=0.12 BUILD_ONLY=false
os: osx
install:
- if test "x${TRAVIS_OS_NAME}x" = "xlinuxx"; then export CXX="g++-4.8" CC="gcc-4.8"; fi
- rm -rf ~/.nvm
- wget -qO- https://raw.githubusercontent.com/creationix/nvm/v0.26.1/install.sh | bash
- source ~/.nvm/nvm.sh
- nvm install "${NODE_VERSION}"
- npm install -g npm@^2.14.2
- if test "x${TRAVIS_OS_NAME}x" = "xosxx"; then brew install scons; fi
- npm install -g grunt-cli
addons:
apt:
sources:
- ubuntu-toolchain-r-test
packages:
- libboost-all-dev
- libcurl4-openssl-dev
- uuid-dev
- g++-4.8
script: if test "x${BUILD_ONLY}x" = "xtruex"; then npm install; else npm run-script ci; fi
| sudo: false
language: cpp
- env:
- - NODE_VERSION=0.10
- - NODE_VERSION=0.12
+ matrix:
+ include:
+ - env: NODE_VERSION=0.10 BUILD_ONLY=false
+ os: linux
+ - env: NODE_VERSION=0.12 BUILD_ONLY=false
+ os: linux
+ - env: NODE_VERSION=0.12 BUILD_ONLY=true
+ os: osx
- os:
- - linux
- - osx
+ # Allowed failures must be specified twice - once in "include" to have them
+ # run, and again in "allowed_failures" to not have them break the build.
+ # Below are the allowed failures:
- matrix:
+ - env: NODE_VERSION=0.10 BUILD_ONLY=false
+ os: osx
+ - env: NODE_VERSION=0.12 BUILD_ONLY=false
+ os: osx
+
allow_failures:
+ - env: NODE_VERSION=0.10 BUILD_ONLY=false
- - os: osx
? ^
+ os: osx
? ^
+ - env: NODE_VERSION=0.12 BUILD_ONLY=false
+ os: osx
install:
- - export CXX="g++-4.8" CC="gcc-4.8"
+ - if test "x${TRAVIS_OS_NAME}x" = "xlinuxx"; then export CXX="g++-4.8" CC="gcc-4.8"; fi
- rm -rf ~/.nvm
- wget -qO- https://raw.githubusercontent.com/creationix/nvm/v0.26.1/install.sh | bash
- source ~/.nvm/nvm.sh
- nvm install "${NODE_VERSION}"
- npm install -g npm@^2.14.2
- - test "x${TRAVIS_OS_NAME}x" = "xosxx" && brew install scons || true
? ^^ ^^^^^^^
+ - if test "x${TRAVIS_OS_NAME}x" = "xosxx"; then brew install scons; fi
? +++ + ^^^^ + ^^
- npm install -g grunt-cli
addons:
apt:
sources:
- ubuntu-toolchain-r-test
packages:
- libboost-all-dev
- libcurl4-openssl-dev
- uuid-dev
- g++-4.8
- script: npm run-script ci
+ script: if test "x${BUILD_ONLY}x" = "xtruex"; then npm install; else npm run-script ci; fi | 34 | 0.918919 | 23 | 11 |
302c3352b53194db9d91c8af05358f2d7a21c07b | promgen/templates/promgen/shard_list.html | promgen/templates/promgen/shard_list.html | {% extends "base.html" %}
{% load i18n %}
{% block content %}
<div class="page-header">
<h1>Shards</h1>
</div>
<div class="row">
{% for shard in shard_list %}
<div class="col-md-6">
<div class="panel panel-default">
<div class="panel-heading"><a href="{% url 'shard-detail' shard.id %}">{{ shard.name }}</a></div>
<div class="panel-body">
<dl class="dl-horizontal">
<dt>Services</dt>
<dd>{{ shard.service_set.count }}</dd>
<dt>Projects</dt>
<dd>{{ shard.service_set__project_set }}</dd>
</dl>
</div>
<table class="table">
<tr>
<th>Service</th>
</tr>
{% for service in shard.service_set.all %}
<tr>
<td><a href="{% url 'service-detail' service.id %}">{{ service.name }}</a></td>
</tr>
{% endfor %}
</table>
</div>
</div>
{% endfor %}
{% endblock %}
| {% extends "base.html" %}
{% load i18n %}
{% block content %}
<div class="page-header">
<h1>Shards</h1>
</div>
<div class="row">
{% for shard in shard_list %}
<div class="col-md-6">
<div class="panel panel-default">
<div class="panel-heading">
<a href="{% url 'shard-detail' shard.id %}">{{ shard.name }}</a>
<span class="pull-right form-inline">
<a href="{% url 'service-new' shard.id %}" class="btn btn-primary btn-xs">{% trans "Register Service" %}</a>
</span>
</div>
<div class="panel-body">
<dl class="dl-horizontal">
<dt>Services</dt>
<dd>{{ shard.service_set.count }}</dd>
<dt>Projects</dt>
<dd>{{ shard.service_set__project_set }}</dd>
</dl>
</div>
<table class="table">
<tr>
<th>Service</th>
</tr>
{% for service in shard.service_set.all %}
<tr>
<td><a href="{% url 'service-detail' service.id %}">{{ service.name }}</a></td>
</tr>
{% endfor %}
</table>
</div>
</div>
{% endfor %}
{% endblock %}
| Add 'Register Service' link to shard-list | Add 'Register Service' link to shard-list
| HTML | mit | kfdm/promgen,kfdm/promgen,kfdm/promgen,kfdm/promgen | html | ## Code Before:
{% extends "base.html" %}
{% load i18n %}
{% block content %}
<div class="page-header">
<h1>Shards</h1>
</div>
<div class="row">
{% for shard in shard_list %}
<div class="col-md-6">
<div class="panel panel-default">
<div class="panel-heading"><a href="{% url 'shard-detail' shard.id %}">{{ shard.name }}</a></div>
<div class="panel-body">
<dl class="dl-horizontal">
<dt>Services</dt>
<dd>{{ shard.service_set.count }}</dd>
<dt>Projects</dt>
<dd>{{ shard.service_set__project_set }}</dd>
</dl>
</div>
<table class="table">
<tr>
<th>Service</th>
</tr>
{% for service in shard.service_set.all %}
<tr>
<td><a href="{% url 'service-detail' service.id %}">{{ service.name }}</a></td>
</tr>
{% endfor %}
</table>
</div>
</div>
{% endfor %}
{% endblock %}
## Instruction:
Add 'Register Service' link to shard-list
## Code After:
{% extends "base.html" %}
{% load i18n %}
{% block content %}
<div class="page-header">
<h1>Shards</h1>
</div>
<div class="row">
{% for shard in shard_list %}
<div class="col-md-6">
<div class="panel panel-default">
<div class="panel-heading">
<a href="{% url 'shard-detail' shard.id %}">{{ shard.name }}</a>
<span class="pull-right form-inline">
<a href="{% url 'service-new' shard.id %}" class="btn btn-primary btn-xs">{% trans "Register Service" %}</a>
</span>
</div>
<div class="panel-body">
<dl class="dl-horizontal">
<dt>Services</dt>
<dd>{{ shard.service_set.count }}</dd>
<dt>Projects</dt>
<dd>{{ shard.service_set__project_set }}</dd>
</dl>
</div>
<table class="table">
<tr>
<th>Service</th>
</tr>
{% for service in shard.service_set.all %}
<tr>
<td><a href="{% url 'service-detail' service.id %}">{{ service.name }}</a></td>
</tr>
{% endfor %}
</table>
</div>
</div>
{% endfor %}
{% endblock %}
| {% extends "base.html" %}
{% load i18n %}
{% block content %}
<div class="page-header">
<h1>Shards</h1>
</div>
<div class="row">
{% for shard in shard_list %}
<div class="col-md-6">
<div class="panel panel-default">
+ <div class="panel-heading">
- <div class="panel-heading"><a href="{% url 'shard-detail' shard.id %}">{{ shard.name }}</a></div>
? ---- ^^^^^^^^^^^^^^^^^^^^^^ ------
+ <a href="{% url 'shard-detail' shard.id %}">{{ shard.name }}</a>
? ^
+ <span class="pull-right form-inline">
+ <a href="{% url 'service-new' shard.id %}" class="btn btn-primary btn-xs">{% trans "Register Service" %}</a>
+ </span>
+ </div>
<div class="panel-body">
<dl class="dl-horizontal">
<dt>Services</dt>
<dd>{{ shard.service_set.count }}</dd>
<dt>Projects</dt>
<dd>{{ shard.service_set__project_set }}</dd>
</dl>
</div>
<table class="table">
<tr>
<th>Service</th>
</tr>
{% for service in shard.service_set.all %}
<tr>
<td><a href="{% url 'service-detail' service.id %}">{{ service.name }}</a></td>
</tr>
{% endfor %}
</table>
</div>
</div>
{% endfor %}
{% endblock %} | 7 | 0.194444 | 6 | 1 |
04ce507b3b0920f4d1c53995eb8646ae99311090 | src/groovy/com/hida/imms/DomainClassUtil.groovy | src/groovy/com/hida/imms/DomainClassUtil.groovy | package com.hida.imms
import grails.util.Holders
import org.codehaus.groovy.grails.commons.GrailsClass
class DomainClassUtil {
static def getPrimaryKey(def domainClass) {
GrailsClass domainClz = domainClass instanceof GrailsClass ? domainClass :
Holders.grailsApplication.domainClasses.find { it.clazz == domainClass }
def identity = (domainClz.hasProperty('mapping') ? domainClz.clazz.mapping?.getMapping()?.getIdentity():null)
(identity.respondsTo("getPropertyNames") || identity.hasProperty("propertyNames")) ? identity.getPropertyNames() : ["id"]
// find alternative of : identity instanceof org.codehaus.groovy.grails.orm.hibernate.cfg.CompositeIdentity
// to remove dependency to Hibernate plugin
}
}
| package com.hida.imms
import grails.util.Holders
import org.codehaus.groovy.grails.commons.GrailsClass
import org.codehaus.groovy.grails.orm.hibernate.cfg.CompositeIdentity
import org.codehaus.groovy.grails.orm.hibernate.cfg.GrailsDomainBinder
import org.codehaus.groovy.grails.orm.hibernate.cfg.Mapping
class DomainClassUtil {
static final GrailsDomainBinder DOMAIN_BINDER = new GrailsDomainBinder()
static def getPrimaryKey(def domainClass) {
// GrailsClass domainClz = domainClass instanceof GrailsClass ? domainClass :
// Holders.grailsApplication.domainClasses.find { it.clazz == domainClass }
Mapping mapping = DOMAIN_BINDER.getMapping(domainClass);
if (mapping != null && mapping.getIdentity() instanceof CompositeIdentity) {
CompositeIdentity identity = (CompositeIdentity) mapping.getIdentity();
return Arrays.asList(identity.getPropertyNames())
} else {
["id"]
}
// def identity = (domainClz.hasProperty('mapping') ? domainClz.clazz.mapping?.getMapping()?.getIdentity():null)
// (identity.respondsTo("getPropertyNames") || identity.hasProperty("propertyNames")) ? identity.getPropertyNames() : ["id"]
// find alternative of : identity instanceof org.codehaus.groovy.grails.orm.hibernate.cfg.CompositeIdentity
// to remove dependency to Hibernate plugin
}
}
| Fix the way to determine composite key | Fix the way to determine composite key
| Groovy | apache-2.0 | arief-hidayat/imms-ui-plugin,arief-hidayat/imms-ui-plugin | groovy | ## Code Before:
package com.hida.imms
import grails.util.Holders
import org.codehaus.groovy.grails.commons.GrailsClass
class DomainClassUtil {
static def getPrimaryKey(def domainClass) {
GrailsClass domainClz = domainClass instanceof GrailsClass ? domainClass :
Holders.grailsApplication.domainClasses.find { it.clazz == domainClass }
def identity = (domainClz.hasProperty('mapping') ? domainClz.clazz.mapping?.getMapping()?.getIdentity():null)
(identity.respondsTo("getPropertyNames") || identity.hasProperty("propertyNames")) ? identity.getPropertyNames() : ["id"]
// find alternative of : identity instanceof org.codehaus.groovy.grails.orm.hibernate.cfg.CompositeIdentity
// to remove dependency to Hibernate plugin
}
}
## Instruction:
Fix the way to determine composite key
## Code After:
package com.hida.imms
import grails.util.Holders
import org.codehaus.groovy.grails.commons.GrailsClass
import org.codehaus.groovy.grails.orm.hibernate.cfg.CompositeIdentity
import org.codehaus.groovy.grails.orm.hibernate.cfg.GrailsDomainBinder
import org.codehaus.groovy.grails.orm.hibernate.cfg.Mapping
class DomainClassUtil {
static final GrailsDomainBinder DOMAIN_BINDER = new GrailsDomainBinder()
static def getPrimaryKey(def domainClass) {
// GrailsClass domainClz = domainClass instanceof GrailsClass ? domainClass :
// Holders.grailsApplication.domainClasses.find { it.clazz == domainClass }
Mapping mapping = DOMAIN_BINDER.getMapping(domainClass);
if (mapping != null && mapping.getIdentity() instanceof CompositeIdentity) {
CompositeIdentity identity = (CompositeIdentity) mapping.getIdentity();
return Arrays.asList(identity.getPropertyNames())
} else {
["id"]
}
// def identity = (domainClz.hasProperty('mapping') ? domainClz.clazz.mapping?.getMapping()?.getIdentity():null)
// (identity.respondsTo("getPropertyNames") || identity.hasProperty("propertyNames")) ? identity.getPropertyNames() : ["id"]
// find alternative of : identity instanceof org.codehaus.groovy.grails.orm.hibernate.cfg.CompositeIdentity
// to remove dependency to Hibernate plugin
}
}
| package com.hida.imms
import grails.util.Holders
import org.codehaus.groovy.grails.commons.GrailsClass
+ import org.codehaus.groovy.grails.orm.hibernate.cfg.CompositeIdentity
+ import org.codehaus.groovy.grails.orm.hibernate.cfg.GrailsDomainBinder
+ import org.codehaus.groovy.grails.orm.hibernate.cfg.Mapping
class DomainClassUtil {
+
+ static final GrailsDomainBinder DOMAIN_BINDER = new GrailsDomainBinder()
static def getPrimaryKey(def domainClass) {
- GrailsClass domainClz = domainClass instanceof GrailsClass ? domainClass :
+ // GrailsClass domainClz = domainClass instanceof GrailsClass ? domainClass :
? ++
- Holders.grailsApplication.domainClasses.find { it.clazz == domainClass }
+ // Holders.grailsApplication.domainClasses.find { it.clazz == domainClass }
? ++
+
+ Mapping mapping = DOMAIN_BINDER.getMapping(domainClass);
+ if (mapping != null && mapping.getIdentity() instanceof CompositeIdentity) {
+ CompositeIdentity identity = (CompositeIdentity) mapping.getIdentity();
+ return Arrays.asList(identity.getPropertyNames())
+ } else {
+ ["id"]
+ }
- def identity = (domainClz.hasProperty('mapping') ? domainClz.clazz.mapping?.getMapping()?.getIdentity():null)
+ // def identity = (domainClz.hasProperty('mapping') ? domainClz.clazz.mapping?.getMapping()?.getIdentity():null)
? ++
- (identity.respondsTo("getPropertyNames") || identity.hasProperty("propertyNames")) ? identity.getPropertyNames() : ["id"]
+ // (identity.respondsTo("getPropertyNames") || identity.hasProperty("propertyNames")) ? identity.getPropertyNames() : ["id"]
? ++
// find alternative of : identity instanceof org.codehaus.groovy.grails.orm.hibernate.cfg.CompositeIdentity
// to remove dependency to Hibernate plugin
}
} | 21 | 1.4 | 17 | 4 |
f4c4548ded94b031ba29cf8107db3cfbd02869d3 | src/app.php | src/app.php | <?php
use Symfony\Component\Routing;
$routes = new Routing\RouteCollection();
$routes->add('leap_year', new Routing\Route('/is-leap-year/{year}', array(
'year' => null,
'_controller' => 'Calendar\\Controller\\LeapYearController::indexAction'
)));
return $routes; | <?php
use Symfony\Component\Routing;
$routes = new Routing\RouteCollection();
$routes->add('leap_year', new Routing\Route('/is-leap-year/{year}', array(
'year' => null,
'_controller' => 'Calendar\\Controller\\LeapYearController::indexAction'
)));
$routes->add('prime_number', new Routing\Route('is-prime-number/{number}', array(
'number' => null,
'_controller' => 'Calendar\\Controller\\LeapYearController::primeAction'
)));
return $routes; | Add a new test route | Add a new test route
| PHP | mit | mangelsnc/Scarlett | php | ## Code Before:
<?php
use Symfony\Component\Routing;
$routes = new Routing\RouteCollection();
$routes->add('leap_year', new Routing\Route('/is-leap-year/{year}', array(
'year' => null,
'_controller' => 'Calendar\\Controller\\LeapYearController::indexAction'
)));
return $routes;
## Instruction:
Add a new test route
## Code After:
<?php
use Symfony\Component\Routing;
$routes = new Routing\RouteCollection();
$routes->add('leap_year', new Routing\Route('/is-leap-year/{year}', array(
'year' => null,
'_controller' => 'Calendar\\Controller\\LeapYearController::indexAction'
)));
$routes->add('prime_number', new Routing\Route('is-prime-number/{number}', array(
'number' => null,
'_controller' => 'Calendar\\Controller\\LeapYearController::primeAction'
)));
return $routes; | <?php
use Symfony\Component\Routing;
$routes = new Routing\RouteCollection();
$routes->add('leap_year', new Routing\Route('/is-leap-year/{year}', array(
'year' => null,
'_controller' => 'Calendar\\Controller\\LeapYearController::indexAction'
)));
+ $routes->add('prime_number', new Routing\Route('is-prime-number/{number}', array(
+ 'number' => null,
+ '_controller' => 'Calendar\\Controller\\LeapYearController::primeAction'
+ )));
+
return $routes; | 5 | 0.416667 | 5 | 0 |
161cdb45def0620ba716d3c043d90727f4cbefe4 | catalog/Background_Processing/scheduling.yml | catalog/Background_Processing/scheduling.yml | name: Scheduling
description: Execute tasks on a schedule
projects:
- clockwork
- latimes/craken
- recurrent
- resque-scheduler
- rufus-scheduler
- sidecloq
- sidekiq-cron
- sidekiq-scheduler
- simple_scheduler
- Swirrl/Taskit
- whenever
| name: Scheduling
description: Execute tasks on a schedule
projects:
- clockwork
- latimes/craken
- que-scheduler
- recurrent
- resque-scheduler
- rufus-scheduler
- sidecloq
- sidekiq-cron
- sidekiq-scheduler
- simple_scheduler
- Swirrl/Taskit
- whenever
| Add que-scheduler to Background Processing (Scheduling) category | Add que-scheduler to Background Processing (Scheduling) category
| YAML | mit | rubytoolbox/catalog | yaml | ## Code Before:
name: Scheduling
description: Execute tasks on a schedule
projects:
- clockwork
- latimes/craken
- recurrent
- resque-scheduler
- rufus-scheduler
- sidecloq
- sidekiq-cron
- sidekiq-scheduler
- simple_scheduler
- Swirrl/Taskit
- whenever
## Instruction:
Add que-scheduler to Background Processing (Scheduling) category
## Code After:
name: Scheduling
description: Execute tasks on a schedule
projects:
- clockwork
- latimes/craken
- que-scheduler
- recurrent
- resque-scheduler
- rufus-scheduler
- sidecloq
- sidekiq-cron
- sidekiq-scheduler
- simple_scheduler
- Swirrl/Taskit
- whenever
| name: Scheduling
description: Execute tasks on a schedule
projects:
- clockwork
- latimes/craken
+ - que-scheduler
- recurrent
- resque-scheduler
- rufus-scheduler
- sidecloq
- sidekiq-cron
- sidekiq-scheduler
- simple_scheduler
- Swirrl/Taskit
- whenever | 1 | 0.071429 | 1 | 0 |
f4bbb244716f9471b520f53ebffaf34a31503cd1 | Web/scripts/CPWeb/__init__.py | Web/scripts/CPWeb/__init__.py | from __future__ import division, absolute_import, print_function
import codecs
import csv
import cStringIO
def get_version():
return 5.0
if __name__ == "__main__":
print('You are using version %s of the Python package for creating CoolProp\' online documentation.'%(get_version()))
print()
| from __future__ import division, absolute_import, print_function
def get_version():
return 5.0
if __name__ == "__main__":
print('You are using version %s of the Python package for creating CoolProp\' online documentation.'%(get_version()))
print()
| Remove unused imports (besides they are Py 2.x only) | Remove unused imports (besides they are Py 2.x only)
| Python | mit | CoolProp/CoolProp,CoolProp/CoolProp,henningjp/CoolProp,henningjp/CoolProp,CoolProp/CoolProp,CoolProp/CoolProp,CoolProp/CoolProp,henningjp/CoolProp,henningjp/CoolProp,henningjp/CoolProp,henningjp/CoolProp,henningjp/CoolProp,CoolProp/CoolProp,CoolProp/CoolProp,henningjp/CoolProp,CoolProp/CoolProp | python | ## Code Before:
from __future__ import division, absolute_import, print_function
import codecs
import csv
import cStringIO
def get_version():
return 5.0
if __name__ == "__main__":
print('You are using version %s of the Python package for creating CoolProp\' online documentation.'%(get_version()))
print()
## Instruction:
Remove unused imports (besides they are Py 2.x only)
## Code After:
from __future__ import division, absolute_import, print_function
def get_version():
return 5.0
if __name__ == "__main__":
print('You are using version %s of the Python package for creating CoolProp\' online documentation.'%(get_version()))
print()
| from __future__ import division, absolute_import, print_function
-
- import codecs
- import csv
- import cStringIO
-
def get_version():
return 5.0
if __name__ == "__main__":
print('You are using version %s of the Python package for creating CoolProp\' online documentation.'%(get_version()))
print() | 5 | 0.357143 | 0 | 5 |
d74c517c7e994299bd24b4a5b1e109bfbd00c8fd | jenkins/build-era.sh | jenkins/build-era.sh | export remote_command="/usr/bin/ssh -i /var/lib/jenkins/.ssh/id_rsa ubuntu@era.eha.io "
$remote_command "cd /opt/infrastructure/docker/images/era/era && /usr/bin/sudo git pull origin master" &&\
$remote_command "/usr/bin/sudo docker build -t era /opt/infrastructure/docker/images/era"
|
cd /opt/infrastructure/docker/images/era/era && /usr/bin/sudo git pull origin master &&\
/usr/bin/sudo docker build -t era /opt/infrastructure/docker/images/era &&\
/usr/bin/sudo docker save era /tmp/era.tar &&\
aws s3 cp /tmp/era.tar s3://eha-docker-repo/era.tar &&\
rm /tmp/era.tar
| Update to build era on jenkins | Update to build era on jenkins
| Shell | apache-2.0 | ecohealthalliance/infrastructure,ecohealthalliance/infrastructure,ecohealthalliance/infrastructure,ecohealthalliance/infrastructure | shell | ## Code Before:
export remote_command="/usr/bin/ssh -i /var/lib/jenkins/.ssh/id_rsa ubuntu@era.eha.io "
$remote_command "cd /opt/infrastructure/docker/images/era/era && /usr/bin/sudo git pull origin master" &&\
$remote_command "/usr/bin/sudo docker build -t era /opt/infrastructure/docker/images/era"
## Instruction:
Update to build era on jenkins
## Code After:
cd /opt/infrastructure/docker/images/era/era && /usr/bin/sudo git pull origin master &&\
/usr/bin/sudo docker build -t era /opt/infrastructure/docker/images/era &&\
/usr/bin/sudo docker save era /tmp/era.tar &&\
aws s3 cp /tmp/era.tar s3://eha-docker-repo/era.tar &&\
rm /tmp/era.tar
| - export remote_command="/usr/bin/ssh -i /var/lib/jenkins/.ssh/id_rsa ubuntu@era.eha.io "
- $remote_command "cd /opt/infrastructure/docker/images/era/era && /usr/bin/sudo git pull origin master" &&\
? ----------------- -
+ cd /opt/infrastructure/docker/images/era/era && /usr/bin/sudo git pull origin master &&\
- $remote_command "/usr/bin/sudo docker build -t era /opt/infrastructure/docker/images/era"
? ----------------- ^
+ /usr/bin/sudo docker build -t era /opt/infrastructure/docker/images/era &&\
? ^^^^
+ /usr/bin/sudo docker save era /tmp/era.tar &&\
+ aws s3 cp /tmp/era.tar s3://eha-docker-repo/era.tar &&\
+ rm /tmp/era.tar
| 8 | 1.6 | 5 | 3 |
3ecdc6f4e150eacdf949574a1d7392872db20a04 | .travis.yml | .travis.yml | sudo: false
language: node_js
branches:
only:
- master
node_js:
- '12'
- '11'
- '10'
- '9'
- '8' | sudo: false
language: node_js
branches:
only:
- master
node_js:
- '12'
- '11'
- '10' | Remove unused versions of Node.js for tests | Remove unused versions of Node.js for tests
| YAML | mit | justcoded/web-starter-kit,justcoded/web-starter-kit | yaml | ## Code Before:
sudo: false
language: node_js
branches:
only:
- master
node_js:
- '12'
- '11'
- '10'
- '9'
- '8'
## Instruction:
Remove unused versions of Node.js for tests
## Code After:
sudo: false
language: node_js
branches:
only:
- master
node_js:
- '12'
- '11'
- '10' | sudo: false
language: node_js
branches:
only:
- master
node_js:
- '12'
- '11'
- '10'
- - '9'
- - '8' | 2 | 0.181818 | 0 | 2 |
951c97659ac274df2542625d7d4f389fed3d9a95 | templates/testing-log.tpl | templates/testing-log.tpl | % include('templates/header.tpl', title='Series')
<ul class="nav nav-tabs">
<li role="presentation"><a href="/series/{{series['message-id']}}">Thread</a></li>
<li role="presentation" class="active"><a href="#">Testing</a></li>
</ul>
<br>
<h3>
%if series.get('merged'):
<span class="label label-primary">Merged</span>
%elif series['testing-passed']:
<span class="label label-success">Passed</span>
%else:
<span class="label label-danger">Failed</span>
%end
</h3>
<br>
<pre>{{log}}</pre>
% include('templates/footer.tpl')
| % include('templates/header.tpl', title='Series')
<ul class="nav nav-tabs">
<li role="presentation"><a href="/series/{{series['message-id']}}">Thread</a></li>
<li role="presentation" class="active"><a href="#">Testing</a></li>
</ul>
<br>
<h3>
%if series.get('merged'):
<span class="label label-primary">Merged</span>
%elif series['testing-start-time']:
<span class="label label-warning">Testing</span>
%elif series['testing-passed']:
<span class="label label-success">Passed</span>
%elif series['testing-passed'] == False:
<span class="label label-danger">Failed</span>
%else:
<span class="label label-default">Not started</span>
%end
</h3>
<br>
%if log:
<pre>{{log}}</pre>
%end
% include('templates/footer.tpl')
| Handle "not started" case in template | testing: Handle "not started" case in template
And hide log if there isn't any
Signed-off-by: Fam Zheng <df9fd617995bad07eac70e27aa3fe5069270e3bd@redhat.com>
| Smarty | mit | pvital/patchew,patchew-project/patchew,patchew-project/patchew,patchew-project/patchew,famz/patchew,pvital/patchew,pvital/patchew,famz/patchew,famz/patchew,famz/patchew,patchew-project/patchew,pvital/patchew | smarty | ## Code Before:
% include('templates/header.tpl', title='Series')
<ul class="nav nav-tabs">
<li role="presentation"><a href="/series/{{series['message-id']}}">Thread</a></li>
<li role="presentation" class="active"><a href="#">Testing</a></li>
</ul>
<br>
<h3>
%if series.get('merged'):
<span class="label label-primary">Merged</span>
%elif series['testing-passed']:
<span class="label label-success">Passed</span>
%else:
<span class="label label-danger">Failed</span>
%end
</h3>
<br>
<pre>{{log}}</pre>
% include('templates/footer.tpl')
## Instruction:
testing: Handle "not started" case in template
And hide log if there isn't any
Signed-off-by: Fam Zheng <df9fd617995bad07eac70e27aa3fe5069270e3bd@redhat.com>
## Code After:
% include('templates/header.tpl', title='Series')
<ul class="nav nav-tabs">
<li role="presentation"><a href="/series/{{series['message-id']}}">Thread</a></li>
<li role="presentation" class="active"><a href="#">Testing</a></li>
</ul>
<br>
<h3>
%if series.get('merged'):
<span class="label label-primary">Merged</span>
%elif series['testing-start-time']:
<span class="label label-warning">Testing</span>
%elif series['testing-passed']:
<span class="label label-success">Passed</span>
%elif series['testing-passed'] == False:
<span class="label label-danger">Failed</span>
%else:
<span class="label label-default">Not started</span>
%end
</h3>
<br>
%if log:
<pre>{{log}}</pre>
%end
% include('templates/footer.tpl')
| % include('templates/header.tpl', title='Series')
<ul class="nav nav-tabs">
<li role="presentation"><a href="/series/{{series['message-id']}}">Thread</a></li>
<li role="presentation" class="active"><a href="#">Testing</a></li>
</ul>
<br>
<h3>
%if series.get('merged'):
<span class="label label-primary">Merged</span>
+ %elif series['testing-start-time']:
+ <span class="label label-warning">Testing</span>
%elif series['testing-passed']:
<span class="label label-success">Passed</span>
+ %elif series['testing-passed'] == False:
+ <span class="label label-danger">Failed</span>
%else:
- <span class="label label-danger">Failed</span>
? ^^^^ ^ ^^
+ <span class="label label-default">Not started</span>
? ++ ^^^ ^^^^^^ ^^
%end
</h3>
<br>
+ %if log:
- <pre>{{log}}</pre>
+ <pre>{{log}}</pre>
? ++++
+ %end
% include('templates/footer.tpl') | 10 | 0.454545 | 8 | 2 |
f87c291ce7ee7a54a987f3d8bd1a43e1cee2b6a0 | setup.py | setup.py |
from setuptools import setup
setup(
name = 'diabric',
version = '0.1',
license = 'MIT',
description = 'Diabolically atomic Python Fabric fabfile tasks and utilities.',
long_description = open('README.md').read(),
keywords = 'fabric fabfile boto ec2 virtualenv python wsgi webapp deployment',
url = 'https://github.com/todddeluca/diabric',
author = 'Todd Francis DeLuca',
author_email = 'todddeluca@yahoo.com',
classifiers = ['License :: OSI Approved :: MIT License',
'Development Status :: 2 - Pre-Alpha',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
],
packages = ['diabric'],
install_requires = ['setuptools', 'Fabric>=1.4','boto>=2.3'],
include_package_data = True,
package_data = {'' : ['README.md', 'LICENSE.txt']},
)
|
import os
from setuptools import setup, find_packages
setup(
name = 'diabric',
version = '0.1.1',
license = 'MIT',
description = 'Diabolically atomic Python Fabric fabfile tasks and utilities.',
long_description = open(os.path.join(os.path.dirname(__file__), 'README.md')).read(),
keywords = 'fabric fabfile boto ec2 virtualenv python wsgi webapp deployment',
url = 'https://github.com/todddeluca/diabric',
author = 'Todd Francis DeLuca',
author_email = 'todddeluca@yahoo.com',
classifiers = ['License :: OSI Approved :: MIT License',
'Development Status :: 2 - Pre-Alpha',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
],
install_requires = ['setuptools', 'Fabric>=1.4','boto>=2.3'],
packages = ['diabric'],
)
| Fix pip installation failure involving README.md | Fix pip installation failure involving README.md
Two bugs with "easy" fixes:
- README.md was not being included in the source distribution. I'm not
sure what I did to fix it, since the distutils/setuptools/distribute
docs are quite incomplete and convoluted on something so
straight-forward. The fix: I removed some 'package_data' type lines
from setup.py, and now LICENSE.txt and README.md are being included.
- In addition to README.md not being included in the distribution
(tar.gz file) it was being read by setup.py as open('README.md'),
which is relative to the current working directory, which I'm not sure
pip sets to the directory containing setup.py. The fix is to open the
file using the directory of setup.py, via the __file__ attribute.
| Python | mit | todddeluca/diabric | python | ## Code Before:
from setuptools import setup
setup(
name = 'diabric',
version = '0.1',
license = 'MIT',
description = 'Diabolically atomic Python Fabric fabfile tasks and utilities.',
long_description = open('README.md').read(),
keywords = 'fabric fabfile boto ec2 virtualenv python wsgi webapp deployment',
url = 'https://github.com/todddeluca/diabric',
author = 'Todd Francis DeLuca',
author_email = 'todddeluca@yahoo.com',
classifiers = ['License :: OSI Approved :: MIT License',
'Development Status :: 2 - Pre-Alpha',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
],
packages = ['diabric'],
install_requires = ['setuptools', 'Fabric>=1.4','boto>=2.3'],
include_package_data = True,
package_data = {'' : ['README.md', 'LICENSE.txt']},
)
## Instruction:
Fix pip installation failure involving README.md
Two bugs with "easy" fixes:
- README.md was not being included in the source distribution. I'm not
sure what I did to fix it, since the distutils/setuptools/distribute
docs are quite incomplete and convoluted on something so
straight-forward. The fix: I removed some 'package_data' type lines
from setup.py, and now LICENSE.txt and README.md are being included.
- In addition to README.md not being included in the distribution
(tar.gz file) it was being read by setup.py as open('README.md'),
which is relative to the current working directory, which I'm not sure
pip sets to the directory containing setup.py. The fix is to open the
file using the directory of setup.py, via the __file__ attribute.
## Code After:
import os
from setuptools import setup, find_packages
setup(
name = 'diabric',
version = '0.1.1',
license = 'MIT',
description = 'Diabolically atomic Python Fabric fabfile tasks and utilities.',
long_description = open(os.path.join(os.path.dirname(__file__), 'README.md')).read(),
keywords = 'fabric fabfile boto ec2 virtualenv python wsgi webapp deployment',
url = 'https://github.com/todddeluca/diabric',
author = 'Todd Francis DeLuca',
author_email = 'todddeluca@yahoo.com',
classifiers = ['License :: OSI Approved :: MIT License',
'Development Status :: 2 - Pre-Alpha',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
],
install_requires = ['setuptools', 'Fabric>=1.4','boto>=2.3'],
packages = ['diabric'],
)
|
+ import os
- from setuptools import setup
+ from setuptools import setup, find_packages
? +++++++++++++++
setup(
name = 'diabric',
- version = '0.1',
+ version = '0.1.1',
? ++
license = 'MIT',
description = 'Diabolically atomic Python Fabric fabfile tasks and utilities.',
- long_description = open('README.md').read(),
+ long_description = open(os.path.join(os.path.dirname(__file__), 'README.md')).read(),
keywords = 'fabric fabfile boto ec2 virtualenv python wsgi webapp deployment',
url = 'https://github.com/todddeluca/diabric',
author = 'Todd Francis DeLuca',
author_email = 'todddeluca@yahoo.com',
classifiers = ['License :: OSI Approved :: MIT License',
'Development Status :: 2 - Pre-Alpha',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
],
+ install_requires = ['setuptools', 'Fabric>=1.4','boto>=2.3'],
packages = ['diabric'],
- install_requires = ['setuptools', 'Fabric>=1.4','boto>=2.3'],
- include_package_data = True,
- package_data = {'' : ['README.md', 'LICENSE.txt']},
)
| 11 | 0.458333 | 5 | 6 |
86a0664d4c7d131f7d1afca6c021019083cc21b2 | README.md | README.md |




FuntastyKit iOS framework contains:
- MVVM-C architecture used at The Funtasty, the template for this architecture can be found at https://github.com/thefuntasty/MVVM-C-Template
- Service holder for code injection of services
- Some regularly used UIKit extensions
- Protocols for simple initialization from XIB files, storyboards and for handling keyboard
- Hairline constraint for one-pixel designs
## Installation
If use CocoaPods for dependency management add following line to your `Podfile`:
```ruby
pod 'FuntastyKit', '~> 1.4'
```
|




FuntastyKit iOS framework contains:
- MVVM-C architecture used at The Funtasty, the template for this architecture can be found at https://github.com/thefuntasty/MVVM-C-Template
- Service holder for code injection of services
- Some regularly used UIKit extensions
- Protocols for simple initialization from XIB files, storyboards and for handling keyboard
- Hairline constraint for one-pixel designs
## Installation
If you want to use CocoaPods for dependency management, add following line to your `Podfile`:
```ruby
pod 'FuntastyKit', '~> 1.4'
```
| Improve grammar in installation instruction | Improve grammar in installation instruction
Co-Authored-By: mkj-is <45cb3fe91f6af648b4c26f281b7e01e2297e0d27@gmail.com> | Markdown | mit | thefuntasty/FuntastyKit,thefuntasty/FuntastyKit | markdown | ## Code Before:




FuntastyKit iOS framework contains:
- MVVM-C architecture used at The Funtasty, the template for this architecture can be found at https://github.com/thefuntasty/MVVM-C-Template
- Service holder for code injection of services
- Some regularly used UIKit extensions
- Protocols for simple initialization from XIB files, storyboards and for handling keyboard
- Hairline constraint for one-pixel designs
## Installation
If use CocoaPods for dependency management add following line to your `Podfile`:
```ruby
pod 'FuntastyKit', '~> 1.4'
```
## Instruction:
Improve grammar in installation instruction
Co-Authored-By: mkj-is <45cb3fe91f6af648b4c26f281b7e01e2297e0d27@gmail.com>
## Code After:




FuntastyKit iOS framework contains:
- MVVM-C architecture used at The Funtasty, the template for this architecture can be found at https://github.com/thefuntasty/MVVM-C-Template
- Service holder for code injection of services
- Some regularly used UIKit extensions
- Protocols for simple initialization from XIB files, storyboards and for handling keyboard
- Hairline constraint for one-pixel designs
## Installation
If you want to use CocoaPods for dependency management, add following line to your `Podfile`:
```ruby
pod 'FuntastyKit', '~> 1.4'
```
|




FuntastyKit iOS framework contains:
- MVVM-C architecture used at The Funtasty, the template for this architecture can be found at https://github.com/thefuntasty/MVVM-C-Template
- Service holder for code injection of services
- Some regularly used UIKit extensions
- Protocols for simple initialization from XIB files, storyboards and for handling keyboard
- Hairline constraint for one-pixel designs
## Installation
- If use CocoaPods for dependency management add following line to your `Podfile`:
+ If you want to use CocoaPods for dependency management, add following line to your `Podfile`:
? ++++++++++++ +
```ruby
pod 'FuntastyKit', '~> 1.4'
``` | 2 | 0.095238 | 1 | 1 |
7b968518dc38bdf460bc76be2c9bfececb68ff29 | app/assets/javascripts/routes/popup.route.js.coffee | app/assets/javascripts/routes/popup.route.js.coffee |
Wheelmap.PopupRoute = Wheelmap.MapRoute.extend
model: (params)->
@get('store').find('node', params.node_id)
setupController: (controller, model, queryParams)->
@_super(controller, model, queryParams)
@controllerFor('map').set('poppingNode', controller)
renderTemplate: (controller, model)->
@render 'node-popup',
outlet: 'popup',
controller: controller |
Wheelmap.PopupRoute = Wheelmap.MapRoute.extend
model: (params, queryParams)->
self = @
@get('store').find('node', params.node_id).then(null, ()->
# if node was not found
self.transitionTo('index')
)
setupController: (controller, model, queryParams)->
@_super(controller, model, queryParams)
@controllerFor('map').set('poppingNode', controller)
renderTemplate: (controller, model)->
@render 'node-popup',
outlet: 'popup',
controller: controller | Fix blank page if node by id was not found. | Fix blank page if node by id was not found.
| CoffeeScript | agpl-3.0 | sozialhelden/wheelmap,sozialhelden/wheelmap,sozialhelden/wheelmap,sozialhelden/wheelmap,sozialhelden/wheelmap | coffeescript | ## Code Before:
Wheelmap.PopupRoute = Wheelmap.MapRoute.extend
model: (params)->
@get('store').find('node', params.node_id)
setupController: (controller, model, queryParams)->
@_super(controller, model, queryParams)
@controllerFor('map').set('poppingNode', controller)
renderTemplate: (controller, model)->
@render 'node-popup',
outlet: 'popup',
controller: controller
## Instruction:
Fix blank page if node by id was not found.
## Code After:
Wheelmap.PopupRoute = Wheelmap.MapRoute.extend
model: (params, queryParams)->
self = @
@get('store').find('node', params.node_id).then(null, ()->
# if node was not found
self.transitionTo('index')
)
setupController: (controller, model, queryParams)->
@_super(controller, model, queryParams)
@controllerFor('map').set('poppingNode', controller)
renderTemplate: (controller, model)->
@render 'node-popup',
outlet: 'popup',
controller: controller |
Wheelmap.PopupRoute = Wheelmap.MapRoute.extend
- model: (params)->
+ model: (params, queryParams)->
+ self = @
+
- @get('store').find('node', params.node_id)
+ @get('store').find('node', params.node_id).then(null, ()->
? ++++++++++++++++
+ # if node was not found
+ self.transitionTo('index')
+ )
setupController: (controller, model, queryParams)->
@_super(controller, model, queryParams)
@controllerFor('map').set('poppingNode', controller)
renderTemplate: (controller, model)->
@render 'node-popup',
outlet: 'popup',
controller: controller | 9 | 0.642857 | 7 | 2 |
fa129d218fb6b6c38300515e24cdd1155aed4025 | spec/support/external_request.rb | spec/support/external_request.rb | require 'webmock/rspec'
WebMock.disable_net_connect!(allow_localhost: true, allow: /stripe.com/)
RSpec.configure do |config|
config.before(:each) do
mock_commercial_request
end
end
def mock_commercial_request
response = {
author_name: 'Confreaks',
html: '<iframe width="560" height="315" frameborder="0" allowfullscreen></iframe>',
thumbnail_width: 480,
thumbnail_url: '/images/rails.png',
provider_name: 'YouTube',
width: 459,
type: 'video',
provider_url: 'http://www.youtube.com/',
version: '1.0',
thumbnail_height: 360,
title: 'RailsConf 2014 - Closing Keynote by Aaron Patterson',
author_url: 'https://www.youtube.com/user/Confreaks',
height: 344
}
WebMock.stub_request(:get, /.*youtube.*/)
.to_return(status: 200, body: response.to_json, headers: {})
end
| require 'webmock/rspec'
driver_urls = Webdrivers::Common.subclasses.map do |driver|
Addressable::URI.parse(driver.base_url).host
end
WebMock.disable_net_connect!(allow_localhost: true, allow: [*driver_urls, /stripe.com/])
RSpec.configure do |config|
config.before(:each) do
mock_commercial_request
end
end
def mock_commercial_request
response = {
author_name: 'Confreaks',
html: '<iframe width="560" height="315" frameborder="0" allowfullscreen></iframe>',
thumbnail_width: 480,
thumbnail_url: '/images/rails.png',
provider_name: 'YouTube',
width: 459,
type: 'video',
provider_url: 'http://www.youtube.com/',
version: '1.0',
thumbnail_height: 360,
title: 'RailsConf 2014 - Closing Keynote by Aaron Patterson',
author_url: 'https://www.youtube.com/user/Confreaks',
height: 344
}
WebMock.stub_request(:get, /.*youtube.*/)
.to_return(status: 200, body: response.to_json, headers: {})
end
| Whitelist Webdrivers update URLs in WebMock | Whitelist Webdrivers update URLs in WebMock
Resolves #2557:
$ docker-compose run --rm osem bundle exec rspec --tag js
…
WebMock::NetConnectNotAllowedError
See titusfortner/webdrivers#109 for details.
| Ruby | mit | AndrewKvalheim/osem,openSUSE/osem,hennevogel/osem,AndrewKvalheim/osem,differentreality/osem,openSUSE/osem,differentreality/osem,differentreality/osem,differentreality/osem,hennevogel/osem,SeaGL/osem,hennevogel/osem,openSUSE/osem,hennevogel/osem,AndrewKvalheim/osem,SeaGL/osem,SeaGL/osem | ruby | ## Code Before:
require 'webmock/rspec'
WebMock.disable_net_connect!(allow_localhost: true, allow: /stripe.com/)
RSpec.configure do |config|
config.before(:each) do
mock_commercial_request
end
end
def mock_commercial_request
response = {
author_name: 'Confreaks',
html: '<iframe width="560" height="315" frameborder="0" allowfullscreen></iframe>',
thumbnail_width: 480,
thumbnail_url: '/images/rails.png',
provider_name: 'YouTube',
width: 459,
type: 'video',
provider_url: 'http://www.youtube.com/',
version: '1.0',
thumbnail_height: 360,
title: 'RailsConf 2014 - Closing Keynote by Aaron Patterson',
author_url: 'https://www.youtube.com/user/Confreaks',
height: 344
}
WebMock.stub_request(:get, /.*youtube.*/)
.to_return(status: 200, body: response.to_json, headers: {})
end
## Instruction:
Whitelist Webdrivers update URLs in WebMock
Resolves #2557:
$ docker-compose run --rm osem bundle exec rspec --tag js
…
WebMock::NetConnectNotAllowedError
See titusfortner/webdrivers#109 for details.
## Code After:
require 'webmock/rspec'
driver_urls = Webdrivers::Common.subclasses.map do |driver|
Addressable::URI.parse(driver.base_url).host
end
WebMock.disable_net_connect!(allow_localhost: true, allow: [*driver_urls, /stripe.com/])
RSpec.configure do |config|
config.before(:each) do
mock_commercial_request
end
end
def mock_commercial_request
response = {
author_name: 'Confreaks',
html: '<iframe width="560" height="315" frameborder="0" allowfullscreen></iframe>',
thumbnail_width: 480,
thumbnail_url: '/images/rails.png',
provider_name: 'YouTube',
width: 459,
type: 'video',
provider_url: 'http://www.youtube.com/',
version: '1.0',
thumbnail_height: 360,
title: 'RailsConf 2014 - Closing Keynote by Aaron Patterson',
author_url: 'https://www.youtube.com/user/Confreaks',
height: 344
}
WebMock.stub_request(:get, /.*youtube.*/)
.to_return(status: 200, body: response.to_json, headers: {})
end
| require 'webmock/rspec'
+ driver_urls = Webdrivers::Common.subclasses.map do |driver|
+ Addressable::URI.parse(driver.base_url).host
+ end
- WebMock.disable_net_connect!(allow_localhost: true, allow: /stripe.com/)
+ WebMock.disable_net_connect!(allow_localhost: true, allow: [*driver_urls, /stripe.com/])
? +++++++++++++++ +
RSpec.configure do |config|
config.before(:each) do
mock_commercial_request
end
end
def mock_commercial_request
response = {
author_name: 'Confreaks',
html: '<iframe width="560" height="315" frameborder="0" allowfullscreen></iframe>',
thumbnail_width: 480,
thumbnail_url: '/images/rails.png',
provider_name: 'YouTube',
width: 459,
type: 'video',
provider_url: 'http://www.youtube.com/',
version: '1.0',
thumbnail_height: 360,
title: 'RailsConf 2014 - Closing Keynote by Aaron Patterson',
author_url: 'https://www.youtube.com/user/Confreaks',
height: 344
}
WebMock.stub_request(:get, /.*youtube.*/)
.to_return(status: 200, body: response.to_json, headers: {})
end | 5 | 0.178571 | 4 | 1 |
ba359f397ea9b434df07caa98be01844ab6d132a | lib/naught/null_class_builder/commands/traceable.rb | lib/naught/null_class_builder/commands/traceable.rb | require 'naught/null_class_builder/command'
module Naught::NullClassBuilder::Commands
class Traceable < Naught::NullClassBuilder::Command
def call
defer do |subject|
subject.module_eval do
attr_reader :__file__, :__line__
def initialize(options={})
backtrace = options.fetch(:caller) { Kernel.caller(3) }
@__file__, line, _ = backtrace[0].split(':')
@__line__ = line.to_i
end
end
end
end
end
end
| require 'naught/null_class_builder/command'
module Naught::NullClassBuilder::Commands
class Traceable < Naught::NullClassBuilder::Command
def call
defer do |subject|
subject.module_eval do
attr_reader :__file__, :__line__
def initialize(options={})
backtrace = if RUBY_VERSION.to_f == 1.9
options.fetch(:caller) { Kernel.caller(4) }
else
options.fetch(:caller) { Kernel.caller(3) }
end
@__file__, line, _ = backtrace[0].split(':')
@__line__ = line.to_i
end
end
end
end
end
end
| Fix specs on Ruby 1.9 | Fix specs on Ruby 1.9
| Ruby | mit | intfrr/naught,avdi/naught | ruby | ## Code Before:
require 'naught/null_class_builder/command'
module Naught::NullClassBuilder::Commands
class Traceable < Naught::NullClassBuilder::Command
def call
defer do |subject|
subject.module_eval do
attr_reader :__file__, :__line__
def initialize(options={})
backtrace = options.fetch(:caller) { Kernel.caller(3) }
@__file__, line, _ = backtrace[0].split(':')
@__line__ = line.to_i
end
end
end
end
end
end
## Instruction:
Fix specs on Ruby 1.9
## Code After:
require 'naught/null_class_builder/command'
module Naught::NullClassBuilder::Commands
class Traceable < Naught::NullClassBuilder::Command
def call
defer do |subject|
subject.module_eval do
attr_reader :__file__, :__line__
def initialize(options={})
backtrace = if RUBY_VERSION.to_f == 1.9
options.fetch(:caller) { Kernel.caller(4) }
else
options.fetch(:caller) { Kernel.caller(3) }
end
@__file__, line, _ = backtrace[0].split(':')
@__line__ = line.to_i
end
end
end
end
end
end
| require 'naught/null_class_builder/command'
module Naught::NullClassBuilder::Commands
class Traceable < Naught::NullClassBuilder::Command
def call
defer do |subject|
subject.module_eval do
attr_reader :__file__, :__line__
def initialize(options={})
+ backtrace = if RUBY_VERSION.to_f == 1.9
+ options.fetch(:caller) { Kernel.caller(4) }
+ else
- backtrace = options.fetch(:caller) { Kernel.caller(3) }
? --------- -
+ options.fetch(:caller) { Kernel.caller(3) }
+ end
@__file__, line, _ = backtrace[0].split(':')
@__line__ = line.to_i
end
end
end
end
end
end | 6 | 0.315789 | 5 | 1 |
abd2fc3e615c476a990fb079c36f9667225e9ef0 | src/builtIns/index.js | src/builtIns/index.js | import collectionHandlers from './collections'
// eslint-disable-next-line
const globalObj = Function("return this")();
// built-in object can not be wrapped by Proxies
// their methods expect the object instance as the 'this' instead of the Proxy wrapper
// complex objects are wrapped with a Proxy of instrumented methods
// which switch the proxy to the raw object and to add reactive wiring
const handlers = new Map([
[Map, collectionHandlers],
[Set, collectionHandlers],
[WeakMap, collectionHandlers],
[WeakSet, collectionHandlers],
[Object, false],
[Array, false],
[Int8Array, false],
[Uint8Array, false],
[Uint8ClampedArray, false],
[Int16Array, false],
[Uint16Array, false],
[Int32Array, false],
[Uint32Array, false],
[Float32Array, false],
[Float64Array, false]
])
export function shouldInstrument ({ constructor }) {
const isBuiltIn =
typeof constructor === 'function' &&
constructor.name in globalObj &&
globalObj[constructor.name] === constructor
return !isBuiltIn || handlers.has(constructor)
}
export function getHandlers (obj) {
return handlers.get(obj.constructor)
}
| import collectionHandlers from './collections'
// eslint-disable-next-line
const globalObj = typeof window === 'object' ? window : Function('return this')();
// built-in object can not be wrapped by Proxies
// their methods expect the object instance as the 'this' instead of the Proxy wrapper
// complex objects are wrapped with a Proxy of instrumented methods
// which switch the proxy to the raw object and to add reactive wiring
const handlers = new Map([
[Map, collectionHandlers],
[Set, collectionHandlers],
[WeakMap, collectionHandlers],
[WeakSet, collectionHandlers],
[Object, false],
[Array, false],
[Int8Array, false],
[Uint8Array, false],
[Uint8ClampedArray, false],
[Int16Array, false],
[Uint16Array, false],
[Int32Array, false],
[Uint32Array, false],
[Float32Array, false],
[Float64Array, false]
])
export function shouldInstrument ({ constructor }) {
const isBuiltIn =
typeof constructor === 'function' &&
constructor.name in globalObj &&
globalObj[constructor.name] === constructor
return !isBuiltIn || handlers.has(constructor)
}
export function getHandlers (obj) {
return handlers.get(obj.constructor)
}
| Use window as global by default, fall back to eval | Use window as global by default, fall back to eval
| JavaScript | mit | nx-js/observer-util,nx-js/observer-util | javascript | ## Code Before:
import collectionHandlers from './collections'
// eslint-disable-next-line
const globalObj = Function("return this")();
// built-in object can not be wrapped by Proxies
// their methods expect the object instance as the 'this' instead of the Proxy wrapper
// complex objects are wrapped with a Proxy of instrumented methods
// which switch the proxy to the raw object and to add reactive wiring
const handlers = new Map([
[Map, collectionHandlers],
[Set, collectionHandlers],
[WeakMap, collectionHandlers],
[WeakSet, collectionHandlers],
[Object, false],
[Array, false],
[Int8Array, false],
[Uint8Array, false],
[Uint8ClampedArray, false],
[Int16Array, false],
[Uint16Array, false],
[Int32Array, false],
[Uint32Array, false],
[Float32Array, false],
[Float64Array, false]
])
export function shouldInstrument ({ constructor }) {
const isBuiltIn =
typeof constructor === 'function' &&
constructor.name in globalObj &&
globalObj[constructor.name] === constructor
return !isBuiltIn || handlers.has(constructor)
}
export function getHandlers (obj) {
return handlers.get(obj.constructor)
}
## Instruction:
Use window as global by default, fall back to eval
## Code After:
import collectionHandlers from './collections'
// eslint-disable-next-line
const globalObj = typeof window === 'object' ? window : Function('return this')();
// built-in object can not be wrapped by Proxies
// their methods expect the object instance as the 'this' instead of the Proxy wrapper
// complex objects are wrapped with a Proxy of instrumented methods
// which switch the proxy to the raw object and to add reactive wiring
const handlers = new Map([
[Map, collectionHandlers],
[Set, collectionHandlers],
[WeakMap, collectionHandlers],
[WeakSet, collectionHandlers],
[Object, false],
[Array, false],
[Int8Array, false],
[Uint8Array, false],
[Uint8ClampedArray, false],
[Int16Array, false],
[Uint16Array, false],
[Int32Array, false],
[Uint32Array, false],
[Float32Array, false],
[Float64Array, false]
])
export function shouldInstrument ({ constructor }) {
const isBuiltIn =
typeof constructor === 'function' &&
constructor.name in globalObj &&
globalObj[constructor.name] === constructor
return !isBuiltIn || handlers.has(constructor)
}
export function getHandlers (obj) {
return handlers.get(obj.constructor)
}
| import collectionHandlers from './collections'
// eslint-disable-next-line
- const globalObj = Function("return this")();
+ const globalObj = typeof window === 'object' ? window : Function('return this')();
// built-in object can not be wrapped by Proxies
// their methods expect the object instance as the 'this' instead of the Proxy wrapper
// complex objects are wrapped with a Proxy of instrumented methods
// which switch the proxy to the raw object and to add reactive wiring
const handlers = new Map([
[Map, collectionHandlers],
[Set, collectionHandlers],
[WeakMap, collectionHandlers],
[WeakSet, collectionHandlers],
[Object, false],
[Array, false],
[Int8Array, false],
[Uint8Array, false],
[Uint8ClampedArray, false],
[Int16Array, false],
[Uint16Array, false],
[Int32Array, false],
[Uint32Array, false],
[Float32Array, false],
[Float64Array, false]
])
export function shouldInstrument ({ constructor }) {
const isBuiltIn =
typeof constructor === 'function' &&
constructor.name in globalObj &&
globalObj[constructor.name] === constructor
return !isBuiltIn || handlers.has(constructor)
}
export function getHandlers (obj) {
return handlers.get(obj.constructor)
} | 2 | 0.052632 | 1 | 1 |
ebd8a166cecbdb0945fb18008a8becc17f22358d | cla_public/static-src/javascripts/modules/currencyInputValidation.js | cla_public/static-src/javascripts/modules/currencyInputValidation.js | // ($(this).val() && Number($(this).val()) > 0)
function isValidAmount(input) {
var maxVal = 100000000
var minVal = 0
var value = input.split('.')
var pounds = value[0]
var pence = value[1]
pounds = pounds.replaceAll(/^£|[\s,]+/g, "")
if(pence) {
var lengthOfPence = pence.length
if(lengthOfPence > 2) {
return false
}
}
var amount = Number(pounds * 100)
if(pence) {
amount += Number(pence)
}
if(amount) {
console.log('Amount: ' + amount)
if(amount > maxVal || amount < minVal) {
return false
}
} else {
return false
}
return true
}
exports.isValidAmount = isValidAmount | // ($(this).val() && Number($(this).val()) > 0)
function isValidAmount(input) {
var maxVal = 100000000
var minVal = 0
var pounds;
var pence;
var trimmedInput = input.trim()
var decimalPointPosition = trimmedInput.indexOf('.')
if(decimalPointPosition === -1) {
pounds = trimmedInput
} else {
pounds = trimmedInput.slice(0, decimalPointPosition)
pence = trimmedInput.slice(decimalPointPosition + 1)
}
pounds = pounds.replace(/^£|[\s,]+/g, "")
if(pence) {
if(pence.length > 2) {
return false
}
}
var amount = Number(pounds * 100)
if(pence) {
amount += Number(pence)
}
if(amount) {
if(amount > maxVal || amount < minVal) {
return false
}
} else {
return false
}
return true
}
exports.isValidAmount = isValidAmount | Refactor and split input amount into pounds and pence | Refactor and split input amount into pounds and pence | JavaScript | mit | ministryofjustice/cla_public,ministryofjustice/cla_public,ministryofjustice/cla_public,ministryofjustice/cla_public | javascript | ## Code Before:
// ($(this).val() && Number($(this).val()) > 0)
function isValidAmount(input) {
var maxVal = 100000000
var minVal = 0
var value = input.split('.')
var pounds = value[0]
var pence = value[1]
pounds = pounds.replaceAll(/^£|[\s,]+/g, "")
if(pence) {
var lengthOfPence = pence.length
if(lengthOfPence > 2) {
return false
}
}
var amount = Number(pounds * 100)
if(pence) {
amount += Number(pence)
}
if(amount) {
console.log('Amount: ' + amount)
if(amount > maxVal || amount < minVal) {
return false
}
} else {
return false
}
return true
}
exports.isValidAmount = isValidAmount
## Instruction:
Refactor and split input amount into pounds and pence
## Code After:
// ($(this).val() && Number($(this).val()) > 0)
function isValidAmount(input) {
var maxVal = 100000000
var minVal = 0
var pounds;
var pence;
var trimmedInput = input.trim()
var decimalPointPosition = trimmedInput.indexOf('.')
if(decimalPointPosition === -1) {
pounds = trimmedInput
} else {
pounds = trimmedInput.slice(0, decimalPointPosition)
pence = trimmedInput.slice(decimalPointPosition + 1)
}
pounds = pounds.replace(/^£|[\s,]+/g, "")
if(pence) {
if(pence.length > 2) {
return false
}
}
var amount = Number(pounds * 100)
if(pence) {
amount += Number(pence)
}
if(amount) {
if(amount > maxVal || amount < minVal) {
return false
}
} else {
return false
}
return true
}
exports.isValidAmount = isValidAmount | // ($(this).val() && Number($(this).val()) > 0)
function isValidAmount(input) {
var maxVal = 100000000
var minVal = 0
+ var pounds;
+ var pence;
- var value = input.split('.')
- var pounds = value[0]
- var pence = value[1]
+ var trimmedInput = input.trim()
+
+ var decimalPointPosition = trimmedInput.indexOf('.')
+ if(decimalPointPosition === -1) {
+ pounds = trimmedInput
+ } else {
+ pounds = trimmedInput.slice(0, decimalPointPosition)
+ pence = trimmedInput.slice(decimalPointPosition + 1)
+ }
- pounds = pounds.replaceAll(/^£|[\s,]+/g, "")
? ---
+ pounds = pounds.replace(/^£|[\s,]+/g, "")
if(pence) {
- var lengthOfPence = pence.length
- if(lengthOfPence > 2) {
? -------
+ if(pence.length > 2) {
? ++++++
return false
}
}
var amount = Number(pounds * 100)
if(pence) {
amount += Number(pence)
}
if(amount) {
- console.log('Amount: ' + amount)
if(amount > maxVal || amount < minVal) {
return false
}
} else {
return false
}
return true
}
exports.isValidAmount = isValidAmount | 20 | 0.555556 | 13 | 7 |
c02ddf45a2586949a3e707e984baf5016593a426 | playbooks/external-ip/external-ip.yaml | playbooks/external-ip/external-ip.yaml | ---
# For Amazon EC2 hosts, use the ec2 fact
- hosts: external-ip:&amazon
tasks:
- set_fact: external_hostname="{{ansible_ec2_public_hostname}}"
# For non-EC2 hosts, use a detected external IP address
- hosts: external-ip:!amazon:&apt
sudo: yes
tasks:
- name: Install curl
apt: name=curl state=present
- hosts: external-ip:!amazon
tasks:
- name: get host's external IP address
command: curl --silent --fail http://169.254.169.254/latest/meta-data/public-ipv4
register: host_external_ip
- set_fact: external_hostname="{{host_external_ip.stdout}}"
| ---
# For Amazon EC2 hosts, use the ec2 fact
- hosts: external-ip:&amazon
tasks:
- set_fact: external_hostname="{{ansible_ec2_public_hostname}}"
# For non-EC2 hosts, use a detected external IP address
- hosts: external-ip:!amazon:&apt
sudo: yes
tasks:
- name: Install curl
apt: name=curl state=present
- hosts: external-ip:!amazon
tasks:
- name: get host's external IP address
# See http://www.commandlinefu.com/commands/view/5427/get-your-external-ip-address
# for alternatives to this command
command: curl --silent --fail http://ifconfig.me/ip
register: host_external_ip
- set_fact: external_hostname="{{host_external_ip.stdout}}"
| Use a more general method to detect external IP address. Existing method only worked for Amazon instances. | Use a more general method to detect external IP address.
Existing method only worked for Amazon instances.
| YAML | apache-2.0 | BioVeL/ansible-playbooks,scman1/ansible-portal | yaml | ## Code Before:
---
# For Amazon EC2 hosts, use the ec2 fact
- hosts: external-ip:&amazon
tasks:
- set_fact: external_hostname="{{ansible_ec2_public_hostname}}"
# For non-EC2 hosts, use a detected external IP address
- hosts: external-ip:!amazon:&apt
sudo: yes
tasks:
- name: Install curl
apt: name=curl state=present
- hosts: external-ip:!amazon
tasks:
- name: get host's external IP address
command: curl --silent --fail http://169.254.169.254/latest/meta-data/public-ipv4
register: host_external_ip
- set_fact: external_hostname="{{host_external_ip.stdout}}"
## Instruction:
Use a more general method to detect external IP address.
Existing method only worked for Amazon instances.
## Code After:
---
# For Amazon EC2 hosts, use the ec2 fact
- hosts: external-ip:&amazon
tasks:
- set_fact: external_hostname="{{ansible_ec2_public_hostname}}"
# For non-EC2 hosts, use a detected external IP address
- hosts: external-ip:!amazon:&apt
sudo: yes
tasks:
- name: Install curl
apt: name=curl state=present
- hosts: external-ip:!amazon
tasks:
- name: get host's external IP address
# See http://www.commandlinefu.com/commands/view/5427/get-your-external-ip-address
# for alternatives to this command
command: curl --silent --fail http://ifconfig.me/ip
register: host_external_ip
- set_fact: external_hostname="{{host_external_ip.stdout}}"
| ---
# For Amazon EC2 hosts, use the ec2 fact
- hosts: external-ip:&amazon
tasks:
- set_fact: external_hostname="{{ansible_ec2_public_hostname}}"
# For non-EC2 hosts, use a detected external IP address
- hosts: external-ip:!amazon:&apt
sudo: yes
tasks:
- name: Install curl
apt: name=curl state=present
- hosts: external-ip:!amazon
tasks:
- name: get host's external IP address
- command: curl --silent --fail http://169.254.169.254/latest/meta-data/public-ipv4
+ # See http://www.commandlinefu.com/commands/view/5427/get-your-external-ip-address
+ # for alternatives to this command
+ command: curl --silent --fail http://ifconfig.me/ip
register: host_external_ip
- set_fact: external_hostname="{{host_external_ip.stdout}}"
| 4 | 0.181818 | 3 | 1 |
3c12dfb10c1a6999cb3f4b757ee36b92cbdcc253 | composer.json | composer.json | {
"name": "daemonalchemist/atp-core",
"description": "Core components for the ATP framework",
"license": "MIT",
"authors": [
{
"name": "Andrew Wittrock",
"email": "andy@wittrock.us"
}
],
"require": {
"imagine/imagine": "0.6.1",
"rwoverdijk/assetmanager": "dev-master",
"zendframework/zend-cache": "2.3.*",
"zendframework/zend-config": "2.3.*",
"zendframework/zend-console": "2.3.*",
"zendframework/zend-crypt": "2.3.*",
"zendframework/zend-db": "2.3.*",
"zendframework/zend-form": "2.3.*",
"zendframework/zend-http": "2.3.*",
"zendframework/zend-i18n": "2.3.*",
"zendframework/zend-loader": "2.3.*",
"zendframework/zend-log": "2.3.*",
"zendframework/zend-math": "2.3.*",
"zendframework/zend-modulemanager": "2.3.*",
"zendframework/zend-mvc": "2.3.*",
"zendframework/zend-serializer": "2.3.*",
"zendframework/zend-uri": "2.3.*",
"zendframework/zend-view": "2.3.*"
}
}
| {
"name": "daemonalchemist/atp-core",
"description": "Core components for the ATP framework",
"license": "MIT",
"authors": [
{
"name": "Andrew Wittrock",
"email": "andy@wittrock.us"
}
],
"require": {
"daemonalchemist/atp": "dev-master",
"imagine/imagine": "0.6.1",
"rwoverdijk/assetmanager": "dev-master",
"zendframework/zend-cache": "2.3.*",
"zendframework/zend-config": "2.3.*",
"zendframework/zend-console": "2.3.*",
"zendframework/zend-crypt": "2.3.*",
"zendframework/zend-db": "2.3.*",
"zendframework/zend-form": "2.3.*",
"zendframework/zend-http": "2.3.*",
"zendframework/zend-i18n": "2.3.*",
"zendframework/zend-loader": "2.3.*",
"zendframework/zend-log": "2.3.*",
"zendframework/zend-math": "2.3.*",
"zendframework/zend-modulemanager": "2.3.*",
"zendframework/zend-mvc": "2.3.*",
"zendframework/zend-serializer": "2.3.*",
"zendframework/zend-uri": "2.3.*",
"zendframework/zend-view": "2.3.*"
}
}
| Add ATP library as a dependency | Add ATP library as a dependency
| JSON | mit | DaemonAlchemist/atp-core,DaemonAlchemist/atp-core,DaemonAlchemist/atp-core | json | ## Code Before:
{
"name": "daemonalchemist/atp-core",
"description": "Core components for the ATP framework",
"license": "MIT",
"authors": [
{
"name": "Andrew Wittrock",
"email": "andy@wittrock.us"
}
],
"require": {
"imagine/imagine": "0.6.1",
"rwoverdijk/assetmanager": "dev-master",
"zendframework/zend-cache": "2.3.*",
"zendframework/zend-config": "2.3.*",
"zendframework/zend-console": "2.3.*",
"zendframework/zend-crypt": "2.3.*",
"zendframework/zend-db": "2.3.*",
"zendframework/zend-form": "2.3.*",
"zendframework/zend-http": "2.3.*",
"zendframework/zend-i18n": "2.3.*",
"zendframework/zend-loader": "2.3.*",
"zendframework/zend-log": "2.3.*",
"zendframework/zend-math": "2.3.*",
"zendframework/zend-modulemanager": "2.3.*",
"zendframework/zend-mvc": "2.3.*",
"zendframework/zend-serializer": "2.3.*",
"zendframework/zend-uri": "2.3.*",
"zendframework/zend-view": "2.3.*"
}
}
## Instruction:
Add ATP library as a dependency
## Code After:
{
"name": "daemonalchemist/atp-core",
"description": "Core components for the ATP framework",
"license": "MIT",
"authors": [
{
"name": "Andrew Wittrock",
"email": "andy@wittrock.us"
}
],
"require": {
"daemonalchemist/atp": "dev-master",
"imagine/imagine": "0.6.1",
"rwoverdijk/assetmanager": "dev-master",
"zendframework/zend-cache": "2.3.*",
"zendframework/zend-config": "2.3.*",
"zendframework/zend-console": "2.3.*",
"zendframework/zend-crypt": "2.3.*",
"zendframework/zend-db": "2.3.*",
"zendframework/zend-form": "2.3.*",
"zendframework/zend-http": "2.3.*",
"zendframework/zend-i18n": "2.3.*",
"zendframework/zend-loader": "2.3.*",
"zendframework/zend-log": "2.3.*",
"zendframework/zend-math": "2.3.*",
"zendframework/zend-modulemanager": "2.3.*",
"zendframework/zend-mvc": "2.3.*",
"zendframework/zend-serializer": "2.3.*",
"zendframework/zend-uri": "2.3.*",
"zendframework/zend-view": "2.3.*"
}
}
| {
"name": "daemonalchemist/atp-core",
"description": "Core components for the ATP framework",
"license": "MIT",
"authors": [
{
"name": "Andrew Wittrock",
"email": "andy@wittrock.us"
}
],
"require": {
+ "daemonalchemist/atp": "dev-master",
"imagine/imagine": "0.6.1",
"rwoverdijk/assetmanager": "dev-master",
"zendframework/zend-cache": "2.3.*",
"zendframework/zend-config": "2.3.*",
"zendframework/zend-console": "2.3.*",
"zendframework/zend-crypt": "2.3.*",
"zendframework/zend-db": "2.3.*",
"zendframework/zend-form": "2.3.*",
"zendframework/zend-http": "2.3.*",
"zendframework/zend-i18n": "2.3.*",
"zendframework/zend-loader": "2.3.*",
"zendframework/zend-log": "2.3.*",
"zendframework/zend-math": "2.3.*",
"zendframework/zend-modulemanager": "2.3.*",
"zendframework/zend-mvc": "2.3.*",
"zendframework/zend-serializer": "2.3.*",
"zendframework/zend-uri": "2.3.*",
"zendframework/zend-view": "2.3.*"
}
} | 1 | 0.032258 | 1 | 0 |
d07848a5085efc13f4e260de7a6a98f63170bbd8 | test/helpers/magic_test_helper.rb | test/helpers/magic_test_helper.rb |
module MagicTestHelpers
def capture_stderr
require 'stringio'
saved_stderr, $stderr = $stderr, StringIO.new
[yield, $stderr.string]
ensure
$stderr = saved_stderr
end
def with_fixtures(fixtures = 'fixtures', &block)
new = @version && @version >= 519
cwd = File.join('test', fixtures)
Dir.chdir(cwd) do
yield(Dir.pwd, File.join(new ? 'new-format' : 'old-format'))
end
end
def with_attribute_override(attribute, value, &block)
writer = "#{attribute}="
old = @magic.send(attribute)
@magic.send(writer, value)
ensure
@magic.send(writer, old)
end
end
|
module MagicTestHelpers
def capture_stderr(children: false)
require 'thread'
semaphore = Mutex.new
if children
require 'tempfile'
captured_stderr = Tempfile.new('captured_stderr')
semaphore.synchronize do
backup_stderr = $stderr.dup
$stderr.reopen captured_stderr
begin
yield
$stderr.rewind
captured_stderr.read
ensure
captured_stderr.unlink
$stderr.reopen backup_stderr
end
end
else
require 'stringio'
captured_stderr = StringIO.new
semaphore.synchronize do
backup_stderr = $stderr
$stderr = captured_stderr
begin
yield
ensure
$stderr = backup_stderr
end
end
captured_stderr.string
end
end
def with_fixtures(fixtures = 'fixtures', &block)
new = @version && @version >= 519
cwd = File.join('test', fixtures)
Dir.chdir(cwd) do
yield(Dir.pwd, File.join(new ? 'new-format' : 'old-format'))
end
end
def with_attribute_override(attribute, value, &block)
writer = "#{attribute}="
old = @magic.send(attribute)
@magic.send(writer, value)
ensure
@magic.send(writer, old)
end
end
| Refactor the capture_stderr helper to capture child processes output | Refactor the capture_stderr helper to capture child processes output
Signed-off-by: Krzysztof Wilczyński <5f1c0be89013f8fde969a8dcb2fa1d522e94ee00@linux.com>
| Ruby | apache-2.0 | kwilczynski/ruby-magic,kwilczynski/ruby-magic | ruby | ## Code Before:
module MagicTestHelpers
def capture_stderr
require 'stringio'
saved_stderr, $stderr = $stderr, StringIO.new
[yield, $stderr.string]
ensure
$stderr = saved_stderr
end
def with_fixtures(fixtures = 'fixtures', &block)
new = @version && @version >= 519
cwd = File.join('test', fixtures)
Dir.chdir(cwd) do
yield(Dir.pwd, File.join(new ? 'new-format' : 'old-format'))
end
end
def with_attribute_override(attribute, value, &block)
writer = "#{attribute}="
old = @magic.send(attribute)
@magic.send(writer, value)
ensure
@magic.send(writer, old)
end
end
## Instruction:
Refactor the capture_stderr helper to capture child processes output
Signed-off-by: Krzysztof Wilczyński <5f1c0be89013f8fde969a8dcb2fa1d522e94ee00@linux.com>
## Code After:
module MagicTestHelpers
def capture_stderr(children: false)
require 'thread'
semaphore = Mutex.new
if children
require 'tempfile'
captured_stderr = Tempfile.new('captured_stderr')
semaphore.synchronize do
backup_stderr = $stderr.dup
$stderr.reopen captured_stderr
begin
yield
$stderr.rewind
captured_stderr.read
ensure
captured_stderr.unlink
$stderr.reopen backup_stderr
end
end
else
require 'stringio'
captured_stderr = StringIO.new
semaphore.synchronize do
backup_stderr = $stderr
$stderr = captured_stderr
begin
yield
ensure
$stderr = backup_stderr
end
end
captured_stderr.string
end
end
def with_fixtures(fixtures = 'fixtures', &block)
new = @version && @version >= 519
cwd = File.join('test', fixtures)
Dir.chdir(cwd) do
yield(Dir.pwd, File.join(new ? 'new-format' : 'old-format'))
end
end
def with_attribute_override(attribute, value, &block)
writer = "#{attribute}="
old = @magic.send(attribute)
@magic.send(writer, value)
ensure
@magic.send(writer, old)
end
end
|
module MagicTestHelpers
- def capture_stderr
+ def capture_stderr(children: false)
+ require 'thread'
+ semaphore = Mutex.new
+ if children
+ require 'tempfile'
+ captured_stderr = Tempfile.new('captured_stderr')
+ semaphore.synchronize do
+ backup_stderr = $stderr.dup
+ $stderr.reopen captured_stderr
+ begin
+ yield
+ $stderr.rewind
+ captured_stderr.read
+ ensure
+ captured_stderr.unlink
+ $stderr.reopen backup_stderr
+ end
+ end
+ else
- require 'stringio'
+ require 'stringio'
? ++
- saved_stderr, $stderr = $stderr, StringIO.new
- [yield, $stderr.string]
- ensure
+ captured_stderr = StringIO.new
+ semaphore.synchronize do
+ backup_stderr = $stderr
- $stderr = saved_stderr
? ^ ^
+ $stderr = captured_stderr
? ++++ ^ ^^^^
+ begin
+ yield
+ ensure
+ $stderr = backup_stderr
+ end
+ end
+ captured_stderr.string
+ end
end
def with_fixtures(fixtures = 'fixtures', &block)
new = @version && @version >= 519
cwd = File.join('test', fixtures)
Dir.chdir(cwd) do
yield(Dir.pwd, File.join(new ? 'new-format' : 'old-format'))
end
end
def with_attribute_override(attribute, value, &block)
writer = "#{attribute}="
old = @magic.send(attribute)
@magic.send(writer, value)
ensure
@magic.send(writer, old)
end
end | 38 | 1.461538 | 32 | 6 |
808ffec412d75b732906ca1406a6a18589ec0a4b | lib/bh/helpers/horizontal_helper.rb | lib/bh/helpers/horizontal_helper.rb | require 'bh/helpers/base_helper'
module Bh
# Provides the `horizontal` helper.
module HorizontalHelper
include BaseHelper
def horizontal(content_or_options_with_block = nil, options = nil, &block)
if block_given?
horizontal_string (content_or_options_with_block || {}), &block
else
horizontal_string (options || {}), &Proc.new { content_or_options_with_block }
end
end
private
def horizontal_string(options = {}, &block)
append_class! options, 'collapse navbar-collapse'
options[:id] = navbar_id
content_tag :div, options, &block
end
end
end | require 'bh/classes/base'
module Bh
# Provides the `horizontal` helper.
module HorizontalHelper
def horizontal(*args, &block)
horizontal = Bh::Base.new self, *args, &block
horizontal.append_class! :'collapse navbar-collapse'
horizontal.merge! id: navbar_id
horizontal.render_tag :div
end
end
end | Reduce public API methods from HorizontalHelper | Reduce public API methods from HorizontalHelper
Before this PR, including `bh` in an app would include more methods
than necessary for horizontal: methods like `horizontal_string`
that should only be accessed privately.
| Ruby | mit | Shadhopson/bh,luciuschoi/bh,luciuschoi/bh,Fullscreen/bh,chrismayu/bh,juliobetta/bh,MichaelSp/bh,Fullscreen/bh,MichaelSp/bh,chrismayu/bh,juliobetta/bh,buren/bh,Shadhopson/bh | ruby | ## Code Before:
require 'bh/helpers/base_helper'
module Bh
# Provides the `horizontal` helper.
module HorizontalHelper
include BaseHelper
def horizontal(content_or_options_with_block = nil, options = nil, &block)
if block_given?
horizontal_string (content_or_options_with_block || {}), &block
else
horizontal_string (options || {}), &Proc.new { content_or_options_with_block }
end
end
private
def horizontal_string(options = {}, &block)
append_class! options, 'collapse navbar-collapse'
options[:id] = navbar_id
content_tag :div, options, &block
end
end
end
## Instruction:
Reduce public API methods from HorizontalHelper
Before this PR, including `bh` in an app would include more methods
than necessary for horizontal: methods like `horizontal_string`
that should only be accessed privately.
## Code After:
require 'bh/classes/base'
module Bh
# Provides the `horizontal` helper.
module HorizontalHelper
def horizontal(*args, &block)
horizontal = Bh::Base.new self, *args, &block
horizontal.append_class! :'collapse navbar-collapse'
horizontal.merge! id: navbar_id
horizontal.render_tag :div
end
end
end | - require 'bh/helpers/base_helper'
+ require 'bh/classes/base'
module Bh
# Provides the `horizontal` helper.
module HorizontalHelper
- include BaseHelper
+ def horizontal(*args, &block)
+ horizontal = Bh::Base.new self, *args, &block
- def horizontal(content_or_options_with_block = nil, options = nil, &block)
- if block_given?
- horizontal_string (content_or_options_with_block || {}), &block
- else
- horizontal_string (options || {}), &Proc.new { content_or_options_with_block }
- end
- end
-
- private
-
- def horizontal_string(options = {}, &block)
- append_class! options, 'collapse navbar-collapse'
? ^^^^^^^^^
+ horizontal.append_class! :'collapse navbar-collapse'
? +++++++++++ ^
- options[:id] = navbar_id
- content_tag :div, options, &block
+ horizontal.merge! id: navbar_id
+ horizontal.render_tag :div
end
end
end | 22 | 0.916667 | 6 | 16 |
afcdb9da284c117cbf90649fe915371723d35ee8 | src/Data/LLVM/Testing.hs | src/Data/LLVM/Testing.hs | -- | Various functions to help test this library and analyses based on
-- it.
module Data.LLVM.Testing (
readInputAndExpected,
testAgainstExpected,
module Data.LLVM.Testing.BuildModule
) where
import System.FilePath
import System.FilePath.Glob
import Data.LLVM.Types
import Data.LLVM.Testing.BuildModule
readInputAndExpected :: (Read a) => (FilePath -> FilePath) -> Bool -> FilePath -> IO (Module, a)
readInputAndExpected expectedFunc optimize inputFile = do
let exFile = expectedFunc inputFile
exContent <- readFile exFile
-- use seq here to force the full evaluation of the read file.
let expected = length exContent `seq` read exContent
m <- buildModule inputFile optimize
return (m, expected)
testAgainstExpected :: (Read a) => FilePath -> String -> (FilePath -> FilePath) -> Bool ->
(Module -> a) -> (a -> a -> IO ()) -> IO ()
testAgainstExpected testDir testExt expectedMap optimize buildResult compareResults = do
-- Glob up all of the files in the test directory with the target extension
let inputPattern = testDir </> ("*" <.> testExt)
testInputFiles <- namesMatching inputPattern
inputsAndExpecteds <- mapM (readInputAndExpected expectedMap optimize) testInputFiles
mapM_ runAndCompare inputsAndExpecteds
where
runAndCompare (m, expected) = do
let actual = buildResult m
compareResults actual expected | -- | Various functions to help test this library and analyses based on
-- it.
module Data.LLVM.Testing (
readInputAndExpected,
testAgainstExpected,
module Data.LLVM.Testing.BuildModule
) where
import System.FilePath.Glob
import Data.LLVM.Types
import Data.LLVM.Testing.BuildModule
readInputAndExpected :: (Read a) => (FilePath -> FilePath) -> Bool -> FilePath ->
IO (FilePath, Module, a)
readInputAndExpected expectedFunc optimize inputFile = do
let exFile = expectedFunc inputFile
exContent <- readFile exFile
-- use seq here to force the full evaluation of the read file.
let expected = length exContent `seq` read exContent
m <- buildModule inputFile optimize
return (inputFile, m, expected)
testAgainstExpected :: (Read a) => String -> (FilePath -> FilePath) -> Bool ->
(Module -> a) -> (String -> a -> a -> IO ()) -> IO ()
testAgainstExpected testPattern expectedMap optimize buildResult compareResults = do
-- Glob up all of the files in the test directory with the target extension
testInputFiles <- namesMatching testPattern
inputsAndExpecteds <- mapM (readInputAndExpected expectedMap optimize) testInputFiles
mapM_ runAndCompare inputsAndExpecteds
where
runAndCompare (file, m, expected) = do
let actual = buildResult m
compareResults file actual expected | Clean up the testing interface. | Clean up the testing interface.
Just take a pattern to glob instead of a directory and extension.
Tweak the comparison function to take a descriptive string (the
filename) as the first parameter. This is useful for diagnosis and it
happens to match the signature of assertEqual in HUnit.
| Haskell | bsd-3-clause | wangxiayang/llvm-analysis,wangxiayang/llvm-analysis,travitch/llvm-analysis,travitch/llvm-analysis | haskell | ## Code Before:
-- | Various functions to help test this library and analyses based on
-- it.
module Data.LLVM.Testing (
readInputAndExpected,
testAgainstExpected,
module Data.LLVM.Testing.BuildModule
) where
import System.FilePath
import System.FilePath.Glob
import Data.LLVM.Types
import Data.LLVM.Testing.BuildModule
readInputAndExpected :: (Read a) => (FilePath -> FilePath) -> Bool -> FilePath -> IO (Module, a)
readInputAndExpected expectedFunc optimize inputFile = do
let exFile = expectedFunc inputFile
exContent <- readFile exFile
-- use seq here to force the full evaluation of the read file.
let expected = length exContent `seq` read exContent
m <- buildModule inputFile optimize
return (m, expected)
testAgainstExpected :: (Read a) => FilePath -> String -> (FilePath -> FilePath) -> Bool ->
(Module -> a) -> (a -> a -> IO ()) -> IO ()
testAgainstExpected testDir testExt expectedMap optimize buildResult compareResults = do
-- Glob up all of the files in the test directory with the target extension
let inputPattern = testDir </> ("*" <.> testExt)
testInputFiles <- namesMatching inputPattern
inputsAndExpecteds <- mapM (readInputAndExpected expectedMap optimize) testInputFiles
mapM_ runAndCompare inputsAndExpecteds
where
runAndCompare (m, expected) = do
let actual = buildResult m
compareResults actual expected
## Instruction:
Clean up the testing interface.
Just take a pattern to glob instead of a directory and extension.
Tweak the comparison function to take a descriptive string (the
filename) as the first parameter. This is useful for diagnosis and it
happens to match the signature of assertEqual in HUnit.
## Code After:
-- | Various functions to help test this library and analyses based on
-- it.
module Data.LLVM.Testing (
readInputAndExpected,
testAgainstExpected,
module Data.LLVM.Testing.BuildModule
) where
import System.FilePath.Glob
import Data.LLVM.Types
import Data.LLVM.Testing.BuildModule
readInputAndExpected :: (Read a) => (FilePath -> FilePath) -> Bool -> FilePath ->
IO (FilePath, Module, a)
readInputAndExpected expectedFunc optimize inputFile = do
let exFile = expectedFunc inputFile
exContent <- readFile exFile
-- use seq here to force the full evaluation of the read file.
let expected = length exContent `seq` read exContent
m <- buildModule inputFile optimize
return (inputFile, m, expected)
testAgainstExpected :: (Read a) => String -> (FilePath -> FilePath) -> Bool ->
(Module -> a) -> (String -> a -> a -> IO ()) -> IO ()
testAgainstExpected testPattern expectedMap optimize buildResult compareResults = do
-- Glob up all of the files in the test directory with the target extension
testInputFiles <- namesMatching testPattern
inputsAndExpecteds <- mapM (readInputAndExpected expectedMap optimize) testInputFiles
mapM_ runAndCompare inputsAndExpecteds
where
runAndCompare (file, m, expected) = do
let actual = buildResult m
compareResults file actual expected | -- | Various functions to help test this library and analyses based on
-- it.
module Data.LLVM.Testing (
readInputAndExpected,
testAgainstExpected,
module Data.LLVM.Testing.BuildModule
) where
- import System.FilePath
import System.FilePath.Glob
import Data.LLVM.Types
import Data.LLVM.Testing.BuildModule
- readInputAndExpected :: (Read a) => (FilePath -> FilePath) -> Bool -> FilePath -> IO (Module, a)
? ---------------
+ readInputAndExpected :: (Read a) => (FilePath -> FilePath) -> Bool -> FilePath ->
+ IO (FilePath, Module, a)
readInputAndExpected expectedFunc optimize inputFile = do
let exFile = expectedFunc inputFile
exContent <- readFile exFile
-- use seq here to force the full evaluation of the read file.
let expected = length exContent `seq` read exContent
m <- buildModule inputFile optimize
- return (m, expected)
+ return (inputFile, m, expected)
? +++++++++++
- testAgainstExpected :: (Read a) => FilePath -> String -> (FilePath -> FilePath) -> Bool ->
? ------------
+ testAgainstExpected :: (Read a) => String -> (FilePath -> FilePath) -> Bool ->
- (Module -> a) -> (a -> a -> IO ()) -> IO ()
+ (Module -> a) -> (String -> a -> a -> IO ()) -> IO ()
? ++++++++++
- testAgainstExpected testDir testExt expectedMap optimize buildResult compareResults = do
? ^^^^ ^^^^^
+ testAgainstExpected testPattern expectedMap optimize buildResult compareResults = do
? ^^^ ^^
-- Glob up all of the files in the test directory with the target extension
- let inputPattern = testDir </> ("*" <.> testExt)
- testInputFiles <- namesMatching inputPattern
? ^^^^
+ testInputFiles <- namesMatching testPattern
? ^^^
inputsAndExpecteds <- mapM (readInputAndExpected expectedMap optimize) testInputFiles
mapM_ runAndCompare inputsAndExpecteds
where
- runAndCompare (m, expected) = do
+ runAndCompare (file, m, expected) = do
? ++++++
let actual = buildResult m
- compareResults actual expected
+ compareResults file actual expected
? +++++
| 19 | 0.542857 | 9 | 10 |
299db6b86cdc1434c9ab4ce181a7c58affb36afd | lib/decryptStream.js | lib/decryptStream.js | var crypto = require('crypto'),
util = require('util'),
Transform = require('stream').Transform;
function DecryptStream(options) {
if (!(this instanceof DecryptStream))
return new DecryptStream(options);
Transform.call(this, options);
this.key = options.key;
this._decipher = crypto.createDecipheriv('aes-128-ecb', this.key, '');
this._decipher.setAutoPadding(true);
};
util.inherits(DecryptStream, Transform);
DecryptStream.prototype._transform = function(chunk, encoding, done) {
this.push(this._decipher.update(chunk));
done();
};
module.exports = DecryptStream; | var crypto = require('crypto'),
util = require('util'),
Transform = require('stream').Transform;
function DecryptStream(options) {
if (!(this instanceof DecryptStream))
return new DecryptStream(options);
Transform.call(this, options);
this.key = options.key;
this._decipher = crypto.createDecipheriv('aes-128-ecb', this.key, '');
this._decipher.setAutoPadding(true);
};
util.inherits(DecryptStream, Transform);
DecryptStream.prototype._transform = function(chunk, encoding, callback) {
this.push(this._decipher.update(chunk));
callback();
};
DecryptStream.prototype._flush = function(callback) {
this.push(this._decipher.final());
callback();
}
module.exports = DecryptStream; | Call final() on stream flush. | Call final() on stream flush.
| JavaScript | mit | oliviert/node-snapchat | javascript | ## Code Before:
var crypto = require('crypto'),
util = require('util'),
Transform = require('stream').Transform;
function DecryptStream(options) {
if (!(this instanceof DecryptStream))
return new DecryptStream(options);
Transform.call(this, options);
this.key = options.key;
this._decipher = crypto.createDecipheriv('aes-128-ecb', this.key, '');
this._decipher.setAutoPadding(true);
};
util.inherits(DecryptStream, Transform);
DecryptStream.prototype._transform = function(chunk, encoding, done) {
this.push(this._decipher.update(chunk));
done();
};
module.exports = DecryptStream;
## Instruction:
Call final() on stream flush.
## Code After:
var crypto = require('crypto'),
util = require('util'),
Transform = require('stream').Transform;
function DecryptStream(options) {
if (!(this instanceof DecryptStream))
return new DecryptStream(options);
Transform.call(this, options);
this.key = options.key;
this._decipher = crypto.createDecipheriv('aes-128-ecb', this.key, '');
this._decipher.setAutoPadding(true);
};
util.inherits(DecryptStream, Transform);
DecryptStream.prototype._transform = function(chunk, encoding, callback) {
this.push(this._decipher.update(chunk));
callback();
};
DecryptStream.prototype._flush = function(callback) {
this.push(this._decipher.final());
callback();
}
module.exports = DecryptStream; | var crypto = require('crypto'),
util = require('util'),
Transform = require('stream').Transform;
function DecryptStream(options) {
if (!(this instanceof DecryptStream))
return new DecryptStream(options);
Transform.call(this, options);
this.key = options.key;
this._decipher = crypto.createDecipheriv('aes-128-ecb', this.key, '');
this._decipher.setAutoPadding(true);
};
util.inherits(DecryptStream, Transform);
- DecryptStream.prototype._transform = function(chunk, encoding, done) {
? ^^^^
+ DecryptStream.prototype._transform = function(chunk, encoding, callback) {
? ^^^^^^^^
this.push(this._decipher.update(chunk));
- done();
+ callback();
};
+ DecryptStream.prototype._flush = function(callback) {
+ this.push(this._decipher.final());
+ callback();
+ }
+
module.exports = DecryptStream; | 9 | 0.375 | 7 | 2 |
e8921ca8ba74b559d472dc6d5ae11c8614819020 | dotnet/tools/chocolateyInstall.ps1 | dotnet/tools/chocolateyInstall.ps1 | $packageName = "dotnet"
$installerType = "EXE"
$installerArgs = "/passive /norestart"
$url = "http://download.microsoft.com/download/E/2/1/E21644B5-2DF2-47C2-91BD-63C560427900/NDP452-KB2901907-x86-x64-AllOS-ENU.exe"
if ($psISE) {
Import-Module -name "$env:ChocolateyInstall\chocolateyinstall\helpers\chocolateyInstaller.psm1"
$ErrorActionPreference = "Stop"
}
try
{
Install-ChocolateyPackage $packageName $installerType $installerArgs $url $url
Write-ChocolateySuccess $packageName
}
catch
{
Write-ChocolateyFailure $packageName $($_.Exception.Message)
throw
}
| $packageName = "dotnet"
$installerType = "EXE"
$installerArgs = "/passive /norestart"
$url = "http://download.microsoft.com/download/E/2/1/E21644B5-2DF2-47C2-91BD-63C560427900/NDP452-KB2901907-x86-x64-AllOS-ENU.exe"
$errorCode = @(0, 3010)
if ($psISE) {
Import-Module -name "$env:ChocolateyInstall\chocolateyinstall\helpers\chocolateyInstaller.psm1"
$ErrorActionPreference = "Stop"
}
try
{
Install-ChocolateyPackage $packageName $installerType $installerArgs $url `
-ValidExitCodes $errorCode
Write-ChocolateySuccess $packageName
}
catch
{
Write-ChocolateyFailure $packageName $($_.Exception.Message)
throw
}
| Exit Code 3010 is ok as a exit code | Exit Code 3010 is ok as a exit code
| PowerShell | apache-2.0 | dcjulian29/choco-packages | powershell | ## Code Before:
$packageName = "dotnet"
$installerType = "EXE"
$installerArgs = "/passive /norestart"
$url = "http://download.microsoft.com/download/E/2/1/E21644B5-2DF2-47C2-91BD-63C560427900/NDP452-KB2901907-x86-x64-AllOS-ENU.exe"
if ($psISE) {
Import-Module -name "$env:ChocolateyInstall\chocolateyinstall\helpers\chocolateyInstaller.psm1"
$ErrorActionPreference = "Stop"
}
try
{
Install-ChocolateyPackage $packageName $installerType $installerArgs $url $url
Write-ChocolateySuccess $packageName
}
catch
{
Write-ChocolateyFailure $packageName $($_.Exception.Message)
throw
}
## Instruction:
Exit Code 3010 is ok as a exit code
## Code After:
$packageName = "dotnet"
$installerType = "EXE"
$installerArgs = "/passive /norestart"
$url = "http://download.microsoft.com/download/E/2/1/E21644B5-2DF2-47C2-91BD-63C560427900/NDP452-KB2901907-x86-x64-AllOS-ENU.exe"
$errorCode = @(0, 3010)
if ($psISE) {
Import-Module -name "$env:ChocolateyInstall\chocolateyinstall\helpers\chocolateyInstaller.psm1"
$ErrorActionPreference = "Stop"
}
try
{
Install-ChocolateyPackage $packageName $installerType $installerArgs $url `
-ValidExitCodes $errorCode
Write-ChocolateySuccess $packageName
}
catch
{
Write-ChocolateyFailure $packageName $($_.Exception.Message)
throw
}
| $packageName = "dotnet"
$installerType = "EXE"
$installerArgs = "/passive /norestart"
$url = "http://download.microsoft.com/download/E/2/1/E21644B5-2DF2-47C2-91BD-63C560427900/NDP452-KB2901907-x86-x64-AllOS-ENU.exe"
+ $errorCode = @(0, 3010)
if ($psISE) {
Import-Module -name "$env:ChocolateyInstall\chocolateyinstall\helpers\chocolateyInstaller.psm1"
$ErrorActionPreference = "Stop"
}
try
{
- Install-ChocolateyPackage $packageName $installerType $installerArgs $url $url
? ^^^^
+ Install-ChocolateyPackage $packageName $installerType $installerArgs $url `
? ^
+ -ValidExitCodes $errorCode
Write-ChocolateySuccess $packageName
}
catch
{
Write-ChocolateyFailure $packageName $($_.Exception.Message)
throw
} | 4 | 0.190476 | 3 | 1 |
1fa14b413568ec83d2e5e0fb02ca6f8cd107998c | .travis.yml | .travis.yml | language: java
jdk:
- oraclejdk8
# gradle projects have "gradle assemble" and "gradle check" run automatically
# so a script setting is not required to verify the code compiles or run tests
# script:
# from: https://docs.travis-ci.com/user/languages/java/#Caching
# A peculiarity of dependency caching in Gradle means that
# to avoid uploading the cache after every build
# you need to add the following lines to your .travis.yml:
before_cache:
- rm -f $HOME/.gradle/caches/modules-2/modules-2.lock
- rm -fr $HOME/.gradle/caches/*/plugin-resolution/
cache:
directories:
- $HOME/.gradle/caches/
- $HOME/.gradle/wrapper/
script:
- ./gradlew check
after_success:
- bash <(curl -s https://codecov.io/bash)
- test $TRAVIS_PULL_REQUEST == "false" && test $TRAVIS_BRANCH == "master" && bash deploy.sh
notifications:
slack: mitrecorp:HNikOgsZALD9WxBZh5oSJve9
webhooks:
urls:
- https://synthea.zulipchat.com/api/v1/external/travis?stream=development&topic=build-status&api_key=XQ5SyfiEkfgjoT9uelJboOTQQOsKYCV2
| language: java
jdk:
- oraclejdk8
- oraclejdk10
# gradle projects have "gradle assemble" and "gradle check" run automatically
# so a script setting is not required to verify the code compiles or run tests
# script:
# from: https://docs.travis-ci.com/user/languages/java/#Caching
# A peculiarity of dependency caching in Gradle means that
# to avoid uploading the cache after every build
# you need to add the following lines to your .travis.yml:
before_cache:
- rm -f $HOME/.gradle/caches/modules-2/modules-2.lock
- rm -fr $HOME/.gradle/caches/*/plugin-resolution/
cache:
directories:
- $HOME/.gradle/caches/
- $HOME/.gradle/wrapper/
script:
- ./gradlew check
after_success:
- bash <(curl -s https://codecov.io/bash)
- test $TRAVIS_PULL_REQUEST == "false" && test $TRAVIS_BRANCH == "master" && bash deploy.sh
notifications:
slack: mitrecorp:HNikOgsZALD9WxBZh5oSJve9
webhooks:
urls:
- https://synthea.zulipchat.com/api/v1/external/travis?stream=development&topic=build-status&api_key=XQ5SyfiEkfgjoT9uelJboOTQQOsKYCV2
| Add JDK 10 to Travis. | Add JDK 10 to Travis.
| YAML | apache-2.0 | synthetichealth/synthea,synthetichealth/synthea,synthetichealth/synthea | yaml | ## Code Before:
language: java
jdk:
- oraclejdk8
# gradle projects have "gradle assemble" and "gradle check" run automatically
# so a script setting is not required to verify the code compiles or run tests
# script:
# from: https://docs.travis-ci.com/user/languages/java/#Caching
# A peculiarity of dependency caching in Gradle means that
# to avoid uploading the cache after every build
# you need to add the following lines to your .travis.yml:
before_cache:
- rm -f $HOME/.gradle/caches/modules-2/modules-2.lock
- rm -fr $HOME/.gradle/caches/*/plugin-resolution/
cache:
directories:
- $HOME/.gradle/caches/
- $HOME/.gradle/wrapper/
script:
- ./gradlew check
after_success:
- bash <(curl -s https://codecov.io/bash)
- test $TRAVIS_PULL_REQUEST == "false" && test $TRAVIS_BRANCH == "master" && bash deploy.sh
notifications:
slack: mitrecorp:HNikOgsZALD9WxBZh5oSJve9
webhooks:
urls:
- https://synthea.zulipchat.com/api/v1/external/travis?stream=development&topic=build-status&api_key=XQ5SyfiEkfgjoT9uelJboOTQQOsKYCV2
## Instruction:
Add JDK 10 to Travis.
## Code After:
language: java
jdk:
- oraclejdk8
- oraclejdk10
# gradle projects have "gradle assemble" and "gradle check" run automatically
# so a script setting is not required to verify the code compiles or run tests
# script:
# from: https://docs.travis-ci.com/user/languages/java/#Caching
# A peculiarity of dependency caching in Gradle means that
# to avoid uploading the cache after every build
# you need to add the following lines to your .travis.yml:
before_cache:
- rm -f $HOME/.gradle/caches/modules-2/modules-2.lock
- rm -fr $HOME/.gradle/caches/*/plugin-resolution/
cache:
directories:
- $HOME/.gradle/caches/
- $HOME/.gradle/wrapper/
script:
- ./gradlew check
after_success:
- bash <(curl -s https://codecov.io/bash)
- test $TRAVIS_PULL_REQUEST == "false" && test $TRAVIS_BRANCH == "master" && bash deploy.sh
notifications:
slack: mitrecorp:HNikOgsZALD9WxBZh5oSJve9
webhooks:
urls:
- https://synthea.zulipchat.com/api/v1/external/travis?stream=development&topic=build-status&api_key=XQ5SyfiEkfgjoT9uelJboOTQQOsKYCV2
| language: java
jdk:
- oraclejdk8
+ - oraclejdk10
# gradle projects have "gradle assemble" and "gradle check" run automatically
# so a script setting is not required to verify the code compiles or run tests
# script:
# from: https://docs.travis-ci.com/user/languages/java/#Caching
# A peculiarity of dependency caching in Gradle means that
# to avoid uploading the cache after every build
# you need to add the following lines to your .travis.yml:
before_cache:
- rm -f $HOME/.gradle/caches/modules-2/modules-2.lock
- rm -fr $HOME/.gradle/caches/*/plugin-resolution/
cache:
directories:
- $HOME/.gradle/caches/
- $HOME/.gradle/wrapper/
script:
- ./gradlew check
after_success:
- bash <(curl -s https://codecov.io/bash)
- test $TRAVIS_PULL_REQUEST == "false" && test $TRAVIS_BRANCH == "master" && bash deploy.sh
notifications:
slack: mitrecorp:HNikOgsZALD9WxBZh5oSJve9
webhooks:
urls:
- https://synthea.zulipchat.com/api/v1/external/travis?stream=development&topic=build-status&api_key=XQ5SyfiEkfgjoT9uelJboOTQQOsKYCV2
| 1 | 0.033333 | 1 | 0 |
46e684faa4d39b98f9f72b19f5ec8366994ec206 | server/src/main/scala/cromwell/CromwellApp.scala | server/src/main/scala/cromwell/CromwellApp.scala | package cromwell
object CromwellApp extends App {
sealed trait Command
case object Run extends Command
case object Server extends Command
case object Submit extends Command
def buildParser(): scopt.OptionParser[CommandLineArguments] = new CommandLineParser()
def runCromwell(args: CommandLineArguments): Unit = {
args.command match {
case Some(Run) => CromwellEntryPoint.runSingle(args)
case Some(Server) => CromwellEntryPoint.runServer()
case Some(Submit) => CromwellEntryPoint.submitToServer(args)
case None => parser.showUsage()
}
}
val parser = buildParser()
val parsedArgs = parser.parse(args, CommandLineArguments())
parsedArgs match {
case Some(pa) => runCromwell(pa)
case None => parser.showUsage()
}
}
| package cromwell
object CromwellApp extends App {
sealed trait Command
case object Run extends Command
case object Server extends Command
case object Submit extends Command
def buildParser(): scopt.OptionParser[CommandLineArguments] = new CommandLineParser()
def runCromwell(args: CommandLineArguments): Unit = {
args.command match {
case Some(Run) => CromwellEntryPoint.runSingle(args)
case Some(Server) => CromwellEntryPoint.runServer()
case Some(Submit) => CromwellEntryPoint.submitToServer(args)
case None => showUsageAndExitWithError()
}
}
val parser = buildParser()
val parsedArgs = parser.parse(args, CommandLineArguments())
parsedArgs match {
case Some(pa) => runCromwell(pa)
case None => showUsageAndExitWithError()
}
private def showUsageAndExitWithError(): Unit = {
parser.showUsage()
System.exit(1)
}
}
| Exit with code "1" on malformed CLI invocation | Exit with code "1" on malformed CLI invocation [BA-3094]
| Scala | bsd-3-clause | broadinstitute/cromwell,broadinstitute/cromwell,broadinstitute/cromwell,broadinstitute/cromwell,broadinstitute/cromwell | scala | ## Code Before:
package cromwell
object CromwellApp extends App {
sealed trait Command
case object Run extends Command
case object Server extends Command
case object Submit extends Command
def buildParser(): scopt.OptionParser[CommandLineArguments] = new CommandLineParser()
def runCromwell(args: CommandLineArguments): Unit = {
args.command match {
case Some(Run) => CromwellEntryPoint.runSingle(args)
case Some(Server) => CromwellEntryPoint.runServer()
case Some(Submit) => CromwellEntryPoint.submitToServer(args)
case None => parser.showUsage()
}
}
val parser = buildParser()
val parsedArgs = parser.parse(args, CommandLineArguments())
parsedArgs match {
case Some(pa) => runCromwell(pa)
case None => parser.showUsage()
}
}
## Instruction:
Exit with code "1" on malformed CLI invocation [BA-3094]
## Code After:
package cromwell
object CromwellApp extends App {
sealed trait Command
case object Run extends Command
case object Server extends Command
case object Submit extends Command
def buildParser(): scopt.OptionParser[CommandLineArguments] = new CommandLineParser()
def runCromwell(args: CommandLineArguments): Unit = {
args.command match {
case Some(Run) => CromwellEntryPoint.runSingle(args)
case Some(Server) => CromwellEntryPoint.runServer()
case Some(Submit) => CromwellEntryPoint.submitToServer(args)
case None => showUsageAndExitWithError()
}
}
val parser = buildParser()
val parsedArgs = parser.parse(args, CommandLineArguments())
parsedArgs match {
case Some(pa) => runCromwell(pa)
case None => showUsageAndExitWithError()
}
private def showUsageAndExitWithError(): Unit = {
parser.showUsage()
System.exit(1)
}
}
| package cromwell
object CromwellApp extends App {
sealed trait Command
case object Run extends Command
case object Server extends Command
case object Submit extends Command
def buildParser(): scopt.OptionParser[CommandLineArguments] = new CommandLineParser()
def runCromwell(args: CommandLineArguments): Unit = {
args.command match {
case Some(Run) => CromwellEntryPoint.runSingle(args)
case Some(Server) => CromwellEntryPoint.runServer()
case Some(Submit) => CromwellEntryPoint.submitToServer(args)
- case None => parser.showUsage()
+ case None => showUsageAndExitWithError()
}
}
val parser = buildParser()
val parsedArgs = parser.parse(args, CommandLineArguments())
parsedArgs match {
case Some(pa) => runCromwell(pa)
+ case None => showUsageAndExitWithError()
+ }
+
+ private def showUsageAndExitWithError(): Unit = {
- case None => parser.showUsage()
? -------------
+ parser.showUsage()
+ System.exit(1)
}
} | 9 | 0.321429 | 7 | 2 |
71b6d8e2c3fcdc8d156cf2ba4009590a6e48a4b6 | src/LevelSelect.js | src/LevelSelect.js | import html from "innerself";
import Scene from "./Scene";
import { SCENES } from "./actions"
import { connect } from "./store";
function LevelScore(score, idx) {
return html`
<div class="box action"
onclick="goto(${SCENES.FIND}, ${idx})"
style="padding: .5rem; color: #666;">
${score}</div>
`;
}
function LevelSelect({results}) {
return Scene(
{id: SCENES.LEVELS, from: "black", to: "black"},
html`
<div class="ui black">
<div class="pad">
${results.map(LevelScore)}
<div class="action"
style="padding: .5rem;"
onclick="goto(${SCENES.FIND}, ${results.length})">next</div>
</div>
</div>`
);
}
export default connect(LevelSelect);
| import html from "innerself";
import Scene from "./Scene";
import { SCENES } from "./actions"
import { connect } from "./store";
function LevelScore(score, idx) {
return html`
<div class="box action"
onclick="goto(${SCENES.FIND}, ${idx})"
style="padding: .5rem; color: #666;">
${score}</div>
`;
}
function LevelSelect({results}) {
const total = results.reduce((acc, cur) => acc + cur);
const average = Math.floor(total / results.length);
// An inverted hyperbola with lim(x → ∞) = 1.
const threshold = 100 * (1 - 2.5 / results.length);
return Scene(
{id: SCENES.LEVELS, from: "black", to: "black"},
html`
<div class="ui black">
<div class="pad">
${results.map(LevelScore)}
${ average > threshold
? `<div class="action" style="padding: .5rem;"
onclick="goto(${SCENES.FIND}, ${results.length})">next</div>`
: `<div class="action" style="padding: .5rem;"
title="Your average so far is too low to advance.">
…?
</div>`
}
</div>
</div>`
);
}
export default connect(LevelSelect);
| Add a scalable threshold for advancing | Add a scalable threshold for advancing
| JavaScript | isc | piesku/moment-lost,piesku/moment-lost | javascript | ## Code Before:
import html from "innerself";
import Scene from "./Scene";
import { SCENES } from "./actions"
import { connect } from "./store";
function LevelScore(score, idx) {
return html`
<div class="box action"
onclick="goto(${SCENES.FIND}, ${idx})"
style="padding: .5rem; color: #666;">
${score}</div>
`;
}
function LevelSelect({results}) {
return Scene(
{id: SCENES.LEVELS, from: "black", to: "black"},
html`
<div class="ui black">
<div class="pad">
${results.map(LevelScore)}
<div class="action"
style="padding: .5rem;"
onclick="goto(${SCENES.FIND}, ${results.length})">next</div>
</div>
</div>`
);
}
export default connect(LevelSelect);
## Instruction:
Add a scalable threshold for advancing
## Code After:
import html from "innerself";
import Scene from "./Scene";
import { SCENES } from "./actions"
import { connect } from "./store";
function LevelScore(score, idx) {
return html`
<div class="box action"
onclick="goto(${SCENES.FIND}, ${idx})"
style="padding: .5rem; color: #666;">
${score}</div>
`;
}
function LevelSelect({results}) {
const total = results.reduce((acc, cur) => acc + cur);
const average = Math.floor(total / results.length);
// An inverted hyperbola with lim(x → ∞) = 1.
const threshold = 100 * (1 - 2.5 / results.length);
return Scene(
{id: SCENES.LEVELS, from: "black", to: "black"},
html`
<div class="ui black">
<div class="pad">
${results.map(LevelScore)}
${ average > threshold
? `<div class="action" style="padding: .5rem;"
onclick="goto(${SCENES.FIND}, ${results.length})">next</div>`
: `<div class="action" style="padding: .5rem;"
title="Your average so far is too low to advance.">
…?
</div>`
}
</div>
</div>`
);
}
export default connect(LevelSelect);
| import html from "innerself";
import Scene from "./Scene";
import { SCENES } from "./actions"
import { connect } from "./store";
function LevelScore(score, idx) {
return html`
<div class="box action"
onclick="goto(${SCENES.FIND}, ${idx})"
style="padding: .5rem; color: #666;">
${score}</div>
`;
}
function LevelSelect({results}) {
+ const total = results.reduce((acc, cur) => acc + cur);
+ const average = Math.floor(total / results.length);
+ // An inverted hyperbola with lim(x → ∞) = 1.
+ const threshold = 100 * (1 - 2.5 / results.length);
+
return Scene(
{id: SCENES.LEVELS, from: "black", to: "black"},
html`
<div class="ui black">
<div class="pad">
${results.map(LevelScore)}
- <div class="action"
+ ${ average > threshold
- style="padding: .5rem;"
+ ? `<div class="action" style="padding: .5rem;"
? +++++++++++++++++++++++
- onclick="goto(${SCENES.FIND}, ${results.length})">next</div>
+ onclick="goto(${SCENES.FIND}, ${results.length})">next</div>`
? ++++ +
+ : `<div class="action" style="padding: .5rem;"
+ title="Your average so far is too low to advance.">
+ …?
+ </div>`
+ }
</div>
</div>`
);
}
export default connect(LevelSelect); | 16 | 0.533333 | 13 | 3 |
2ea3ef9f775dc9432efa25ebe240c41cf74da1c0 | lib/active_file/blob.rb | lib/active_file/blob.rb | class ActiveFile::Blob < ActiveRecord::Base
self.table_name = "rails_active_file_blobs"
store :metadata, coder: JSON
has_secure_token
class_attribute :verifier, default: -> { Rails.application.message_verifier('ActiveFile') }
class_attribute :storage
class << self
def find_verified(signed_id)
find(verifier.verify(signed_id))
end
def build_after_upload(data:, filename:, content_type: nil, metadata: nil)
new.tap do |blob|
blob.filename = name
blob.content_type = Marcel::MimeType.for(data, name: name, declared_type: content_type)
blob.data = data
end
end
def create_after_upload!(data:, filename:, content_type: nil, metadata: nil)
build_after_upload(data: data, filename: filename, content_type: content_type, metadata: metadata).tap(&:save!)
end
end
def filename
Filename.new(filename)
end
def delete
storage.delete token
end
def purge
delete
destroy
end
def purge_later
ActiveFile::PurgeJob.perform_later(self)
end
end
| class ActiveFile::Blob < ActiveRecord::Base
self.table_name = "rails_active_file_blobs"
has_secure_token :key
store :metadata, coder: JSON
class_attribute :verifier, default: -> { Rails.application.message_verifier('ActiveFile') }
class_attribute :storage
class << self
def find_verified(signed_id)
find(verifier.verify(signed_id))
end
def build_after_upload(data:, filename:, content_type: nil, metadata: nil)
new.tap do |blob|
blob.filename = name
blob.content_type = Marcel::MimeType.for(data, name: name, declared_type: content_type)
blob.data = data
end
end
def create_after_upload!(data:, filename:, content_type: nil, metadata: nil)
build_after_upload(data: data, filename: filename, content_type: content_type, metadata: metadata).tap(&:save!)
end
end
def filename
Filename.new(filename)
end
def delete
storage.delete token
end
def purge
delete
destroy
end
def purge_later
ActiveFile::PurgeJob.perform_later(self)
end
end
| Use key instead of token | Use key instead of token
More familiar in this context
| Ruby | mit | untidy-hair/rails,yawboakye/rails,travisofthenorth/rails,rafaelfranca/omg-rails,gfvcastro/rails,gauravtiwari/rails,yahonda/rails,aditya-kapoor/rails,eileencodes/rails,illacceptanything/illacceptanything,ledestin/rails,iainbeeston/rails,lcreid/rails,yahonda/rails,vipulnsward/rails,yasslab/railsguides.jp,notapatch/rails,mechanicles/rails,kddeisz/rails,notapatch/rails,gfvcastro/rails,betesh/rails,yasslab/railsguides.jp,joonyou/rails,tjschuck/rails,starknx/rails,yalab/rails,utilum/rails,illacceptanything/illacceptanything,mechanicles/rails,MSP-Greg/rails,travisofthenorth/rails,tjschuck/rails,Envek/rails,MSP-Greg/rails,Stellenticket/rails,Vasfed/rails,arunagw/rails,ledestin/rails,yawboakye/rails,eileencodes/rails,mohitnatoo/rails,iainbeeston/rails,aditya-kapoor/rails,alecspopa/rails,vipulnsward/rails,Stellenticket/rails,gfvcastro/rails,shioyama/rails,palkan/rails,pvalena/rails,gfvcastro/rails,tjschuck/rails,notapatch/rails,EmmaB/rails-1,rafaelfranca/omg-rails,kmcphillips/rails,bogdanvlviv/rails,Envek/rails,baerjam/rails,yhirano55/rails,alecspopa/rails,deraru/rails,illacceptanything/illacceptanything,mohitnatoo/rails,brchristian/rails,tgxworld/rails,felipecvo/rails,notapatch/rails,utilum/rails,lcreid/rails,bogdanvlviv/rails,travisofthenorth/rails,printercu/rails,Stellenticket/rails,tgxworld/rails,Envek/rails,Vasfed/rails,EmmaB/rails-1,eileencodes/rails,fabianoleittes/rails,yalab/rails,schuetzm/rails,schuetzm/rails,illacceptanything/illacceptanything,repinel/rails,vipulnsward/rails,iainbeeston/rails,felipecvo/rails,rails/rails,untidy-hair/rails,iainbeeston/rails,kirs/rails-1,esparta/rails,yawboakye/rails,yasslab/railsguides.jp,BlakeWilliams/rails,odedniv/rails,fabianoleittes/rails,deraru/rails,jeremy/rails,betesh/rails,flanger001/rails,odedniv/rails,eileencodes/rails,alecspopa/rails,Erol/rails,yhirano55/rails,jeremy/rails,rails/rails,lcreid/rails,esparta/rails,aditya-kapoor/rails,kmayer/rails,flanger001/rails,illacceptanything/illacceptanything,starknx/rails
,fabianoleittes/rails,odedniv/rails,tgxworld/rails,repinel/rails,kddeisz/rails,jeremy/rails,prathamesh-sonpatki/rails,kirs/rails-1,printercu/rails,rails/rails,kamipo/rails,kirs/rails-1,kddeisz/rails,georgeclaghorn/rails,brchristian/rails,mathieujobin/reduced-rails-for-travis,joonyou/rails,georgeclaghorn/rails,betesh/rails,fabianoleittes/rails,MSP-Greg/rails,kmayer/rails,bogdanvlviv/rails,aditya-kapoor/rails,flanger001/rails,yawboakye/rails,betesh/rails,arunagw/rails,kaspth/rails,esparta/rails,arunagw/rails,illacceptanything/illacceptanything,baerjam/rails,illacceptanything/illacceptanything,schuetzm/rails,assain/rails,assain/rails,felipecvo/rails,georgeclaghorn/rails,rafaelfranca/omg-rails,yalab/rails,kmcphillips/rails,repinel/rails,travisofthenorth/rails,Erol/rails,mechanicles/rails,gcourtemanche/rails,baerjam/rails,schuetzm/rails,tgxworld/rails,repinel/rails,yhirano55/rails,illacceptanything/illacceptanything,brchristian/rails,illacceptanything/illacceptanything,mathieujobin/reduced-rails-for-travis,kmayer/rails,shioyama/rails,mathieujobin/reduced-rails-for-travis,prathamesh-sonpatki/rails,prathamesh-sonpatki/rails,palkan/rails,illacceptanything/illacceptanything,pvalena/rails,Sen-Zhang/rails,yasslab/railsguides.jp,utilum/rails,Sen-Zhang/rails,Stellenticket/rails,Vasfed/rails,gcourtemanche/rails,rails/rails,Erol/rails,yalab/rails,illacceptanything/illacceptanything,utilum/rails,illacceptanything/illacceptanything,Edouard-chin/rails,pvalena/rails,illacceptanything/illacceptanything,gcourtemanche/rails,illacceptanything/illacceptanything,kmcphillips/rails,mechanicles/rails,gauravtiwari/rails,yahonda/rails,assain/rails,kaspth/rails,Envek/rails,kaspth/rails,deraru/rails,lcreid/rails,baerjam/rails,Vasfed/rails,palkan/rails,printercu/rails,ledestin/rails,gauravtiwari/rails,shioyama/rails,vipulnsward/rails,EmmaB/rails-1,mohitnatoo/rails,tjschuck/rails,mohitnatoo/rails,joonyou/rails,starknx/rails,kamipo/rails,palkan/rails,pvalena/rails,kamipo/rails,MSP-Greg/rails,Edouard-
chin/rails,kmcphillips/rails,Edouard-chin/rails,arunagw/rails,shioyama/rails,yhirano55/rails,kddeisz/rails,bogdanvlviv/rails,untidy-hair/rails,assain/rails,yahonda/rails,Erol/rails,illacceptanything/illacceptanything,prathamesh-sonpatki/rails,esparta/rails,georgeclaghorn/rails,flanger001/rails,joonyou/rails,untidy-hair/rails,Sen-Zhang/rails,jeremy/rails,BlakeWilliams/rails,Edouard-chin/rails,BlakeWilliams/rails,BlakeWilliams/rails,deraru/rails,printercu/rails | ruby | ## Code Before:
# Active Record model backing uploaded file blobs. Rows live in the
# `rails_active_file_blobs` table; the binary payload itself is handed to the
# configured +storage+ service, keyed by this record's generated token.
class ActiveFile::Blob < ActiveRecord::Base
  self.table_name = "rails_active_file_blobs"

  # Serialized hash column for arbitrary per-blob metadata, stored as JSON.
  store :metadata, coder: JSON
  # Generates a unique random token for each record before creation.
  has_secure_token

  # Verifier used to sign/verify blob ids exposed outside the app.
  class_attribute :verifier, default: -> { Rails.application.message_verifier('ActiveFile') }
  # Storage service adapter (assigned elsewhere) that holds the blob payload.
  class_attribute :storage

  class << self
    # Finds a blob by a signed id; +verifier.verify+ raises on a tampered
    # or invalid signature, so only ids minted by this app resolve.
    def find_verified(signed_id)
      find(verifier.verify(signed_id))
    end

    # Builds (without saving) a blob record for already-uploaded data,
    # sniffing the content type via Marcel.
    #
    # NOTE(review): inside `class << self`, `name` resolves to Module#name
    # (the class name string), not the +filename+ keyword argument — this
    # looks like a bug; confirm the intent was `filename`. The +metadata+
    # keyword is accepted but never assigned to the blob.
    def build_after_upload(data:, filename:, content_type: nil, metadata: nil)
      new.tap do |blob|
        blob.filename = name
        blob.content_type = Marcel::MimeType.for(data, name: name, declared_type: content_type)
        blob.data = data
      end
    end

    # Same as +build_after_upload+, but persists the record, raising on
    # validation failure.
    def create_after_upload!(data:, filename:, content_type: nil, metadata: nil)
      build_after_upload(data: data, filename: filename, content_type: content_type, metadata: metadata).tap(&:save!)
    end
  end

  # Wraps the filename in a Filename value object.
  # NOTE(review): this method calls itself (infinite recursion) — presumably
  # it was meant to read the raw attribute, e.g. `self[:filename]`.
  def filename
    Filename.new(filename)
  end

  # Removes the payload from the storage service; the database row remains.
  def delete
    storage.delete token
  end

  # Removes both the stored payload and the database row.
  def purge
    delete
    destroy
  end

  # Enqueues a background job to purge this blob asynchronously.
  def purge_later
    ActiveFile::PurgeJob.perform_later(self)
  end
end
## Instruction:
Use key instead of token
More familiar in this context
## Code After:
class ActiveFile::Blob < ActiveRecord::Base
self.table_name = "rails_active_file_blobs"
has_secure_token :key
store :metadata, coder: JSON
class_attribute :verifier, default: -> { Rails.application.message_verifier('ActiveFile') }
class_attribute :storage
class << self
def find_verified(signed_id)
find(verifier.verify(signed_id))
end
def build_after_upload(data:, filename:, content_type: nil, metadata: nil)
new.tap do |blob|
blob.filename = name
blob.content_type = Marcel::MimeType.for(data, name: name, declared_type: content_type)
blob.data = data
end
end
def create_after_upload!(data:, filename:, content_type: nil, metadata: nil)
build_after_upload(data: data, filename: filename, content_type: content_type, metadata: metadata).tap(&:save!)
end
end
def filename
Filename.new(filename)
end
def delete
storage.delete token
end
def purge
delete
destroy
end
def purge_later
ActiveFile::PurgeJob.perform_later(self)
end
end
| class ActiveFile::Blob < ActiveRecord::Base
self.table_name = "rails_active_file_blobs"
+ has_secure_token :key
store :metadata, coder: JSON
- has_secure_token
class_attribute :verifier, default: -> { Rails.application.message_verifier('ActiveFile') }
class_attribute :storage
class << self
def find_verified(signed_id)
find(verifier.verify(signed_id))
end
def build_after_upload(data:, filename:, content_type: nil, metadata: nil)
new.tap do |blob|
blob.filename = name
blob.content_type = Marcel::MimeType.for(data, name: name, declared_type: content_type)
blob.data = data
end
end
def create_after_upload!(data:, filename:, content_type: nil, metadata: nil)
build_after_upload(data: data, filename: filename, content_type: content_type, metadata: metadata).tap(&:save!)
end
end
def filename
Filename.new(filename)
end
def delete
storage.delete token
end
def purge
delete
destroy
end
def purge_later
ActiveFile::PurgeJob.perform_later(self)
end
end | 2 | 0.045455 | 1 | 1 |
84e2ef62b1faff132d69ad3384be68fb364d86c9 | src/string.js | src/string.js | // =================================================================================================
// Core.js | String Functions
// (c) 2014 Mathigon / Philipp Legner
// =================================================================================================
(function() {
M.extend(String.prototype, {
endsWith: function(search) {
var end = this.length;
var start = end - search.length;
return (this.substring(start, end) === search);
},
strip: function() {
return this.replace(/^\s+/, '').replace(/\s+$/, '');
},
collapse: function() {
return this.trim().replace(/\s+/g, ' ');
},
toTitleCase: function() {
return this.replace(/\S+/g, function(a){
return a.charAt(0).toUpperCase() + a.slice(1);
});
},
words: function() {
return this.strip().split(/\s+/);
}
}, true);
if ( !String.prototype.contains ) {
M.extend(String.prototype, {
contains: function() {
return String.prototype.indexOf.apply( this, arguments ) !== -1;
}
}, true);
}
})();
| // =================================================================================================
// Core.js | String Functions
// (c) 2014 Mathigon / Philipp Legner
// =================================================================================================
(function() {
M.extend(String.prototype, {
strip: function() {
return this.replace(/^\s+/, '').replace(/\s+$/, '');
},
collapse: function() {
return this.trim().replace(/\s+/g, ' ');
},
toTitleCase: function() {
return this.replace(/\S+/g, function(a){
return a.charAt(0).toUpperCase() + a.slice(1);
});
},
words: function() {
return this.strip().split(/\s+/);
}
}, true);
if (!String.prototype.endsWith) {
M.extend(String.prototype, {
endsWith: function(search) {
var end = this.length;
var start = end - search.length;
return (this.substring(start, end) === search);
}
}, true);
}
if (!String.prototype.contains) {
M.extend(String.prototype, {
contains: function() {
return String.prototype.indexOf.apply( this, arguments ) !== -1;
}
}, true);
}
})();
| Fix String endsWith prototype shadowing | Fix String endsWith prototype shadowing
| JavaScript | mit | Mathigon/core.js | javascript | ## Code Before:
// =================================================================================================
// Core.js | String Functions
// (c) 2014 Mathigon / Philipp Legner
// =================================================================================================
(function() {
M.extend(String.prototype, {
endsWith: function(search) {
var end = this.length;
var start = end - search.length;
return (this.substring(start, end) === search);
},
strip: function() {
return this.replace(/^\s+/, '').replace(/\s+$/, '');
},
collapse: function() {
return this.trim().replace(/\s+/g, ' ');
},
toTitleCase: function() {
return this.replace(/\S+/g, function(a){
return a.charAt(0).toUpperCase() + a.slice(1);
});
},
words: function() {
return this.strip().split(/\s+/);
}
}, true);
if ( !String.prototype.contains ) {
M.extend(String.prototype, {
contains: function() {
return String.prototype.indexOf.apply( this, arguments ) !== -1;
}
}, true);
}
})();
## Instruction:
Fix String endsWith prototype shadowing
## Code After:
// =================================================================================================
// Core.js | String Functions
// (c) 2014 Mathigon / Philipp Legner
// =================================================================================================
(function() {
M.extend(String.prototype, {
strip: function() {
return this.replace(/^\s+/, '').replace(/\s+$/, '');
},
collapse: function() {
return this.trim().replace(/\s+/g, ' ');
},
toTitleCase: function() {
return this.replace(/\S+/g, function(a){
return a.charAt(0).toUpperCase() + a.slice(1);
});
},
words: function() {
return this.strip().split(/\s+/);
}
}, true);
if (!String.prototype.endsWith) {
M.extend(String.prototype, {
endsWith: function(search) {
var end = this.length;
var start = end - search.length;
return (this.substring(start, end) === search);
}
}, true);
}
if (!String.prototype.contains) {
M.extend(String.prototype, {
contains: function() {
return String.prototype.indexOf.apply( this, arguments ) !== -1;
}
}, true);
}
})();
| // =================================================================================================
// Core.js | String Functions
// (c) 2014 Mathigon / Philipp Legner
// =================================================================================================
(function() {
M.extend(String.prototype, {
- endsWith: function(search) {
- var end = this.length;
- var start = end - search.length;
- return (this.substring(start, end) === search);
- },
strip: function() {
return this.replace(/^\s+/, '').replace(/\s+$/, '');
},
collapse: function() {
return this.trim().replace(/\s+/g, ' ');
},
toTitleCase: function() {
return this.replace(/\S+/g, function(a){
return a.charAt(0).toUpperCase() + a.slice(1);
});
},
words: function() {
return this.strip().split(/\s+/);
}
}, true);
- if ( !String.prototype.contains ) {
? - ^^ ^^^^^
+ if (!String.prototype.endsWith) {
? ^ ++++ ^
M.extend(String.prototype, {
+ endsWith: function(search) {
+ var end = this.length;
+ var start = end - search.length;
+ return (this.substring(start, end) === search);
+ }
+ }, true);
+ }
+ if (!String.prototype.contains) {
+ M.extend(String.prototype, {
contains: function() {
return String.prototype.indexOf.apply( this, arguments ) !== -1;
}
}, true);
}
})(); | 16 | 0.347826 | 10 | 6 |
6ba88357a840e8c4042f0838b686cd664cd563fd | app/models/frontend_router.rb | app/models/frontend_router.rb | class FrontendRouter
attr_reader :id_type, :id
def initialize(id_type, id)
@id_type = id_type || ''
@id = id
end
def url
(entity, query_field) = query_info
if [entity, query_field, id].any? { |i| i.blank? }
nil
else
if entity.new.respond_to?('tag')
id.sub!(entity.new.tag, '')
end
obj = entity.find_by!(query_field => id)
adaptor = "LinkAdaptors::#{obj.class}".constantize.new(obj)
"#{domain}#{adaptor.base_path}"
end
end
private
def query_info
case id_type
when /genes?/
[ Gene, :id, ]
when /variants?\z/
[ Variant, :id, ]
when /evidence/, /evidence_items?/
[ EvidenceItem, :id, ]
when /entrez/
[ Gene, :entrez_id, ]
when /variant_groups?/
[ VariantGroup, :id, ]
when /revisions?/
[ SuggestedChange, :id ]
when /assertions?/
[ Assertion, :id ]
else
[]
end
end
def domain
'https://civic.genome.wustl.edu/'
end
end
| class FrontendRouter
attr_reader :id_type, :id
def initialize(id_type, id)
@id_type = id_type || ''
@id = id
end
def url
(entity, query_field) = query_info
if [entity, query_field, id].any? { |i| i.blank? }
nil
else
if entity.new.respond_to?('tag')
id.sub!(entity.new.tag, '')
end
obj = entity.find_by!(query_field => id)
adaptor = "LinkAdaptors::#{obj.class}".constantize.new(obj)
"#{domain}#{adaptor.base_path}"
end
end
private
def query_info
case id_type
when /genes?/
[ Gene, :id, ]
when /variants?\z/
[ Variant, :id, ]
when /evidence/, /evidence_items?/
[ EvidenceItem, :id, ]
when /entrez/
[ Gene, :entrez_id, ]
when /variant_groups?/
[ VariantGroup, :id, ]
when /revisions?/
[ SuggestedChange, :id ]
when /assertions?/
[ Assertion, :id ]
when /allele_registry/
[ Variant, :allele_registry_id, ]
else
[]
end
end
def domain
'https://civic.genome.wustl.edu/'
end
end
| Add direct link to variants by allele registry id | Add direct link to variants by allele registry id
| Ruby | mit | genome/civic-server,genome/civic-server,genome/civic-server,genome/civic-server,genome/civic-server | ruby | ## Code Before:
class FrontendRouter
attr_reader :id_type, :id
def initialize(id_type, id)
@id_type = id_type || ''
@id = id
end
def url
(entity, query_field) = query_info
if [entity, query_field, id].any? { |i| i.blank? }
nil
else
if entity.new.respond_to?('tag')
id.sub!(entity.new.tag, '')
end
obj = entity.find_by!(query_field => id)
adaptor = "LinkAdaptors::#{obj.class}".constantize.new(obj)
"#{domain}#{adaptor.base_path}"
end
end
private
def query_info
case id_type
when /genes?/
[ Gene, :id, ]
when /variants?\z/
[ Variant, :id, ]
when /evidence/, /evidence_items?/
[ EvidenceItem, :id, ]
when /entrez/
[ Gene, :entrez_id, ]
when /variant_groups?/
[ VariantGroup, :id, ]
when /revisions?/
[ SuggestedChange, :id ]
when /assertions?/
[ Assertion, :id ]
else
[]
end
end
def domain
'https://civic.genome.wustl.edu/'
end
end
## Instruction:
Add direct link to variants by allele registry id
## Code After:
class FrontendRouter
attr_reader :id_type, :id
def initialize(id_type, id)
@id_type = id_type || ''
@id = id
end
def url
(entity, query_field) = query_info
if [entity, query_field, id].any? { |i| i.blank? }
nil
else
if entity.new.respond_to?('tag')
id.sub!(entity.new.tag, '')
end
obj = entity.find_by!(query_field => id)
adaptor = "LinkAdaptors::#{obj.class}".constantize.new(obj)
"#{domain}#{adaptor.base_path}"
end
end
private
def query_info
case id_type
when /genes?/
[ Gene, :id, ]
when /variants?\z/
[ Variant, :id, ]
when /evidence/, /evidence_items?/
[ EvidenceItem, :id, ]
when /entrez/
[ Gene, :entrez_id, ]
when /variant_groups?/
[ VariantGroup, :id, ]
when /revisions?/
[ SuggestedChange, :id ]
when /assertions?/
[ Assertion, :id ]
when /allele_registry/
[ Variant, :allele_registry_id, ]
else
[]
end
end
def domain
'https://civic.genome.wustl.edu/'
end
end
| class FrontendRouter
attr_reader :id_type, :id
def initialize(id_type, id)
@id_type = id_type || ''
@id = id
end
def url
(entity, query_field) = query_info
if [entity, query_field, id].any? { |i| i.blank? }
nil
else
if entity.new.respond_to?('tag')
id.sub!(entity.new.tag, '')
end
obj = entity.find_by!(query_field => id)
adaptor = "LinkAdaptors::#{obj.class}".constantize.new(obj)
"#{domain}#{adaptor.base_path}"
end
end
private
def query_info
case id_type
when /genes?/
[ Gene, :id, ]
when /variants?\z/
[ Variant, :id, ]
when /evidence/, /evidence_items?/
[ EvidenceItem, :id, ]
when /entrez/
[ Gene, :entrez_id, ]
when /variant_groups?/
[ VariantGroup, :id, ]
when /revisions?/
[ SuggestedChange, :id ]
when /assertions?/
[ Assertion, :id ]
+ when /allele_registry/
+ [ Variant, :allele_registry_id, ]
else
[]
end
end
def domain
'https://civic.genome.wustl.edu/'
end
end | 2 | 0.042553 | 2 | 0 |
d4fa8a21e89da9063dc43b66ab479554429768e7 | app.js | app.js | var express = require('express')
var app = express()
var isUrl = require('is-url')
var normalizeUrl = require('normalize-url')
var scrutinize = require('scrutinize')
app.use(function (req, res, next) {
res.setHeader('Access-Control-Allow-Origin', 'http://localhost:8000')
res.setHeader('Access-Control-Allow-Methods', 'GET')
res.setHeader('Access-Control-Allow-Headers', 'X-Requested-With,content-type')
next()
})
app.get('/', function(req, res) {
var url = normalizeUrl(req.param('url') || '')
res.type('application/json')
if (isUrl(url)) {
scrutinize(url, {}, function(data) {
res.send(JSON.stringify(data))
})
} else {
res.status(406)
res.send(JSON.stringify({
error: 'No valid url specified',
example: 'http://api.scrutinize.io?url=example.com'
}))
}
})
app.listen(process.env.PORT || 3030) // Deltron
| var express = require('express')
var app = express()
var isUrl = require('is-url')
var normalizeUrl = require('normalize-url')
var scrutinize = require('scrutinize')
var cors = {
origin: ['localhost:8000', 'scrutinize.divshot.io/'],
default: 'scrutinize.divshot.io'
}
app.use(function(req, res, next) {
var origin = cors.origin.indexOf(req.header('host').toLowerCase()) > -1 ? req.headers.origin : cors.default
res.header('Access-Control-Allow-Origin', origin)
res.setHeader('Access-Control-Allow-Methods', 'GET')
res.header('Access-Control-Allow-Headers', 'X-Requested-With,content-type')
next()
})
app.get('/', function(req, res) {
var url = normalizeUrl(req.param('url') || '')
res.type('application/json')
if (isUrl(url)) {
scrutinize(url, {}, function(data) {
res.send(JSON.stringify(data))
})
} else {
res.status(406)
res.send(JSON.stringify({
error: 'No valid url specified',
example: 'http://api.scrutinize.io?url=example.com'
}))
}
})
app.listen(process.env.PORT || 3030) // Deltron
| Update access control for allow origin | Update access control for allow origin
| JavaScript | mit | johnotander/scrutinize-api | javascript | ## Code Before:
var express = require('express')
var app = express()
var isUrl = require('is-url')
var normalizeUrl = require('normalize-url')
var scrutinize = require('scrutinize')
app.use(function (req, res, next) {
res.setHeader('Access-Control-Allow-Origin', 'http://localhost:8000')
res.setHeader('Access-Control-Allow-Methods', 'GET')
res.setHeader('Access-Control-Allow-Headers', 'X-Requested-With,content-type')
next()
})
app.get('/', function(req, res) {
var url = normalizeUrl(req.param('url') || '')
res.type('application/json')
if (isUrl(url)) {
scrutinize(url, {}, function(data) {
res.send(JSON.stringify(data))
})
} else {
res.status(406)
res.send(JSON.stringify({
error: 'No valid url specified',
example: 'http://api.scrutinize.io?url=example.com'
}))
}
})
app.listen(process.env.PORT || 3030) // Deltron
## Instruction:
Update access control for allow origin
## Code After:
var express = require('express')
var app = express()
var isUrl = require('is-url')
var normalizeUrl = require('normalize-url')
var scrutinize = require('scrutinize')
var cors = {
origin: ['localhost:8000', 'scrutinize.divshot.io/'],
default: 'scrutinize.divshot.io'
}
app.use(function(req, res, next) {
var origin = cors.origin.indexOf(req.header('host').toLowerCase()) > -1 ? req.headers.origin : cors.default
res.header('Access-Control-Allow-Origin', origin)
res.setHeader('Access-Control-Allow-Methods', 'GET')
res.header('Access-Control-Allow-Headers', 'X-Requested-With,content-type')
next()
})
app.get('/', function(req, res) {
var url = normalizeUrl(req.param('url') || '')
res.type('application/json')
if (isUrl(url)) {
scrutinize(url, {}, function(data) {
res.send(JSON.stringify(data))
})
} else {
res.status(406)
res.send(JSON.stringify({
error: 'No valid url specified',
example: 'http://api.scrutinize.io?url=example.com'
}))
}
})
app.listen(process.env.PORT || 3030) // Deltron
| var express = require('express')
var app = express()
var isUrl = require('is-url')
var normalizeUrl = require('normalize-url')
var scrutinize = require('scrutinize')
+ var cors = {
+ origin: ['localhost:8000', 'scrutinize.divshot.io/'],
+ default: 'scrutinize.divshot.io'
+ }
+
- app.use(function (req, res, next) {
? -
+ app.use(function(req, res, next) {
- res.setHeader('Access-Control-Allow-Origin', 'http://localhost:8000')
+ var origin = cors.origin.indexOf(req.header('host').toLowerCase()) > -1 ? req.headers.origin : cors.default
+
+ res.header('Access-Control-Allow-Origin', origin)
res.setHeader('Access-Control-Allow-Methods', 'GET')
- res.setHeader('Access-Control-Allow-Headers', 'X-Requested-With,content-type')
? ^^^^
+ res.header('Access-Control-Allow-Headers', 'X-Requested-With,content-type')
? ^
next()
})
app.get('/', function(req, res) {
var url = normalizeUrl(req.param('url') || '')
res.type('application/json')
if (isUrl(url)) {
scrutinize(url, {}, function(data) {
res.send(JSON.stringify(data))
})
} else {
res.status(406)
res.send(JSON.stringify({
error: 'No valid url specified',
example: 'http://api.scrutinize.io?url=example.com'
}))
}
})
app.listen(process.env.PORT || 3030) // Deltron | 13 | 0.393939 | 10 | 3 |
1a6c02603fadd3bacda3750d6dfd261f3d996bb8 | core/src/us/thirdmillenium/desktoptrainer/DesktopTrainer.java | core/src/us/thirdmillenium/desktoptrainer/DesktopTrainer.java | package us.thirdmillenium.desktoptrainer;
import com.badlogic.gdx.ApplicationAdapter;
import com.badlogic.gdx.Gdx;
import us.thirdmillenium.desktoptrainer.environment.SinglePlayEnvironment;
import java.util.Random;
public class DesktopTrainer extends ApplicationAdapter {
// Test Map Index (1 - 5)
private int TestMapIndex = 3;
// Environment
private SinglePlayEnvironment MyEnvironment;
@Override
public void create () {
Random random = new Random();
this.MyEnvironment = new SinglePlayEnvironment(TrainingParams.PathToBaseNN, random, TestMapIndex);
}
@Override
public void render () {
this.MyEnvironment.simulate(Gdx.graphics.getDeltaTime());
}
}
| package us.thirdmillenium.desktoptrainer;
import com.badlogic.gdx.ApplicationAdapter;
import com.badlogic.gdx.Gdx;
import us.thirdmillenium.desktoptrainer.environment.Environment;
import java.util.Random;
public class DesktopTrainer extends ApplicationAdapter {
// Environment
private Environment MyEnvironment;
@Override
public void create () {
Random random = new Random();
this.MyEnvironment = new Environment(TrainingParams.PathToBaseNN, random, 5);
}
@Override
public void render () {
this.MyEnvironment.render(Gdx.graphics.getDeltaTime());
}
}
| Revert "Environment is now an extendible Java class" | Revert "Environment is now an extendible Java class"
This reverts commit c1a40a0f0ea170a3111e4e5e807080b859d626cf.
| Java | apache-2.0 | dwaybright/SAS-DesktopTrainer | java | ## Code Before:
package us.thirdmillenium.desktoptrainer;
import com.badlogic.gdx.ApplicationAdapter;
import com.badlogic.gdx.Gdx;
import us.thirdmillenium.desktoptrainer.environment.SinglePlayEnvironment;
import java.util.Random;
public class DesktopTrainer extends ApplicationAdapter {
// Test Map Index (1 - 5)
private int TestMapIndex = 3;
// Environment
private SinglePlayEnvironment MyEnvironment;
@Override
public void create () {
Random random = new Random();
this.MyEnvironment = new SinglePlayEnvironment(TrainingParams.PathToBaseNN, random, TestMapIndex);
}
@Override
public void render () {
this.MyEnvironment.simulate(Gdx.graphics.getDeltaTime());
}
}
## Instruction:
Revert "Environment is now an extendible Java class"
This reverts commit c1a40a0f0ea170a3111e4e5e807080b859d626cf.
## Code After:
package us.thirdmillenium.desktoptrainer;
import com.badlogic.gdx.ApplicationAdapter;
import com.badlogic.gdx.Gdx;
import us.thirdmillenium.desktoptrainer.environment.Environment;
import java.util.Random;
public class DesktopTrainer extends ApplicationAdapter {
// Environment
private Environment MyEnvironment;
@Override
public void create () {
Random random = new Random();
this.MyEnvironment = new Environment(TrainingParams.PathToBaseNN, random, 5);
}
@Override
public void render () {
this.MyEnvironment.render(Gdx.graphics.getDeltaTime());
}
}
| package us.thirdmillenium.desktoptrainer;
import com.badlogic.gdx.ApplicationAdapter;
import com.badlogic.gdx.Gdx;
- import us.thirdmillenium.desktoptrainer.environment.SinglePlayEnvironment;
? ----------
+ import us.thirdmillenium.desktoptrainer.environment.Environment;
import java.util.Random;
public class DesktopTrainer extends ApplicationAdapter {
- // Test Map Index (1 - 5)
- private int TestMapIndex = 3;
-
// Environment
- private SinglePlayEnvironment MyEnvironment;
? ----------
+ private Environment MyEnvironment;
-
@Override
public void create () {
Random random = new Random();
- this.MyEnvironment = new SinglePlayEnvironment(TrainingParams.PathToBaseNN, random, TestMapIndex);
? ---------- ^^^^^^^^^^^^
+ this.MyEnvironment = new Environment(TrainingParams.PathToBaseNN, random, 5);
? ^
}
@Override
public void render () {
- this.MyEnvironment.simulate(Gdx.graphics.getDeltaTime());
? ^^^^^^^
+ this.MyEnvironment.render(Gdx.graphics.getDeltaTime());
? ^ ++++
}
} | 12 | 0.4 | 4 | 8 |
d4a7d361f49c3f0bd1d65d1364bcf9c14dcf2736 | tests/Integration/IntegrationTestCase.php | tests/Integration/IntegrationTestCase.php | <?php
/*
* This file is part of the GraphAware Neo4j Client package.
*
* (c) GraphAware Limited <http://graphaware.com>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
namespace GraphAware\Neo4j\Client\Tests\Integration;
use GraphAware\Neo4j\Client\ClientBuilder;
class IntegrationTestCase extends \PHPUnit_Framework_TestCase
{
/**
* @var \GraphAware\Neo4j\Client\Client
*/
protected $client;
public function setUp()
{
$this->client = ClientBuilder::create()
->addConnection('http', 'http://localhost:7474')
->addConnection('bolt', 'bolt://localhost')
->build();
}
/**
* Empties the graph database.
*
* @void
*/
public function emptyDb()
{
$this->client->run('MATCH (n) OPTIONAL MATCH (n)-[r]-() DELETE r,n', null, null);
}
}
| <?php
/*
* This file is part of the GraphAware Neo4j Client package.
*
* (c) GraphAware Limited <http://graphaware.com>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
namespace GraphAware\Neo4j\Client\Tests\Integration;
use GraphAware\Neo4j\Client\ClientBuilder;
class IntegrationTestCase extends \PHPUnit_Framework_TestCase
{
/**
* @var \GraphAware\Neo4j\Client\Client
*/
protected $client;
public function setUp()
{
$httpUri = 'http://localhost:7474';
if (isset($_ENV['NEO4J_USER'])) {
$httpUri = sprintf(
'%s://%s:%s@%s:%s',
getenv('NEO4J_SCHEMA'),
getenv('NEO4J_USER'),
getenv('NEO4J_PASSWORD'),
getenv('NEO4J_HOST'),
getenv('NEO4J_PORT')
);
}
$boltUrl = 'bolt://localhost';
if (isset($_ENV['NEO4J_USER'])) {
$boltUrl = sprintf(
'bolt://%s:%s@%',
getenv('NEO4J_USER'),
getenv('NEO4J_PASSWORD'),
getenv('NEO4J_HOST')
);
}
$this->client = ClientBuilder::create()
->addConnection('http', $httpUri)
->addConnection('bolt', $boltUrl)
->build();
}
/**
* Empties the graph database.
*
* @void
*/
public function emptyDb()
{
$this->client->run('MATCH (n) OPTIONAL MATCH (n)-[r]-() DELETE r,n', null, null);
}
}
| Allow to specify neo4j credentials in phpunit.xml | Allow to specify neo4j credentials in phpunit.xml
| PHP | mit | graphaware/neo4j-php-client,PandawanTechnology/neo4j-php-client,graphaware/neo4j-php-client,PandawanTechnology/neo4j-php-client | php | ## Code Before:
<?php
/*
* This file is part of the GraphAware Neo4j Client package.
*
* (c) GraphAware Limited <http://graphaware.com>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
namespace GraphAware\Neo4j\Client\Tests\Integration;
use GraphAware\Neo4j\Client\ClientBuilder;
class IntegrationTestCase extends \PHPUnit_Framework_TestCase
{
/**
* @var \GraphAware\Neo4j\Client\Client
*/
protected $client;
public function setUp()
{
$this->client = ClientBuilder::create()
->addConnection('http', 'http://localhost:7474')
->addConnection('bolt', 'bolt://localhost')
->build();
}
/**
* Empties the graph database.
*
* @void
*/
public function emptyDb()
{
$this->client->run('MATCH (n) OPTIONAL MATCH (n)-[r]-() DELETE r,n', null, null);
}
}
## Instruction:
Allow to specify neo4j credentials in phpunit.xml
## Code After:
<?php
/*
* This file is part of the GraphAware Neo4j Client package.
*
* (c) GraphAware Limited <http://graphaware.com>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
namespace GraphAware\Neo4j\Client\Tests\Integration;
use GraphAware\Neo4j\Client\ClientBuilder;
class IntegrationTestCase extends \PHPUnit_Framework_TestCase
{
/**
* @var \GraphAware\Neo4j\Client\Client
*/
protected $client;
public function setUp()
{
$httpUri = 'http://localhost:7474';
if (isset($_ENV['NEO4J_USER'])) {
$httpUri = sprintf(
'%s://%s:%s@%s:%s',
getenv('NEO4J_SCHEMA'),
getenv('NEO4J_USER'),
getenv('NEO4J_PASSWORD'),
getenv('NEO4J_HOST'),
getenv('NEO4J_PORT')
);
}
$boltUrl = 'bolt://localhost';
if (isset($_ENV['NEO4J_USER'])) {
$boltUrl = sprintf(
'bolt://%s:%s@%',
getenv('NEO4J_USER'),
getenv('NEO4J_PASSWORD'),
getenv('NEO4J_HOST')
);
}
$this->client = ClientBuilder::create()
->addConnection('http', $httpUri)
->addConnection('bolt', $boltUrl)
->build();
}
/**
* Empties the graph database.
*
* @void
*/
public function emptyDb()
{
$this->client->run('MATCH (n) OPTIONAL MATCH (n)-[r]-() DELETE r,n', null, null);
}
}
| <?php
/*
* This file is part of the GraphAware Neo4j Client package.
*
* (c) GraphAware Limited <http://graphaware.com>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
namespace GraphAware\Neo4j\Client\Tests\Integration;
use GraphAware\Neo4j\Client\ClientBuilder;
class IntegrationTestCase extends \PHPUnit_Framework_TestCase
{
/**
* @var \GraphAware\Neo4j\Client\Client
*/
protected $client;
public function setUp()
{
+ $httpUri = 'http://localhost:7474';
+ if (isset($_ENV['NEO4J_USER'])) {
+ $httpUri = sprintf(
+ '%s://%s:%s@%s:%s',
+ getenv('NEO4J_SCHEMA'),
+ getenv('NEO4J_USER'),
+ getenv('NEO4J_PASSWORD'),
+ getenv('NEO4J_HOST'),
+ getenv('NEO4J_PORT')
+ );
+ }
+
+ $boltUrl = 'bolt://localhost';
+ if (isset($_ENV['NEO4J_USER'])) {
+ $boltUrl = sprintf(
+ 'bolt://%s:%s@%',
+ getenv('NEO4J_USER'),
+ getenv('NEO4J_PASSWORD'),
+ getenv('NEO4J_HOST')
+ );
+ }
+
$this->client = ClientBuilder::create()
- ->addConnection('http', 'http://localhost:7474')
? ^ ^^^^^^^^^^^^^^^^^^
+ ->addConnection('http', $httpUri)
? ^ ^^^
- ->addConnection('bolt', 'bolt://localhost')
? ^ ^^^ ---------
+ ->addConnection('bolt', $boltUrl)
? ^ ^^
->build();
}
/**
* Empties the graph database.
*
* @void
*/
public function emptyDb()
{
$this->client->run('MATCH (n) OPTIONAL MATCH (n)-[r]-() DELETE r,n', null, null);
}
} | 26 | 0.65 | 24 | 2 |
3f5f26d1e811c3936a0bbbeb3eb26e03b1d09dc7 | .travis.yml | .travis.yml | language: scala
jdk: oraclejdk8
env:
global:
- SBT_GHPAGES_COMMIT_MESSAGE='Publishing Scaladoc [ci skip]'
- SBT_OPTS="-XX:+UseConcMarkSweepGC -XX:+CMSClassUnloadingEnabled -XX:MetaspaceSize=256M -XX:MaxMetaspaceSize=512M -Xmx2G"
script: sbt clean coverage +test
deploy:
- provider: script
skip_cleanup: true
script: sbt updateImpactSubmit coverageReport coverageAggregate codacyCoverage makeSite +publish
on:
branch: master
- provider: script
skip_cleanup: true
script: bash <(curl -s https://codecov.io/bash)
on:
branch: master
- provider: pages
skip_cleanup: true
github-token: $GITHUB_TOKEN
local-dir: target/site
verbose: true
on:
branch: master
# Copied from https://github.com/typelevel/cats/blob/master/.travis.yml
cache:
directories:
- $HOME/.sbt
- $HOME/.ivy2/cache
- $HOME/.m2
| language: scala
jdk: oraclejdk8
env:
global:
- SBT_GHPAGES_COMMIT_MESSAGE='Publishing Scaladoc [ci skip]'
- SBT_OPTS="-XX:+UseConcMarkSweepGC -XX:+CMSClassUnloadingEnabled -XX:MetaspaceSize=256M -XX:MaxMetaspaceSize=512M -Xmx2G"
script: sbt clean coverage +test
deploy:
- provider: script
skip_cleanup: true
script: sbt updateImpactSubmit coverageReport coverageAggregate codacyCoverage makeSite +publish
on:
branch: master
- provider: script
skip_cleanup: true
script: bash <(curl -s https://codecov.io/bash)
on:
branch: master
- provider: pages
skip_cleanup: true
github-token: $GITHUB_TOKEN
local-dir: target/site
verbose: true
on:
branch: master
- provider: script
skip_cleanup: true
script: sbt +publish
on:
branch: master
# Copied from https://github.com/typelevel/cats/blob/master/.travis.yml
cache:
directories:
- $HOME/.sbt
- $HOME/.ivy2/cache
- $HOME/.m2
| Make publish stand alone step | Make publish stand alone step
| YAML | mit | pathikrit/better-files,pathikrit/better-files,pathikrit/better-files | yaml | ## Code Before:
language: scala
jdk: oraclejdk8
env:
global:
- SBT_GHPAGES_COMMIT_MESSAGE='Publishing Scaladoc [ci skip]'
- SBT_OPTS="-XX:+UseConcMarkSweepGC -XX:+CMSClassUnloadingEnabled -XX:MetaspaceSize=256M -XX:MaxMetaspaceSize=512M -Xmx2G"
script: sbt clean coverage +test
deploy:
- provider: script
skip_cleanup: true
script: sbt updateImpactSubmit coverageReport coverageAggregate codacyCoverage makeSite +publish
on:
branch: master
- provider: script
skip_cleanup: true
script: bash <(curl -s https://codecov.io/bash)
on:
branch: master
- provider: pages
skip_cleanup: true
github-token: $GITHUB_TOKEN
local-dir: target/site
verbose: true
on:
branch: master
# Copied from https://github.com/typelevel/cats/blob/master/.travis.yml
cache:
directories:
- $HOME/.sbt
- $HOME/.ivy2/cache
- $HOME/.m2
## Instruction:
Make publish stand alone step
## Code After:
language: scala
jdk: oraclejdk8
env:
global:
- SBT_GHPAGES_COMMIT_MESSAGE='Publishing Scaladoc [ci skip]'
- SBT_OPTS="-XX:+UseConcMarkSweepGC -XX:+CMSClassUnloadingEnabled -XX:MetaspaceSize=256M -XX:MaxMetaspaceSize=512M -Xmx2G"
script: sbt clean coverage +test
deploy:
- provider: script
skip_cleanup: true
script: sbt updateImpactSubmit coverageReport coverageAggregate codacyCoverage makeSite +publish
on:
branch: master
- provider: script
skip_cleanup: true
script: bash <(curl -s https://codecov.io/bash)
on:
branch: master
- provider: pages
skip_cleanup: true
github-token: $GITHUB_TOKEN
local-dir: target/site
verbose: true
on:
branch: master
- provider: script
skip_cleanup: true
script: sbt +publish
on:
branch: master
# Copied from https://github.com/typelevel/cats/blob/master/.travis.yml
cache:
directories:
- $HOME/.sbt
- $HOME/.ivy2/cache
- $HOME/.m2
| language: scala
jdk: oraclejdk8
env:
global:
- SBT_GHPAGES_COMMIT_MESSAGE='Publishing Scaladoc [ci skip]'
- SBT_OPTS="-XX:+UseConcMarkSweepGC -XX:+CMSClassUnloadingEnabled -XX:MetaspaceSize=256M -XX:MaxMetaspaceSize=512M -Xmx2G"
script: sbt clean coverage +test
deploy:
- provider: script
skip_cleanup: true
script: sbt updateImpactSubmit coverageReport coverageAggregate codacyCoverage makeSite +publish
on:
branch: master
- provider: script
skip_cleanup: true
script: bash <(curl -s https://codecov.io/bash)
on:
branch: master
- provider: pages
- skip_cleanup: true
? --
+ skip_cleanup: true
- github-token: $GITHUB_TOKEN
? --
+ github-token: $GITHUB_TOKEN
- local-dir: target/site
? --
+ local-dir: target/site
- verbose: true
? --
+ verbose: true
- on:
? --
+ on:
- branch: master
? --
+ branch: master
+
+ - provider: script
+ skip_cleanup: true
+ script: sbt +publish
+ on:
+ branch: master
# Copied from https://github.com/typelevel/cats/blob/master/.travis.yml
cache:
directories:
- $HOME/.sbt
- $HOME/.ivy2/cache
- $HOME/.m2 | 18 | 0.486486 | 12 | 6 |
babf451f384c29920a79535a38ce529b4cf6483c | vim/coc-settings.json | vim/coc-settings.json | {
"coc.preferences.extensionUpdateCheck": "never",
"diagnostic.errorSign": "✗",
"diagnostic.warningSign": "‼",
"diagnostic.infoSign": "𝙄",
"diagnostic.hintSign": "▸",
"diagnostic.enableHighlightLineNumber": false,
"diagnostic.enableMessage": "jump",
"java.format.settings.url": "https://raw.githubusercontent.com/google/styleguide/gh-pages/eclipse-java-google-style.xml",
"snippets.ultisnips.directories": ["UltiSnips"],
"snippets.extends": {
"javascriptreact": ["javascript"],
"typescriptreact": ["javascript"],
"typescript": ["javascript"]
},
"suggest.autoTrigger": "none",
"suggest.noselect": false
}
| {
"coc.preferences.extensionUpdateCheck": "never",
"diagnostic.errorSign": "✗",
"diagnostic.warningSign": "‼",
"diagnostic.infoSign": "𝙄",
"diagnostic.hintSign": "▸",
"diagnostic.enableHighlightLineNumber": false,
"diagnostic.enableMessage": "jump",
"java.format.settings.url": "https://raw.githubusercontent.com/google/styleguide/gh-pages/eclipse-java-google-style.xml",
"snippets.ultisnips.directories": ["UltiSnips"],
"snippets.userSnippetsDirectory": "~/.config/dotfiles-local/snippets",
"snippets.extends": {
"javascriptreact": ["javascript"],
"typescriptreact": ["javascript"],
"typescript": ["javascript"]
},
"suggest.autoTrigger": "none",
"suggest.noselect": false
}
| Set user snippets directory in coc.nvim config | Set user snippets directory in coc.nvim config
| JSON | mit | kjhaber/dotfiles | json | ## Code Before:
{
"coc.preferences.extensionUpdateCheck": "never",
"diagnostic.errorSign": "✗",
"diagnostic.warningSign": "‼",
"diagnostic.infoSign": "𝙄",
"diagnostic.hintSign": "▸",
"diagnostic.enableHighlightLineNumber": false,
"diagnostic.enableMessage": "jump",
"java.format.settings.url": "https://raw.githubusercontent.com/google/styleguide/gh-pages/eclipse-java-google-style.xml",
"snippets.ultisnips.directories": ["UltiSnips"],
"snippets.extends": {
"javascriptreact": ["javascript"],
"typescriptreact": ["javascript"],
"typescript": ["javascript"]
},
"suggest.autoTrigger": "none",
"suggest.noselect": false
}
## Instruction:
Set user snippets directory in coc.nvim config
## Code After:
{
"coc.preferences.extensionUpdateCheck": "never",
"diagnostic.errorSign": "✗",
"diagnostic.warningSign": "‼",
"diagnostic.infoSign": "𝙄",
"diagnostic.hintSign": "▸",
"diagnostic.enableHighlightLineNumber": false,
"diagnostic.enableMessage": "jump",
"java.format.settings.url": "https://raw.githubusercontent.com/google/styleguide/gh-pages/eclipse-java-google-style.xml",
"snippets.ultisnips.directories": ["UltiSnips"],
"snippets.userSnippetsDirectory": "~/.config/dotfiles-local/snippets",
"snippets.extends": {
"javascriptreact": ["javascript"],
"typescriptreact": ["javascript"],
"typescript": ["javascript"]
},
"suggest.autoTrigger": "none",
"suggest.noselect": false
}
| {
"coc.preferences.extensionUpdateCheck": "never",
"diagnostic.errorSign": "✗",
"diagnostic.warningSign": "‼",
"diagnostic.infoSign": "𝙄",
"diagnostic.hintSign": "▸",
"diagnostic.enableHighlightLineNumber": false,
"diagnostic.enableMessage": "jump",
"java.format.settings.url": "https://raw.githubusercontent.com/google/styleguide/gh-pages/eclipse-java-google-style.xml",
"snippets.ultisnips.directories": ["UltiSnips"],
+ "snippets.userSnippetsDirectory": "~/.config/dotfiles-local/snippets",
"snippets.extends": {
"javascriptreact": ["javascript"],
"typescriptreact": ["javascript"],
"typescript": ["javascript"]
},
"suggest.autoTrigger": "none",
"suggest.noselect": false
} | 1 | 0.055556 | 1 | 0 |
96c3801b47ddb31e2489d7ca57e204e4cfe538d0 | README.md | README.md |
Mystery project with Rob and Zion.
## How to Develop
You will need Ruby and Rubygems installed.
```bash
bundle install
bundle exec rails s
```
Visit [localhost:3000](http://localhost:3000).
|
A web app that allows Overwatch players to plan out their team composition
for every point of every map on both offense and defense. Players should be
able to save their team comps as well as be able to easily share their team
comps.
## App Structure
- Ruby on Rails web app using [React](https://facebook.github.io/react/) for a speedy,
single-page front end
- This means most views will be implemented in
[JSX](https://facebook.github.io/react/docs/jsx-in-depth.html) in
app/assets/javascripts/components/ instead of ERB in app/views/.
- Rails endpoints will provide a JSON REST API for the front end
- PostgreSQL database for convenient deployment to Heroku
- Potential integration with OAuth providers such as Discord for authentication
built on top of [Devise](https://github.com/plataformatec/devise)
## How to Develop
You will need Ruby and Rubygems installed.
```bash
bundle install
bundle exec rails s
```
Visit [localhost:3000](http://localhost:3000).
| Add some notes about app structure | Add some notes about app structure
| Markdown | mit | cheshire137/overwatch-team-comps,cheshire137/overwatch-team-comps,cheshire137/overwatch-team-comps | markdown | ## Code Before:
Mystery project with Rob and Zion.
## How to Develop
You will need Ruby and Rubygems installed.
```bash
bundle install
bundle exec rails s
```
Visit [localhost:3000](http://localhost:3000).
## Instruction:
Add some notes about app structure
## Code After:
A web app that allows Overwatch players to plan out their team composition
for every point of every map on both offense and defense. Players should be
able to save their team comps as well as be able to easily share their team
comps.
## App Structure
- Ruby on Rails web app using [React](https://facebook.github.io/react/) for a speedy,
single-page front end
- This means most views will be implemented in
[JSX](https://facebook.github.io/react/docs/jsx-in-depth.html) in
app/assets/javascripts/components/ instead of ERB in app/views/.
- Rails endpoints will provide a JSON REST API for the front end
- PostgreSQL database for convenient deployment to Heroku
- Potential integration with OAuth providers such as Discord for authentication
built on top of [Devise](https://github.com/plataformatec/devise)
## How to Develop
You will need Ruby and Rubygems installed.
```bash
bundle install
bundle exec rails s
```
Visit [localhost:3000](http://localhost:3000).
|
- Mystery project with Rob and Zion.
+ A web app that allows Overwatch players to plan out their team composition
+ for every point of every map on both offense and defense. Players should be
+ able to save their team comps as well as be able to easily share their team
+ comps.
+
+ ## App Structure
+
+ - Ruby on Rails web app using [React](https://facebook.github.io/react/) for a speedy,
+ single-page front end
+ - This means most views will be implemented in
+ [JSX](https://facebook.github.io/react/docs/jsx-in-depth.html) in
+ app/assets/javascripts/components/ instead of ERB in app/views/.
+ - Rails endpoints will provide a JSON REST API for the front end
+ - PostgreSQL database for convenient deployment to Heroku
+ - Potential integration with OAuth providers such as Discord for authentication
+ built on top of [Devise](https://github.com/plataformatec/devise)
## How to Develop
You will need Ruby and Rubygems installed.
```bash
bundle install
bundle exec rails s
```
Visit [localhost:3000](http://localhost:3000). | 17 | 1.307692 | 16 | 1 |
2ba05e3c9f6a34636420ed38a422bd3c9ed19176 | provisioning/packages/R-packages.txt | provisioning/packages/R-packages.txt | caret
chron
class
colorspace
DBI
devtools
dichromat
digest
doMC
doParallel
dplyr
earth
e1071
fields
foreach
gbm
ggplot2
glmnet
gridExtra
gtable
Hmisc
inline
iterators
kernlab
knitr
labeling
lme4
mapproj
maps
mda
mgcv
munsell
plyr
proto
pROC
randomForest
rbenchmark
RColorBrewer
Rcpp
readstata13
reshape2
rJava
ROCR
rpart
RSQLite
scales
spam
stringr
tidyr
xlsx
xlsxjars
xtable
| caret
chron
class
colorspace
DBI
devtools
dichromat
digest
doMC
doParallel
dplyr
earth
e1071
fields
foreach
gbm
ggplot2
glmnet
gridExtra
gtable
Hmisc
inline
iterators
kernlab
knitr
labeling
lme4
mapproj
maps
mda
mgcv
munsell
openNLP
plyr
proto
pROC
qdap
randomForest
rbenchmark
RColorBrewer
Rcpp
readstata13
reshape2
rJava
ROCR
rpart
RSQLite
scales
SnowballC
spam
stringr
tidyr
tm
xlsx
xlsxjars
xtable
| Add some R text mining packages | Add some R text mining packages | Text | apache-2.0 | ucberkeley/bce | text | ## Code Before:
caret
chron
class
colorspace
DBI
devtools
dichromat
digest
doMC
doParallel
dplyr
earth
e1071
fields
foreach
gbm
ggplot2
glmnet
gridExtra
gtable
Hmisc
inline
iterators
kernlab
knitr
labeling
lme4
mapproj
maps
mda
mgcv
munsell
plyr
proto
pROC
randomForest
rbenchmark
RColorBrewer
Rcpp
readstata13
reshape2
rJava
ROCR
rpart
RSQLite
scales
spam
stringr
tidyr
xlsx
xlsxjars
xtable
## Instruction:
Add some R text mining packages
## Code After:
caret
chron
class
colorspace
DBI
devtools
dichromat
digest
doMC
doParallel
dplyr
earth
e1071
fields
foreach
gbm
ggplot2
glmnet
gridExtra
gtable
Hmisc
inline
iterators
kernlab
knitr
labeling
lme4
mapproj
maps
mda
mgcv
munsell
openNLP
plyr
proto
pROC
qdap
randomForest
rbenchmark
RColorBrewer
Rcpp
readstata13
reshape2
rJava
ROCR
rpart
RSQLite
scales
SnowballC
spam
stringr
tidyr
tm
xlsx
xlsxjars
xtable
| caret
chron
class
colorspace
DBI
devtools
dichromat
digest
doMC
doParallel
dplyr
earth
e1071
fields
foreach
gbm
ggplot2
glmnet
gridExtra
gtable
Hmisc
inline
iterators
kernlab
knitr
labeling
lme4
mapproj
maps
mda
mgcv
munsell
+ openNLP
plyr
proto
pROC
+ qdap
randomForest
rbenchmark
RColorBrewer
Rcpp
readstata13
reshape2
rJava
ROCR
rpart
RSQLite
scales
+ SnowballC
spam
stringr
tidyr
+ tm
xlsx
xlsxjars
xtable | 4 | 0.076923 | 4 | 0 |
9881f39875043c3a02d6aa03174cc5a50baccae6 | lib/userbin/support/rails.rb | lib/userbin/support/rails.rb | module Userbin
module UserbinClient
def userbin
@userbin ||= Userbin::Client.new(request, response)
end
end
ActiveSupport.on_load(:action_controller) do
include UserbinClient
end
end
| module Userbin
module UserbinClient
def userbin
@userbin ||= env['userbin'] || Userbin::Client.new(request, response)
end
end
ActiveSupport.on_load(:action_controller) do
include UserbinClient
end
end
| Use client from env if present | Use client from env if present
| Ruby | mit | cloud66/castle-ruby,castle/castle-ruby,castle/castle-ruby,givey/castle-ruby | ruby | ## Code Before:
module Userbin
module UserbinClient
def userbin
@userbin ||= Userbin::Client.new(request, response)
end
end
ActiveSupport.on_load(:action_controller) do
include UserbinClient
end
end
## Instruction:
Use client from env if present
## Code After:
module Userbin
module UserbinClient
def userbin
@userbin ||= env['userbin'] || Userbin::Client.new(request, response)
end
end
ActiveSupport.on_load(:action_controller) do
include UserbinClient
end
end
| module Userbin
module UserbinClient
def userbin
- @userbin ||= Userbin::Client.new(request, response)
+ @userbin ||= env['userbin'] || Userbin::Client.new(request, response)
? ++++++++++++++++++
end
end
ActiveSupport.on_load(:action_controller) do
include UserbinClient
end
end | 2 | 0.181818 | 1 | 1 |
1a8177afe77d78022865b32b1df2fddcf17bf1d3 | metrics/pairwise/pairwise.go | metrics/pairwise/pairwise.go | // Package pairwise implements utilities to evaluate pairwise distances or inner product (via kernel).
package pairwise
| // Package pairwise implements utilities to evaluate pairwise distances or inner product (via kernel).
package pairwise
import (
"github.com/gonum/matrix/mat64"
)
type PairwiseDistanceFunc interface {
Distance(vectorX *mat64.Dense, vectorY *mat64.Dense) float64
}
| Add distanceFunc interface to metrics. | Add distanceFunc interface to metrics.
| Go | mit | jmptrader/golearn,gitforhf/golearn,jacobxk/golearn,thedadams/golearn,elkingtonmcb/golearn,beni55/golearn,tempbottle/golearn,thedadams/golearn,ghs11/golearn,jacobxk/golearn,tempbottle/golearn,liudhzhyym/golearn,sjwhitworth/golearn,dboyliao/golearn,gitforhf/golearn,hsavit1/golearn,hsavit1/golearn,EtienneBruines/golearn,ghs11/golearn,sjwhitworth/golearn,elkingtonmcb/golearn,jmptrader/golearn,Sentimentron/golearn,liudhzhyym/golearn,io2reactor2proactor/golearn,EtienneBruines/golearn,io2reactor2proactor/golearn,liudhzhyym/golearn,io2reactor2proactor/golearn,thedadams/golearn,0x0all/golearn,jacobxk/golearn,EtienneBruines/golearn,tempbottle/golearn,jmptrader/golearn,dboyliao/golearn,gitforhf/golearn,Sentimentron/golearn,sjwhitworth/golearn,beni55/golearn,hsavit1/golearn,beni55/golearn,ghs11/golearn,thedadams/golearn,0x0all/golearn,sjwhitworth/golearn,0x0all/golearn,dboyliao/golearn,sjwhitworth/golearn,elkingtonmcb/golearn,Sentimentron/golearn | go | ## Code Before:
// Package pairwise implements utilities to evaluate pairwise distances or inner product (via kernel).
package pairwise
## Instruction:
Add distanceFunc interface to metrics.
## Code After:
// Package pairwise implements utilities to evaluate pairwise distances or inner product (via kernel).
package pairwise
import (
"github.com/gonum/matrix/mat64"
)
type PairwiseDistanceFunc interface {
Distance(vectorX *mat64.Dense, vectorY *mat64.Dense) float64
}
| // Package pairwise implements utilities to evaluate pairwise distances or inner product (via kernel).
package pairwise
+
+ import (
+ "github.com/gonum/matrix/mat64"
+ )
+
+ type PairwiseDistanceFunc interface {
+ Distance(vectorX *mat64.Dense, vectorY *mat64.Dense) float64
+ } | 8 | 4 | 8 | 0 |
4df4d69f5be50fef21ec8e861956fca4c9398b6e | bindings/python/InstallConfig.cmake | bindings/python/InstallConfig.cmake |
install(TARGETS ${SWIG_MODULE_TARGETS}
DESTINATION python/$<CONFIG>/opencmiss/zinc
COMPONENT PythonBindings
)
install(FILES ${CMAKE_CURRENT_BINARY_DIR}/\${CMAKE_INSTALL_CONFIG_NAME}/setup.py
DESTINATION python/$<CONFIG>
COMPONENT PythonBindings
)
install(DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/\${CMAKE_INSTALL_CONFIG_NAME}/opencmiss
DESTINATION python/$<CONFIG>
COMPONENT PythonBindings
FILES_MATCHING PATTERN "*.py"
)
install(FILES "${BASE_PYTHON_PACKAGE_DIR}/README.txt"
DESTINATION python/$<CONFIG>
COMPONENT PythonBindings
)
# Install step for virtual environment (if given)
if (ZINC_USE_VIRTUALENV)
# Variables required for OCPythonBindingsVirtualEnv.
set(VIRTUALENV_INSTALL_PREFIX ${ZINC_VIRTUALENV_INSTALL_PREFIX})
set(PYTHON_PACKAGE_CURRENT_NAME zinc)
# Virtual environment creation target.
include(OCPythonBindingsVirtualEnv)
endif ()
|
set(_PYTHON_DESTINATION_PREFIX lib/python${PYTHONLIBS_MAJOR_VERSION}.${PYTHONLIBS_MINOR_VERSION})
install(TARGETS ${SWIG_MODULE_TARGETS}
DESTINATION ${_PYTHON_DESTINATION_PREFIX}/$<CONFIG>/opencmiss/zinc
COMPONENT PythonBindings
)
install(FILES ${CMAKE_CURRENT_BINARY_DIR}/\${CMAKE_INSTALL_CONFIG_NAME}/setup.py
DESTINATION ${_PYTHON_DESTINATION_PREFIX}/$<CONFIG>
COMPONENT PythonBindings
)
install(DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/\${CMAKE_INSTALL_CONFIG_NAME}/opencmiss
DESTINATION ${_PYTHON_DESTINATION_PREFIX}/$<CONFIG>
COMPONENT PythonBindings
FILES_MATCHING PATTERN "*.py"
)
install(FILES "${BASE_PYTHON_PACKAGE_DIR}/README.txt"
DESTINATION ${_PYTHON_DESTINATION_PREFIX}/$<CONFIG>
COMPONENT PythonBindings
)
# Install step for virtual environment (if given)
if (ZINC_USE_VIRTUALENV)
# Variables required for OCPythonBindingsVirtualEnv.
set(VIRTUALENV_INSTALL_PREFIX ${ZINC_VIRTUALENV_INSTALL_PREFIX})
set(PYTHON_PACKAGE_CURRENT_NAME zinc)
# Virtual environment creation target.
include(OCPythonBindingsVirtualEnv)
endif ()
| Install python packages into a standard location. | Install python packages into a standard location.
| CMake | mpl-2.0 | hsorby/zinc,hsorby/zinc,hsorby/zinc,hsorby/zinc,OpenCMISS/zinc,OpenCMISS/zinc,OpenCMISS/zinc,OpenCMISS/zinc | cmake | ## Code Before:
install(TARGETS ${SWIG_MODULE_TARGETS}
DESTINATION python/$<CONFIG>/opencmiss/zinc
COMPONENT PythonBindings
)
install(FILES ${CMAKE_CURRENT_BINARY_DIR}/\${CMAKE_INSTALL_CONFIG_NAME}/setup.py
DESTINATION python/$<CONFIG>
COMPONENT PythonBindings
)
install(DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/\${CMAKE_INSTALL_CONFIG_NAME}/opencmiss
DESTINATION python/$<CONFIG>
COMPONENT PythonBindings
FILES_MATCHING PATTERN "*.py"
)
install(FILES "${BASE_PYTHON_PACKAGE_DIR}/README.txt"
DESTINATION python/$<CONFIG>
COMPONENT PythonBindings
)
# Install step for virtual environment (if given)
if (ZINC_USE_VIRTUALENV)
# Variables required for OCPythonBindingsVirtualEnv.
set(VIRTUALENV_INSTALL_PREFIX ${ZINC_VIRTUALENV_INSTALL_PREFIX})
set(PYTHON_PACKAGE_CURRENT_NAME zinc)
# Virtual environment creation target.
include(OCPythonBindingsVirtualEnv)
endif ()
## Instruction:
Install python packages into a standard location.
## Code After:
set(_PYTHON_DESTINATION_PREFIX lib/python${PYTHONLIBS_MAJOR_VERSION}.${PYTHONLIBS_MINOR_VERSION})
install(TARGETS ${SWIG_MODULE_TARGETS}
DESTINATION ${_PYTHON_DESTINATION_PREFIX}/$<CONFIG>/opencmiss/zinc
COMPONENT PythonBindings
)
install(FILES ${CMAKE_CURRENT_BINARY_DIR}/\${CMAKE_INSTALL_CONFIG_NAME}/setup.py
DESTINATION ${_PYTHON_DESTINATION_PREFIX}/$<CONFIG>
COMPONENT PythonBindings
)
install(DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/\${CMAKE_INSTALL_CONFIG_NAME}/opencmiss
DESTINATION ${_PYTHON_DESTINATION_PREFIX}/$<CONFIG>
COMPONENT PythonBindings
FILES_MATCHING PATTERN "*.py"
)
install(FILES "${BASE_PYTHON_PACKAGE_DIR}/README.txt"
DESTINATION ${_PYTHON_DESTINATION_PREFIX}/$<CONFIG>
COMPONENT PythonBindings
)
# Install step for virtual environment (if given)
if (ZINC_USE_VIRTUALENV)
# Variables required for OCPythonBindingsVirtualEnv.
set(VIRTUALENV_INSTALL_PREFIX ${ZINC_VIRTUALENV_INSTALL_PREFIX})
set(PYTHON_PACKAGE_CURRENT_NAME zinc)
# Virtual environment creation target.
include(OCPythonBindingsVirtualEnv)
endif ()
| +
+ set(_PYTHON_DESTINATION_PREFIX lib/python${PYTHONLIBS_MAJOR_VERSION}.${PYTHONLIBS_MINOR_VERSION})
install(TARGETS ${SWIG_MODULE_TARGETS}
- DESTINATION python/$<CONFIG>/opencmiss/zinc
+ DESTINATION ${_PYTHON_DESTINATION_PREFIX}/$<CONFIG>/opencmiss/zinc
COMPONENT PythonBindings
)
install(FILES ${CMAKE_CURRENT_BINARY_DIR}/\${CMAKE_INSTALL_CONFIG_NAME}/setup.py
- DESTINATION python/$<CONFIG>
+ DESTINATION ${_PYTHON_DESTINATION_PREFIX}/$<CONFIG>
COMPONENT PythonBindings
)
install(DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/\${CMAKE_INSTALL_CONFIG_NAME}/opencmiss
- DESTINATION python/$<CONFIG>
+ DESTINATION ${_PYTHON_DESTINATION_PREFIX}/$<CONFIG>
COMPONENT PythonBindings
FILES_MATCHING PATTERN "*.py"
)
install(FILES "${BASE_PYTHON_PACKAGE_DIR}/README.txt"
- DESTINATION python/$<CONFIG>
+ DESTINATION ${_PYTHON_DESTINATION_PREFIX}/$<CONFIG>
COMPONENT PythonBindings
)
# Install step for virtual environment (if given)
if (ZINC_USE_VIRTUALENV)
# Variables required for OCPythonBindingsVirtualEnv.
set(VIRTUALENV_INSTALL_PREFIX ${ZINC_VIRTUALENV_INSTALL_PREFIX})
set(PYTHON_PACKAGE_CURRENT_NAME zinc)
# Virtual environment creation target.
include(OCPythonBindingsVirtualEnv)
endif ()
| 10 | 0.357143 | 6 | 4 |
5a956ea60d5f96263341edb97b55a90df90f7b3c | app/models/gitosis_public_key.rb | app/models/gitosis_public_key.rb | class GitosisPublicKey < ActiveRecord::Base
STATUS_ACTIVE = true
STATUS_LOCKED = false
belongs_to :user
validates_uniqueness_of :title, :scope => :user_id
validates_uniqueness_of :identifier, :score => :user_id
validates_presence_of :title, :key, :identifier
named_scope :active, {:conditions => {:active => GitosisPublicKey::STATUS_ACTIVE != 0}}
named_scope :inactive, {:conditions => {:active => GitosisPublicKey::STATUS_LOCKED == 0}}
validate :has_not_been_changed
before_validation :set_identifier
def has_not_been_changed
unless new_record?
%w(identifier key user_id).each do |attribute|
errors.add(attribute, 'may not be changed') unless changes[attribute].blank?
end
end
end
def set_identifier
self.identifier ||= "#{self.user.login.underscore}-#{self.title.underscore}-#{self.id}".gsub(/[^0-9a-zA-Z-_]/,'_')
end
def to_s ; title ; end
end
| class GitosisPublicKey < ActiveRecord::Base
STATUS_ACTIVE = true
STATUS_LOCKED = false
belongs_to :user
validates_uniqueness_of :title, :scope => :user_id
validates_uniqueness_of :identifier, :score => :user_id
validates_presence_of :title, :key, :identifier
named_scope :active, {:conditions => {:active => GitosisPublicKey::STATUS_ACTIVE}}
named_scope :inactive, {:conditions => {:active => GitosisPublicKey::STATUS_LOCKED}}
validate :has_not_been_changed
before_validation :set_identifier
def has_not_been_changed
unless new_record?
%w(identifier key user_id).each do |attribute|
errors.add(attribute, 'may not be changed') unless changes[attribute].blank?
end
end
end
def set_identifier
self.identifier ||= "#{self.user.login.underscore}-#{self.title.underscore}-#{self.id}".gsub(/[^0-9a-zA-Z-_]/,'_')
end
def to_s ; title ; end
end
| Save fail in last commit | Save fail in last commit
| Ruby | mit | ebastien/redmine-gitolite,ivyl/redmine-gitolite,gdoffe/redmine-gitolite,gdoffe/redmine-gitolite,gdoffe/redmine-gitolite | ruby | ## Code Before:
class GitosisPublicKey < ActiveRecord::Base
STATUS_ACTIVE = true
STATUS_LOCKED = false
belongs_to :user
validates_uniqueness_of :title, :scope => :user_id
validates_uniqueness_of :identifier, :score => :user_id
validates_presence_of :title, :key, :identifier
named_scope :active, {:conditions => {:active => GitosisPublicKey::STATUS_ACTIVE != 0}}
named_scope :inactive, {:conditions => {:active => GitosisPublicKey::STATUS_LOCKED == 0}}
validate :has_not_been_changed
before_validation :set_identifier
def has_not_been_changed
unless new_record?
%w(identifier key user_id).each do |attribute|
errors.add(attribute, 'may not be changed') unless changes[attribute].blank?
end
end
end
def set_identifier
self.identifier ||= "#{self.user.login.underscore}-#{self.title.underscore}-#{self.id}".gsub(/[^0-9a-zA-Z-_]/,'_')
end
def to_s ; title ; end
end
## Instruction:
Save fail in last commit
## Code After:
class GitosisPublicKey < ActiveRecord::Base
STATUS_ACTIVE = true
STATUS_LOCKED = false
belongs_to :user
validates_uniqueness_of :title, :scope => :user_id
validates_uniqueness_of :identifier, :score => :user_id
validates_presence_of :title, :key, :identifier
named_scope :active, {:conditions => {:active => GitosisPublicKey::STATUS_ACTIVE}}
named_scope :inactive, {:conditions => {:active => GitosisPublicKey::STATUS_LOCKED}}
validate :has_not_been_changed
before_validation :set_identifier
def has_not_been_changed
unless new_record?
%w(identifier key user_id).each do |attribute|
errors.add(attribute, 'may not be changed') unless changes[attribute].blank?
end
end
end
def set_identifier
self.identifier ||= "#{self.user.login.underscore}-#{self.title.underscore}-#{self.id}".gsub(/[^0-9a-zA-Z-_]/,'_')
end
def to_s ; title ; end
end
| class GitosisPublicKey < ActiveRecord::Base
STATUS_ACTIVE = true
STATUS_LOCKED = false
belongs_to :user
validates_uniqueness_of :title, :scope => :user_id
validates_uniqueness_of :identifier, :score => :user_id
validates_presence_of :title, :key, :identifier
- named_scope :active, {:conditions => {:active => GitosisPublicKey::STATUS_ACTIVE != 0}}
? -----
+ named_scope :active, {:conditions => {:active => GitosisPublicKey::STATUS_ACTIVE}}
- named_scope :inactive, {:conditions => {:active => GitosisPublicKey::STATUS_LOCKED == 0}}
? -----
+ named_scope :inactive, {:conditions => {:active => GitosisPublicKey::STATUS_LOCKED}}
validate :has_not_been_changed
before_validation :set_identifier
def has_not_been_changed
unless new_record?
%w(identifier key user_id).each do |attribute|
errors.add(attribute, 'may not be changed') unless changes[attribute].blank?
end
end
end
def set_identifier
self.identifier ||= "#{self.user.login.underscore}-#{self.title.underscore}-#{self.id}".gsub(/[^0-9a-zA-Z-_]/,'_')
end
def to_s ; title ; end
end | 4 | 0.129032 | 2 | 2 |
dc1544529160b565bfa75321522c86835e141045 | kids/grades-k-5/index.html | kids/grades-k-5/index.html | ---
layout: page
---
<p id="breadcrumbs">
<a href="{{ site.baseurl }}/">Home</a> › Kids › Grades K-5
</p>
<h1>Grades K-5</h1>
<p>The goal of Lifestone’s KID’S LIFE ministry is to teach kids truths of the Bible with age-appropriate, fun, interactive experiences taught by committed volunteers who serve Jesus by serving you and your kids. Lifestone is committed not only to cementing the fundamentals of faith into the hearts and minds of children, but also equipping parents and getting families talking about Jesus together throughout their week.</p>
<h2>What to Expect</h2>
<p>When you walk in the door you’ll see signage directing you to your child’s check in. At the check in area, you will be greeted by a volunteer who will help you sign in your child. Our desire is for families to worship together, so when the worship music begins, your child’s teacher will bring children grades K-5 in to worship along side the adults before returning to the kids area for their kid-sized Bible lesson.</p>
<p>*** Our secure environments are staffed with loving volunteers who have all submitted to extensive background checks and interviews.</p>
{% include flickr.html albumId=72157651701812368 %}
| ---
layout: page
---
<p id="breadcrumbs">
<a href="{{ site.baseurl }}/">Home</a> › Kids › Grades K-5
</p>
<h1>Grades K-5</h1>
<p>The goal of Lifestone’s KID’S LIFE ministry is to teach kids truths of the Bible with age-appropriate, fun, interactive experiences taught by committed volunteers who serve Jesus by serving you and your kids. Lifestone is committed not only to cementing the fundamentals of faith into the hearts and minds of children, but also equipping parents and getting families talking about Jesus together throughout their week.</p>
<blockquote>
<p>Kid's Camp coming this summer!</p>
</blockquote>
<h2>What to Expect</h2>
<p>When you walk in the door you’ll see signage directing you to your child’s check in. At the check in area, you will be greeted by a volunteer who will help you sign in your child. Our desire is for families to worship together, so when the worship music begins, your child’s teacher will bring children grades K-5 in to worship along side the adults before returning to the kids area for their kid-sized Bible lesson.</p>
<p>*** Our secure environments are staffed with loving volunteers who have all submitted to extensive background checks and interviews.</p>
{% include flickr.html albumId=72157651701812368 %}
| Add 'Kid's camp coming this summer!' | Add 'Kid's camp coming this summer!'
| HTML | mit | Mountainview-WebDesign/lifestonechurch,Mountainview-WebDesign/lifestonechurch,Mountainview-WebDesign/lifestonechurch | html | ## Code Before:
---
layout: page
---
<p id="breadcrumbs">
<a href="{{ site.baseurl }}/">Home</a> › Kids › Grades K-5
</p>
<h1>Grades K-5</h1>
<p>The goal of Lifestone’s KID’S LIFE ministry is to teach kids truths of the Bible with age-appropriate, fun, interactive experiences taught by committed volunteers who serve Jesus by serving you and your kids. Lifestone is committed not only to cementing the fundamentals of faith into the hearts and minds of children, but also equipping parents and getting families talking about Jesus together throughout their week.</p>
<h2>What to Expect</h2>
<p>When you walk in the door you’ll see signage directing you to your child’s check in. At the check in area, you will be greeted by a volunteer who will help you sign in your child. Our desire is for families to worship together, so when the worship music begins, your child’s teacher will bring children grades K-5 in to worship along side the adults before returning to the kids area for their kid-sized Bible lesson.</p>
<p>*** Our secure environments are staffed with loving volunteers who have all submitted to extensive background checks and interviews.</p>
{% include flickr.html albumId=72157651701812368 %}
## Instruction:
Add 'Kid's camp coming this summer!'
## Code After:
---
layout: page
---
<p id="breadcrumbs">
<a href="{{ site.baseurl }}/">Home</a> › Kids › Grades K-5
</p>
<h1>Grades K-5</h1>
<p>The goal of Lifestone’s KID’S LIFE ministry is to teach kids truths of the Bible with age-appropriate, fun, interactive experiences taught by committed volunteers who serve Jesus by serving you and your kids. Lifestone is committed not only to cementing the fundamentals of faith into the hearts and minds of children, but also equipping parents and getting families talking about Jesus together throughout their week.</p>
<blockquote>
<p>Kid's Camp coming this summer!</p>
</blockquote>
<h2>What to Expect</h2>
<p>When you walk in the door you’ll see signage directing you to your child’s check in. At the check in area, you will be greeted by a volunteer who will help you sign in your child. Our desire is for families to worship together, so when the worship music begins, your child’s teacher will bring children grades K-5 in to worship along side the adults before returning to the kids area for their kid-sized Bible lesson.</p>
<p>*** Our secure environments are staffed with loving volunteers who have all submitted to extensive background checks and interviews.</p>
{% include flickr.html albumId=72157651701812368 %}
| ---
layout: page
---
<p id="breadcrumbs">
<a href="{{ site.baseurl }}/">Home</a> › Kids › Grades K-5
</p>
<h1>Grades K-5</h1>
<p>The goal of Lifestone’s KID’S LIFE ministry is to teach kids truths of the Bible with age-appropriate, fun, interactive experiences taught by committed volunteers who serve Jesus by serving you and your kids. Lifestone is committed not only to cementing the fundamentals of faith into the hearts and minds of children, but also equipping parents and getting families talking about Jesus together throughout their week.</p>
+ <blockquote>
+ <p>Kid's Camp coming this summer!</p>
+ </blockquote>
+
<h2>What to Expect</h2>
<p>When you walk in the door you’ll see signage directing you to your child’s check in. At the check in area, you will be greeted by a volunteer who will help you sign in your child. Our desire is for families to worship together, so when the worship music begins, your child’s teacher will bring children grades K-5 in to worship along side the adults before returning to the kids area for their kid-sized Bible lesson.</p>
<p>*** Our secure environments are staffed with loving volunteers who have all submitted to extensive background checks and interviews.</p>
{% include flickr.html albumId=72157651701812368 %} | 4 | 0.210526 | 4 | 0 |
79df7ef3ea3b66729497579a34977eb98e23a895 | metadata/net.pejici.easydice.txt | metadata/net.pejici.easydice.txt | Categories:Games
License:GPLv3+
Web Site:http://pejici.net/~slobo/projects/easydice.html
Source Code:https://github.com/pejic/EasyDice
Issue Tracker:https://github.com/pejic/EasyDice/issues
Auto Name:Easy Dice
Summary:RPG dice roller
Description:
Easy Dice is an rpg dice rolling app with the following features:
* Variety of die types: D4, D6, D8, D10, D10x10, D12, D20
* Choose which dice to roll together. For example a D4 with two D8.
* Re-rolling of dice. Tap dice to select which ones will roll and which ones will stay.
* Adding up. Sums all the die face values.
.
Repo Type:git
Repo:https://github.com/pejic/EasyDice-AndroidAssets.git
Build:1.5,3
commit=d4f7fb7501ac8df8b23726905b7f0d6aabfbea6b
subdir=androidsrc
Auto Update Mode:None
Update Check Mode:RepoManifest/android-assets
Current Version:1.6
Current Version Code:4
| Categories:Games
License:GPLv3+
Web Site:http://pejici.net/~slobo/projects/easydice.html
Source Code:https://github.com/pejic/EasyDice
Issue Tracker:https://github.com/pejic/EasyDice/issues
Auto Name:Easy Dice
Summary:RPG dice roller
Description:
Easy Dice is an rpg dice rolling app with the following features:
* Variety of die types: D4, D6, D8, D10, D10x10, D12, D20
* Choose which dice to roll together. For example a D4 with two D8.
* Re-rolling of dice. Tap dice to select which ones will roll and which ones will stay.
* Adding up. Sums all the die face values.
.
Repo Type:git
Repo:https://github.com/pejic/EasyDice-AndroidAssets.git
Build:1.5,3
commit=d4f7fb7501ac8df8b23726905b7f0d6aabfbea6b
subdir=androidsrc
Build:1.6,4
commit=af97a88bf69
subdir=androidsrc
Maintainer Notes:
The repo used is a copy of the real repo (https://github.com/pejic/EasyDice.git)
but with the assets pre-rendered. Unfortunately this copy repo doesn't contain
Tags, while the main one does.
We could think about using the main repo and rendering the assets ourselves
(requires blender) or asking the dev to use tags on the other repo.
.
Auto Update Mode:None
Update Check Mode:RepoManifest
Current Version:1.6
Current Version Code:4
| Update Easy Dice to 1.6 (4) | Update Easy Dice to 1.6 (4)
| Text | agpl-3.0 | f-droid/fdroiddata,f-droid/fdroiddata,f-droid/fdroid-data | text | ## Code Before:
Categories:Games
License:GPLv3+
Web Site:http://pejici.net/~slobo/projects/easydice.html
Source Code:https://github.com/pejic/EasyDice
Issue Tracker:https://github.com/pejic/EasyDice/issues
Auto Name:Easy Dice
Summary:RPG dice roller
Description:
Easy Dice is an rpg dice rolling app with the following features:
* Variety of die types: D4, D6, D8, D10, D10x10, D12, D20
* Choose which dice to roll together. For example a D4 with two D8.
* Re-rolling of dice. Tap dice to select which ones will roll and which ones will stay.
* Adding up. Sums all the die face values.
.
Repo Type:git
Repo:https://github.com/pejic/EasyDice-AndroidAssets.git
Build:1.5,3
commit=d4f7fb7501ac8df8b23726905b7f0d6aabfbea6b
subdir=androidsrc
Auto Update Mode:None
Update Check Mode:RepoManifest/android-assets
Current Version:1.6
Current Version Code:4
## Instruction:
Update Easy Dice to 1.6 (4)
## Code After:
Categories:Games
License:GPLv3+
Web Site:http://pejici.net/~slobo/projects/easydice.html
Source Code:https://github.com/pejic/EasyDice
Issue Tracker:https://github.com/pejic/EasyDice/issues
Auto Name:Easy Dice
Summary:RPG dice roller
Description:
Easy Dice is an rpg dice rolling app with the following features:
* Variety of die types: D4, D6, D8, D10, D10x10, D12, D20
* Choose which dice to roll together. For example a D4 with two D8.
* Re-rolling of dice. Tap dice to select which ones will roll and which ones will stay.
* Adding up. Sums all the die face values.
.
Repo Type:git
Repo:https://github.com/pejic/EasyDice-AndroidAssets.git
Build:1.5,3
commit=d4f7fb7501ac8df8b23726905b7f0d6aabfbea6b
subdir=androidsrc
Build:1.6,4
commit=af97a88bf69
subdir=androidsrc
Maintainer Notes:
The repo used is a copy of the real repo (https://github.com/pejic/EasyDice.git)
but with the assets pre-rendered. Unfortunately this copy repo doesn't contain
Tags, while the main one does.
We could think about using the main repo and rendering the assets ourselves
(requires blender) or asking the dev to use tags on the other repo.
.
Auto Update Mode:None
Update Check Mode:RepoManifest
Current Version:1.6
Current Version Code:4
| Categories:Games
License:GPLv3+
Web Site:http://pejici.net/~slobo/projects/easydice.html
Source Code:https://github.com/pejic/EasyDice
Issue Tracker:https://github.com/pejic/EasyDice/issues
Auto Name:Easy Dice
Summary:RPG dice roller
Description:
Easy Dice is an rpg dice rolling app with the following features:
* Variety of die types: D4, D6, D8, D10, D10x10, D12, D20
* Choose which dice to roll together. For example a D4 with two D8.
* Re-rolling of dice. Tap dice to select which ones will roll and which ones will stay.
* Adding up. Sums all the die face values.
.
Repo Type:git
Repo:https://github.com/pejic/EasyDice-AndroidAssets.git
Build:1.5,3
commit=d4f7fb7501ac8df8b23726905b7f0d6aabfbea6b
subdir=androidsrc
+ Build:1.6,4
+ commit=af97a88bf69
+ subdir=androidsrc
+
+ Maintainer Notes:
+ The repo used is a copy of the real repo (https://github.com/pejic/EasyDice.git)
+ but with the assets pre-rendered. Unfortunately this copy repo doesn't contain
+ Tags, while the main one does.
+
+ We could think about using the main repo and rendering the assets ourselves
+ (requires blender) or asking the dev to use tags on the other repo.
+ .
+
Auto Update Mode:None
- Update Check Mode:RepoManifest/android-assets
? ---------------
+ Update Check Mode:RepoManifest
Current Version:1.6
Current Version Code:4
| 15 | 0.535714 | 14 | 1 |
79865e36fa70a52d04f586b24da14a216b883432 | test/media-test.js | test/media-test.js | /* Global Includes */
var testCase = require('mocha').describe;
var pre = require('mocha').before;
var preEach = require('mocha').beforeEach;
var post = require('mocha').after;
var postEach = require('mocha').afterEach;
var assertions = require('mocha').it;
var assert = require('chai').assert;
var validator = require('validator');
var exec = require('child_process').execSync;
var artik = require('../lib/artik-sdk');
/* Test Specific Includes */
var media = artik.media();
var sound_file = '/usr/share/sounds/alsa/Front_Center.wav';
/* Test Case Module */
testCase('Media', function() {
pre(function() {
});
testCase('#play_sound_file', function() {
assertions('Play the sound file', function() {
media.play_sound_file(sound_file, function(response, status) {
console.log('Finished playing');
});
});
});
post(function() {
});
});
| /* Global Includes */
var testCase = require('mocha').describe;
var pre = require('mocha').before;
var preEach = require('mocha').beforeEach;
var post = require('mocha').after;
var postEach = require('mocha').afterEach;
var assertions = require('mocha').it;
var assert = require('chai').assert;
var validator = require('validator');
var exec = require('child_process').execSync;
var artik = require('../lib/artik-sdk');
/* Test Specific Includes */
var media = artik.media();
var sound_file = '/usr/share/sounds/alsa/Front_Center.wav';
var start, end;
/* Test Case Module */
testCase('Media', function() {
pre(function() {
});
testCase('#play_sound_file', function() {
this.timeout(5000);
start = new Date();
assertions('Play the sound file', function(done) {
media.play_sound_file(sound_file, function(response, status) {
end = new Date();
var timeOfPlay = (end.getTime() - start.getTime())/1000;
console.log('Finished playing. Seconds ' + timeOfPlay);
assert.isAtLeast(timeOfPlay, 1);
done();
});
});
});
post(function() {
});
});
| Use time duration of test to measure the success | Media: Use time duration of test to measure the success
Signed-off-by: Vaibhav Singh <c267fbcdc94c01eb807238c388b92f321583d99b@samsung.com>
| JavaScript | mit | hoondol/artik-sdk,hoondol/artik-sdk | javascript | ## Code Before:
/* Global Includes */
var testCase = require('mocha').describe;
var pre = require('mocha').before;
var preEach = require('mocha').beforeEach;
var post = require('mocha').after;
var postEach = require('mocha').afterEach;
var assertions = require('mocha').it;
var assert = require('chai').assert;
var validator = require('validator');
var exec = require('child_process').execSync;
var artik = require('../lib/artik-sdk');
/* Test Specific Includes */
var media = artik.media();
var sound_file = '/usr/share/sounds/alsa/Front_Center.wav';
/* Test Case Module */
testCase('Media', function() {
pre(function() {
});
testCase('#play_sound_file', function() {
assertions('Play the sound file', function() {
media.play_sound_file(sound_file, function(response, status) {
console.log('Finished playing');
});
});
});
post(function() {
});
});
## Instruction:
Media: Use time duration of test to measure the success
Signed-off-by: Vaibhav Singh <c267fbcdc94c01eb807238c388b92f321583d99b@samsung.com>
## Code After:
/* Global Includes */
var testCase = require('mocha').describe;
var pre = require('mocha').before;
var preEach = require('mocha').beforeEach;
var post = require('mocha').after;
var postEach = require('mocha').afterEach;
var assertions = require('mocha').it;
var assert = require('chai').assert;
var validator = require('validator');
var exec = require('child_process').execSync;
var artik = require('../lib/artik-sdk');
/* Test Specific Includes */
var media = artik.media();
var sound_file = '/usr/share/sounds/alsa/Front_Center.wav';
var start, end;
/* Test Case Module */
testCase('Media', function() {
pre(function() {
});
testCase('#play_sound_file', function() {
this.timeout(5000);
start = new Date();
assertions('Play the sound file', function(done) {
media.play_sound_file(sound_file, function(response, status) {
end = new Date();
var timeOfPlay = (end.getTime() - start.getTime())/1000;
console.log('Finished playing. Seconds ' + timeOfPlay);
assert.isAtLeast(timeOfPlay, 1);
done();
});
});
});
post(function() {
});
});
| /* Global Includes */
var testCase = require('mocha').describe;
var pre = require('mocha').before;
var preEach = require('mocha').beforeEach;
var post = require('mocha').after;
var postEach = require('mocha').afterEach;
var assertions = require('mocha').it;
var assert = require('chai').assert;
var validator = require('validator');
var exec = require('child_process').execSync;
var artik = require('../lib/artik-sdk');
/* Test Specific Includes */
var media = artik.media();
var sound_file = '/usr/share/sounds/alsa/Front_Center.wav';
+ var start, end;
/* Test Case Module */
testCase('Media', function() {
pre(function() {
});
testCase('#play_sound_file', function() {
+ this.timeout(5000);
+ start = new Date();
- assertions('Play the sound file', function() {
+ assertions('Play the sound file', function(done) {
? ++++
+
media.play_sound_file(sound_file, function(response, status) {
+ end = new Date();
+ var timeOfPlay = (end.getTime() - start.getTime())/1000;
- console.log('Finished playing');
+ console.log('Finished playing. Seconds ' + timeOfPlay);
? ++++++++++ +++++++++++++
+ assert.isAtLeast(timeOfPlay, 1);
+ done();
});
});
});
post(function() {
});
}); | 12 | 0.315789 | 10 | 2 |
1ffeba72e0f3e5e8006fa6ac735a8d96a56d9455 | .travis.yml | .travis.yml | language: objective-c
osx_image: xcode8.3
jobs:
include:
# testing
- script: xcodebuild -workspace Vienna.xcworkspace -scheme Vienna test | xcpretty
# linting
- before_script: swiftlint version
script: swiftlint --lenient --reporter emoji
notifications:
slack:
secure: G/L2fP8z8+jNFicgMkOPrc3e7HqdQ2HkyazUMHwWLOsKmjM0ERqQ6FZXLlSr2P/kukqfUGHCGBGRdoMUb7jebNY0fNQyLYoUD7L91MLaz/zSBpDu4Yl+cVQJER1F6WFWNf697Uevvun1Mmz3N6zHe82KjQtE1yk/qFCbPyG5uJ8=
| language: objective-c
osx_image: xcode8.3
jobs:
include:
# testing
- script: xcodebuild -workspace Vienna.xcworkspace -scheme Vienna test | xcpretty
# linting
- before_script: swiftlint version
script: swiftlint --lenient --reporter emoji
notifications:
slack:
rooms:
- secure: G/L2fP8z8+jNFicgMkOPrc3e7HqdQ2HkyazUMHwWLOsKmjM0ERqQ6FZXLlSr2P/kukqfUGHCGBGRdoMUb7jebNY0fNQyLYoUD7L91MLaz/zSBpDu4Yl+cVQJER1F6WFWNf697Uevvun1Mmz3N6zHe82KjQtE1yk/qFCbPyG5uJ8=
template: "%{commit_subject}\n<%{compare_url}|%{commit}> in <%{pull_request_url}|#%{pull_request_number}> by %{author} <%{build_url}|%{result}> in %{duration}"
on_success: change
on_failure: change
| Format Travis message for Slack | Format Travis message for Slack
| YAML | apache-2.0 | ViennaRSS/vienna-rss,barijaona/vienna-rss,josh64x2/vienna-rss,Eitot/vienna-rss,Eitot/vienna-rss,barijaona/vienna-rss,josh64x2/vienna-rss,barijaona/vienna-rss,josh64x2/vienna-rss,Eitot/vienna-rss,ViennaRSS/vienna-rss,lapcat/vienna-rss,barijaona/vienna-rss,lapcat/vienna-rss,lapcat/vienna-rss,ViennaRSS/vienna-rss,josh64x2/vienna-rss,ViennaRSS/vienna-rss,josh64x2/vienna-rss,Eitot/vienna-rss,barijaona/vienna-rss,lapcat/vienna-rss,ViennaRSS/vienna-rss | yaml | ## Code Before:
language: objective-c
osx_image: xcode8.3
jobs:
include:
# testing
- script: xcodebuild -workspace Vienna.xcworkspace -scheme Vienna test | xcpretty
# linting
- before_script: swiftlint version
script: swiftlint --lenient --reporter emoji
notifications:
slack:
secure: G/L2fP8z8+jNFicgMkOPrc3e7HqdQ2HkyazUMHwWLOsKmjM0ERqQ6FZXLlSr2P/kukqfUGHCGBGRdoMUb7jebNY0fNQyLYoUD7L91MLaz/zSBpDu4Yl+cVQJER1F6WFWNf697Uevvun1Mmz3N6zHe82KjQtE1yk/qFCbPyG5uJ8=
## Instruction:
Format Travis message for Slack
## Code After:
language: objective-c
osx_image: xcode8.3
jobs:
include:
# testing
- script: xcodebuild -workspace Vienna.xcworkspace -scheme Vienna test | xcpretty
# linting
- before_script: swiftlint version
script: swiftlint --lenient --reporter emoji
notifications:
slack:
rooms:
- secure: G/L2fP8z8+jNFicgMkOPrc3e7HqdQ2HkyazUMHwWLOsKmjM0ERqQ6FZXLlSr2P/kukqfUGHCGBGRdoMUb7jebNY0fNQyLYoUD7L91MLaz/zSBpDu4Yl+cVQJER1F6WFWNf697Uevvun1Mmz3N6zHe82KjQtE1yk/qFCbPyG5uJ8=
template: "%{commit_subject}\n<%{compare_url}|%{commit}> in <%{pull_request_url}|#%{pull_request_number}> by %{author} <%{build_url}|%{result}> in %{duration}"
on_success: change
on_failure: change
| language: objective-c
osx_image: xcode8.3
jobs:
include:
# testing
- script: xcodebuild -workspace Vienna.xcworkspace -scheme Vienna test | xcpretty
# linting
- before_script: swiftlint version
script: swiftlint --lenient --reporter emoji
notifications:
slack:
+ rooms:
- secure: G/L2fP8z8+jNFicgMkOPrc3e7HqdQ2HkyazUMHwWLOsKmjM0ERqQ6FZXLlSr2P/kukqfUGHCGBGRdoMUb7jebNY0fNQyLYoUD7L91MLaz/zSBpDu4Yl+cVQJER1F6WFWNf697Uevvun1Mmz3N6zHe82KjQtE1yk/qFCbPyG5uJ8=
+ - secure: G/L2fP8z8+jNFicgMkOPrc3e7HqdQ2HkyazUMHwWLOsKmjM0ERqQ6FZXLlSr2P/kukqfUGHCGBGRdoMUb7jebNY0fNQyLYoUD7L91MLaz/zSBpDu4Yl+cVQJER1F6WFWNf697Uevvun1Mmz3N6zHe82KjQtE1yk/qFCbPyG5uJ8=
? ++++
-
+ template: "%{commit_subject}\n<%{compare_url}|%{commit}> in <%{pull_request_url}|#%{pull_request_number}> by %{author} <%{build_url}|%{result}> in %{duration}"
+ on_success: change
+ on_failure: change | 7 | 0.4375 | 5 | 2 |
295bf3ae3cf4b8f1bf8739ea513389a774f5f84c | README.md | README.md | [](https://travis-ci.org/murillocg/goleador)
# GoleadorApp
API REST para gerenciamento de partidas de futebol amador, contemplando informações como placar das partidas, gols anotados por cada jogador e ranking dos goleadores.
| [](https://travis-ci.org/murillocg/goleador)
[](https://coveralls.io/github/murillocg/goleador?branch=master)
# GoleadorApp
API REST para gerenciamento de partidas de futebol amador, contemplando informações como placar das partidas, gols anotados por cada jogador e ranking dos goleadores.
| Add coverage badge to project | Add coverage badge to project | Markdown | mit | murillocg/goleador,pedrorohr/goleador,pedrorohr/goleador,pedrorohr/goleador,pedrorohr/goleador,murillocg/goleador,murillocg/goleador,murillocg/goleador | markdown | ## Code Before:
[](https://travis-ci.org/murillocg/goleador)
# GoleadorApp
API REST para gerenciamento de partidas de futebol amador, contemplando informações como placar das partidas, gols anotados por cada jogador e ranking dos goleadores.
## Instruction:
Add coverage badge to project
## Code After:
[](https://travis-ci.org/murillocg/goleador)
[](https://coveralls.io/github/murillocg/goleador?branch=master)
# GoleadorApp
API REST para gerenciamento de partidas de futebol amador, contemplando informações como placar das partidas, gols anotados por cada jogador e ranking dos goleadores.
| [](https://travis-ci.org/murillocg/goleador)
+ [](https://coveralls.io/github/murillocg/goleador?branch=master)
# GoleadorApp
API REST para gerenciamento de partidas de futebol amador, contemplando informações como placar das partidas, gols anotados por cada jogador e ranking dos goleadores.
| 1 | 0.166667 | 1 | 0 |
6fa0a324bbc83b57e583aadca7ea812d3066d81a | firebase.json | firebase.json | {
"hosting": {
"public": "./dist",
"rewrites": [
{
"source": "**",
"destination": "/index.html"
}
],
"ignore": [
"**/.*",
"**/*.log",
"**/*.stackdump"
],
"headers": [
{
"source": "**/*.@(jpg|jpeg|gif|png|svg|js|css)",
"headers": [
{
"key": "Cache-Control",
"value": "max-age=2678400"
}
]
},
{
"source": "/",
"headers": [
{
"key": "Link",
"value": "</app.4ad933af.js>;rel=preload;as=script,</app.8bbcefe8.css>;rel=preload;as=style"
},
{
"key": "Content-Security-Policy",
"value": "default-src 'none'; script-src 'self'; img-src 'self'; style-src 'self'; font-src 'self' data:; base-uri 'self';"
},
{
"key": "X-Frame-Options",
"value": "SAMEORIGIN"
},
{
"key": "X-UA-Compatible",
"value": "IE=edge"
},
{
"key": "X-XSS-Protection",
"value": "1; mode=block"
}
]
}
],
"cleanUrls": true
}
}
| {
"hosting": {
"public": "./dist",
"rewrites": [
{
"source": "**",
"destination": "/index.html"
}
],
"ignore": [
"**/.*",
"**/*.log",
"**/*.stackdump"
],
"headers": [
{
"source": "**/*.@(jpg|jpeg|gif|png|svg|js|css)",
"headers": [
{
"key": "Cache-Control",
"value": "max-age=2678400"
}
]
},
{
"source": "/",
"headers": [
{
"key": "Link",
"value": "</app.4ad933af.js>;rel=preload;as=script,</app.8bbcefe8.css>;rel=preload;as=style"
},
{
"key": "Content-Security-Policy",
"value": "default-src 'none'; script-src 'self'; img-src 'self' unsafe-inline; style-src 'self'; font-src 'self' data:; base-uri 'self';"
},
{
"key": "X-Frame-Options",
"value": "SAMEORIGIN"
},
{
"key": "X-UA-Compatible",
"value": "IE=edge"
},
{
"key": "X-XSS-Protection",
"value": "1; mode=block"
}
]
}
],
"cleanUrls": true
}
}
| Add inline img src to CSP | Add inline img src to CSP
| JSON | mit | durasj/website,durasj/website,durasj/website | json | ## Code Before:
{
"hosting": {
"public": "./dist",
"rewrites": [
{
"source": "**",
"destination": "/index.html"
}
],
"ignore": [
"**/.*",
"**/*.log",
"**/*.stackdump"
],
"headers": [
{
"source": "**/*.@(jpg|jpeg|gif|png|svg|js|css)",
"headers": [
{
"key": "Cache-Control",
"value": "max-age=2678400"
}
]
},
{
"source": "/",
"headers": [
{
"key": "Link",
"value": "</app.4ad933af.js>;rel=preload;as=script,</app.8bbcefe8.css>;rel=preload;as=style"
},
{
"key": "Content-Security-Policy",
"value": "default-src 'none'; script-src 'self'; img-src 'self'; style-src 'self'; font-src 'self' data:; base-uri 'self';"
},
{
"key": "X-Frame-Options",
"value": "SAMEORIGIN"
},
{
"key": "X-UA-Compatible",
"value": "IE=edge"
},
{
"key": "X-XSS-Protection",
"value": "1; mode=block"
}
]
}
],
"cleanUrls": true
}
}
## Instruction:
Add inline img src to CSP
## Code After:
{
"hosting": {
"public": "./dist",
"rewrites": [
{
"source": "**",
"destination": "/index.html"
}
],
"ignore": [
"**/.*",
"**/*.log",
"**/*.stackdump"
],
"headers": [
{
"source": "**/*.@(jpg|jpeg|gif|png|svg|js|css)",
"headers": [
{
"key": "Cache-Control",
"value": "max-age=2678400"
}
]
},
{
"source": "/",
"headers": [
{
"key": "Link",
"value": "</app.4ad933af.js>;rel=preload;as=script,</app.8bbcefe8.css>;rel=preload;as=style"
},
{
"key": "Content-Security-Policy",
"value": "default-src 'none'; script-src 'self'; img-src 'self' unsafe-inline; style-src 'self'; font-src 'self' data:; base-uri 'self';"
},
{
"key": "X-Frame-Options",
"value": "SAMEORIGIN"
},
{
"key": "X-UA-Compatible",
"value": "IE=edge"
},
{
"key": "X-XSS-Protection",
"value": "1; mode=block"
}
]
}
],
"cleanUrls": true
}
}
| {
"hosting": {
"public": "./dist",
"rewrites": [
{
"source": "**",
"destination": "/index.html"
}
],
"ignore": [
"**/.*",
"**/*.log",
"**/*.stackdump"
],
"headers": [
{
"source": "**/*.@(jpg|jpeg|gif|png|svg|js|css)",
"headers": [
{
"key": "Cache-Control",
"value": "max-age=2678400"
}
]
},
{
"source": "/",
"headers": [
{
"key": "Link",
"value": "</app.4ad933af.js>;rel=preload;as=script,</app.8bbcefe8.css>;rel=preload;as=style"
},
{
"key": "Content-Security-Policy",
- "value": "default-src 'none'; script-src 'self'; img-src 'self'; style-src 'self'; font-src 'self' data:; base-uri 'self';"
+ "value": "default-src 'none'; script-src 'self'; img-src 'self' unsafe-inline; style-src 'self'; font-src 'self' data:; base-uri 'self';"
? ++++++++++++++
},
{
"key": "X-Frame-Options",
"value": "SAMEORIGIN"
},
{
"key": "X-UA-Compatible",
"value": "IE=edge"
},
{
"key": "X-XSS-Protection",
"value": "1; mode=block"
}
]
}
],
"cleanUrls": true
}
} | 2 | 0.037736 | 1 | 1 |
f36db59a863c3208955a3f64ccd2c98d8a450f9b | setup.py | setup.py | from setuptools import setup
setup(
name='docker-ipsec',
version='2.0.3',
description='Scripts to start/stop ipsec VPN tunnels while adding/removing iptables rules for docker networking.',
author='Christopher Brichford',
author_email='chrisb@farmersbusinessnetwork.com',
license='Apache License 2.0',
keywords=['ipsec', 'docker'], # arbitrary keywords
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Console',
'Intended Audience :: Information Technology',
'License :: OSI Approved :: Apache Software License',
'Operating System :: POSIX :: Linux',
'Topic :: Internet',
'Topic :: System :: Networking'
],
scripts=['docker_ipsec/docker-ipsec.py'],
install_requires=[
'pyroute2>=0.4.13,<0.5.0',
'netaddr>=0.7.19,<0.8.0',
'python-iptables>=0.12.0,<0.13.0',
'ipsecparse', 'docker>=2.1.0,<2.5.0'
],
url='https://github.com/cbrichford/docker-ipsec/',
packages=[
'docker_ipsec'
],
)
| from setuptools import setup
setup(
name='docker-ipsec',
version='3.0.0',
description='Scripts to start/stop ipsec VPN tunnels while adding/removing iptables rules for docker networking.',
author='Christopher Brichford',
author_email='chrisb@farmersbusinessnetwork.com',
license='Apache License 2.0',
keywords=['ipsec', 'docker'], # arbitrary keywords
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Console',
'Intended Audience :: Information Technology',
'License :: OSI Approved :: Apache Software License',
'Operating System :: POSIX :: Linux',
'Topic :: Internet',
'Topic :: System :: Networking'
],
scripts=['docker_ipsec/docker-ipsec.py'],
install_requires=[
'pyroute2>=0.5.7,<0.6.0',
'netaddr>=0.7.19,<0.8.0',
'python-iptables>=0.14.0,<0.15.0',
'ipsecparse',
'docker>=4.2.0,<4.3.0'
],
url='https://github.com/cbrichford/docker-ipsec/',
packages=[
'docker_ipsec'
],
)
| Upgrade all dependencies to latest version. | Upgrade all dependencies to latest version.
| Python | apache-2.0 | cbrichford/docker-ipsec | python | ## Code Before:
from setuptools import setup
setup(
name='docker-ipsec',
version='2.0.3',
description='Scripts to start/stop ipsec VPN tunnels while adding/removing iptables rules for docker networking.',
author='Christopher Brichford',
author_email='chrisb@farmersbusinessnetwork.com',
license='Apache License 2.0',
keywords=['ipsec', 'docker'], # arbitrary keywords
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Console',
'Intended Audience :: Information Technology',
'License :: OSI Approved :: Apache Software License',
'Operating System :: POSIX :: Linux',
'Topic :: Internet',
'Topic :: System :: Networking'
],
scripts=['docker_ipsec/docker-ipsec.py'],
install_requires=[
'pyroute2>=0.4.13,<0.5.0',
'netaddr>=0.7.19,<0.8.0',
'python-iptables>=0.12.0,<0.13.0',
'ipsecparse', 'docker>=2.1.0,<2.5.0'
],
url='https://github.com/cbrichford/docker-ipsec/',
packages=[
'docker_ipsec'
],
)
## Instruction:
Upgrade all dependencies to latest version.
## Code After:
from setuptools import setup
setup(
name='docker-ipsec',
version='3.0.0',
description='Scripts to start/stop ipsec VPN tunnels while adding/removing iptables rules for docker networking.',
author='Christopher Brichford',
author_email='chrisb@farmersbusinessnetwork.com',
license='Apache License 2.0',
keywords=['ipsec', 'docker'], # arbitrary keywords
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Console',
'Intended Audience :: Information Technology',
'License :: OSI Approved :: Apache Software License',
'Operating System :: POSIX :: Linux',
'Topic :: Internet',
'Topic :: System :: Networking'
],
scripts=['docker_ipsec/docker-ipsec.py'],
install_requires=[
'pyroute2>=0.5.7,<0.6.0',
'netaddr>=0.7.19,<0.8.0',
'python-iptables>=0.14.0,<0.15.0',
'ipsecparse',
'docker>=4.2.0,<4.3.0'
],
url='https://github.com/cbrichford/docker-ipsec/',
packages=[
'docker_ipsec'
],
)
| from setuptools import setup
setup(
name='docker-ipsec',
- version='2.0.3',
? ^ ^
+ version='3.0.0',
? ^ ^
description='Scripts to start/stop ipsec VPN tunnels while adding/removing iptables rules for docker networking.',
author='Christopher Brichford',
author_email='chrisb@farmersbusinessnetwork.com',
license='Apache License 2.0',
keywords=['ipsec', 'docker'], # arbitrary keywords
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Console',
'Intended Audience :: Information Technology',
'License :: OSI Approved :: Apache Software License',
'Operating System :: POSIX :: Linux',
'Topic :: Internet',
'Topic :: System :: Networking'
],
scripts=['docker_ipsec/docker-ipsec.py'],
install_requires=[
- 'pyroute2>=0.4.13,<0.5.0',
? ^ ^^ ^
+ 'pyroute2>=0.5.7,<0.6.0',
? ^ ^ ^
'netaddr>=0.7.19,<0.8.0',
- 'python-iptables>=0.12.0,<0.13.0',
? ^ ^
+ 'python-iptables>=0.14.0,<0.15.0',
? ^ ^
- 'ipsecparse', 'docker>=2.1.0,<2.5.0'
+ 'ipsecparse',
+ 'docker>=4.2.0,<4.3.0'
],
url='https://github.com/cbrichford/docker-ipsec/',
packages=[
'docker_ipsec'
],
) | 9 | 0.290323 | 5 | 4 |
a146e37ba5159cb6f807933c73854b185365cba5 | src/tixi/mutators/delete.cljs | src/tixi/mutators/delete.cljs | (ns tixi.mutators.delete
(:require [tixi.data :as d]
[tixi.mutators.shared :as msh]
[tixi.mutators.locks :as ml]
[tixi.mutators.undo :as mu]
[tixi.items :as i]))
(defn delete-items! [ids]
(when (not-empty ids)
(mu/snapshot!)
(doseq [id ids]
(ml/delete-from-locks! id (d/completed-item id))
(msh/update-state! update-in [:completed] dissoc id))))
| (ns tixi.mutators.delete
(:require [tixi.data :as d]
[tixi.mutators.shared :as msh]
[tixi.mutators.locks :as ml]
[tixi.mutators.undo :as mu]
[tixi.items :as i]))
(defn delete-items! [ids]
(when (not-empty ids)
(mu/snapshot!)
(doseq [id ids]
(swap! d/data update-in [:cache] dissoc id)
(ml/delete-from-locks! id (d/completed-item id))
(msh/update-state! update-in [:completed] dissoc id))))
| Remove cache when deleting an item | Remove cache when deleting an item
| Clojure | epl-1.0 | astashov/tixi,astashov/tixi,astashov/tixi,astashov/tixi | clojure | ## Code Before:
(ns tixi.mutators.delete
(:require [tixi.data :as d]
[tixi.mutators.shared :as msh]
[tixi.mutators.locks :as ml]
[tixi.mutators.undo :as mu]
[tixi.items :as i]))
(defn delete-items! [ids]
(when (not-empty ids)
(mu/snapshot!)
(doseq [id ids]
(ml/delete-from-locks! id (d/completed-item id))
(msh/update-state! update-in [:completed] dissoc id))))
## Instruction:
Remove cache when deleting an item
## Code After:
(ns tixi.mutators.delete
(:require [tixi.data :as d]
[tixi.mutators.shared :as msh]
[tixi.mutators.locks :as ml]
[tixi.mutators.undo :as mu]
[tixi.items :as i]))
(defn delete-items! [ids]
(when (not-empty ids)
(mu/snapshot!)
(doseq [id ids]
(swap! d/data update-in [:cache] dissoc id)
(ml/delete-from-locks! id (d/completed-item id))
(msh/update-state! update-in [:completed] dissoc id))))
| (ns tixi.mutators.delete
(:require [tixi.data :as d]
[tixi.mutators.shared :as msh]
[tixi.mutators.locks :as ml]
[tixi.mutators.undo :as mu]
[tixi.items :as i]))
(defn delete-items! [ids]
(when (not-empty ids)
(mu/snapshot!)
(doseq [id ids]
+ (swap! d/data update-in [:cache] dissoc id)
(ml/delete-from-locks! id (d/completed-item id))
(msh/update-state! update-in [:completed] dissoc id)))) | 1 | 0.076923 | 1 | 0 |
983db49c002d9d59fdb3df76b224b7fad496200a | lib/meditate.ex | lib/meditate.ex | defmodule Mix.Tasks.Meditate do
use Mix.Task
alias Options
def run(args) do
Application.ensure_all_started(:elixir_koans)
Code.compiler_options(ignore_module_conflict: true)
{:ok, watcher} = Watcher.start
Process.monitor(watcher)
Options.start(args)
Options.initial_koan
|> ok?
|> Runner.modules_to_run
|> Tracker.start
|> Runner.run
if Tracker.complete? do
Display.congratulate
exit(:normal)
end
receive do
{:DOWN, _references, :process, ^watcher, _reason} -> nil
end
end
defp ok?(koan) do
if Runner.koan?(koan) do
koan
else
Display.invalid_koan(koan, Runner.modules)
exit(:normal)
end
end
end
| defmodule Mix.Tasks.Meditate do
use Mix.Task
alias Options
@shortdoc "Start the koans"
def run(args) do
Application.ensure_all_started(:elixir_koans)
Code.compiler_options(ignore_module_conflict: true)
{:ok, watcher} = Watcher.start
Process.monitor(watcher)
Options.start(args)
Options.initial_koan
|> ok?
|> Runner.modules_to_run
|> Tracker.start
|> Runner.run
if Tracker.complete? do
Display.congratulate
exit(:normal)
end
receive do
{:DOWN, _references, :process, ^watcher, _reason} -> nil
end
end
defp ok?(koan) do
if Runner.koan?(koan) do
koan
else
Display.invalid_koan(koan, Runner.modules)
exit(:normal)
end
end
end
| Add shortdoc to display task in mix list | Add shortdoc to display task in mix list
| Elixir | mit | elixirkoans/elixir-koans,samstarling/elixir-koans | elixir | ## Code Before:
defmodule Mix.Tasks.Meditate do
use Mix.Task
alias Options
def run(args) do
Application.ensure_all_started(:elixir_koans)
Code.compiler_options(ignore_module_conflict: true)
{:ok, watcher} = Watcher.start
Process.monitor(watcher)
Options.start(args)
Options.initial_koan
|> ok?
|> Runner.modules_to_run
|> Tracker.start
|> Runner.run
if Tracker.complete? do
Display.congratulate
exit(:normal)
end
receive do
{:DOWN, _references, :process, ^watcher, _reason} -> nil
end
end
defp ok?(koan) do
if Runner.koan?(koan) do
koan
else
Display.invalid_koan(koan, Runner.modules)
exit(:normal)
end
end
end
## Instruction:
Add shortdoc to display task in mix list
## Code After:
defmodule Mix.Tasks.Meditate do
use Mix.Task
alias Options
@shortdoc "Start the koans"
def run(args) do
Application.ensure_all_started(:elixir_koans)
Code.compiler_options(ignore_module_conflict: true)
{:ok, watcher} = Watcher.start
Process.monitor(watcher)
Options.start(args)
Options.initial_koan
|> ok?
|> Runner.modules_to_run
|> Tracker.start
|> Runner.run
if Tracker.complete? do
Display.congratulate
exit(:normal)
end
receive do
{:DOWN, _references, :process, ^watcher, _reason} -> nil
end
end
defp ok?(koan) do
if Runner.koan?(koan) do
koan
else
Display.invalid_koan(koan, Runner.modules)
exit(:normal)
end
end
end
| defmodule Mix.Tasks.Meditate do
use Mix.Task
alias Options
+
+ @shortdoc "Start the koans"
def run(args) do
Application.ensure_all_started(:elixir_koans)
Code.compiler_options(ignore_module_conflict: true)
{:ok, watcher} = Watcher.start
Process.monitor(watcher)
Options.start(args)
Options.initial_koan
|> ok?
|> Runner.modules_to_run
|> Tracker.start
|> Runner.run
if Tracker.complete? do
Display.congratulate
exit(:normal)
end
receive do
{:DOWN, _references, :process, ^watcher, _reason} -> nil
end
end
defp ok?(koan) do
if Runner.koan?(koan) do
koan
else
Display.invalid_koan(koan, Runner.modules)
exit(:normal)
end
end
end
- | 3 | 0.078947 | 2 | 1 |
29b6c0d61957caf8a17d6bb3279474beac9f4444 | src/components/RadioButton.js | src/components/RadioButton.js | import React from 'react';
import PropTypes from 'prop-types';
import Control from './Control';
import controlActions from '../actions/controls';
import {connect} from '../store';
import {getValue, hasError} from '../store/reducers';
class RadioButton extends Control {
static propTypes = {
className: PropTypes.string,
id: PropTypes.string,
model: PropTypes.string.isRequired,
style: PropTypes.string,
value: PropTypes.string.isRequired,
checked: PropTypes.string,
};
componentDidMount() {}
componentDidUpdate() {}
render() {
const {id, value, style, checked} = this.props;
return (
<input
type="radio"
id={id}
className={this.getClassName()}
style={style}
value={value}
checked={checked === value}
onChange={e => this.onChange(e.target.value)}
/>
);
}
}
const mapStateToProps = (state, props) => ({
checked: getValue(state, props.model),
hasError: hasError(state, props.model),
});
const mapDispatchToProps = {
setErrors: controlActions.setErrors,
setValue: controlActions.setValue,
};
export default connect(mapStateToProps, mapDispatchToProps)(RadioButton);
| import React from 'react';
import PropTypes from 'prop-types';
import Control from './Control';
import controlActions from '../actions/controls';
import {connect} from '../store';
import {getValue, hasError} from '../store/reducers';
class RadioButton extends Control {
static propTypes = {
className: PropTypes.string,
id: PropTypes.string,
model: PropTypes.string.isRequired,
style: PropTypes.string,
value: PropTypes.oneOfType([
PropTypes.string,
PropTypes.number,
]).isRequired,
checked: PropTypes.string,
};
componentDidMount() {}
componentDidUpdate() {}
render() {
const {id, value, style, checked} = this.props;
return (
<input
type="radio"
id={id}
className={this.getClassName()}
style={style}
value={value}
checked={checked === value}
onChange={e => this.onChange(e.target.value)}
/>
);
}
}
const mapStateToProps = (state, props) => ({
checked: getValue(state, props.model),
hasError: hasError(state, props.model),
});
const mapDispatchToProps = {
setErrors: controlActions.setErrors,
setValue: controlActions.setValue,
};
export default connect(mapStateToProps, mapDispatchToProps)(RadioButton);
| Allow both string and number for the radio button value | Allow both string and number for the radio button value
| JavaScript | mit | darrikonn/react-chloroform,darrikonn/react-chloroform,darrikonn/react-chloroform | javascript | ## Code Before:
import React from 'react';
import PropTypes from 'prop-types';
import Control from './Control';
import controlActions from '../actions/controls';
import {connect} from '../store';
import {getValue, hasError} from '../store/reducers';
class RadioButton extends Control {
static propTypes = {
className: PropTypes.string,
id: PropTypes.string,
model: PropTypes.string.isRequired,
style: PropTypes.string,
value: PropTypes.string.isRequired,
checked: PropTypes.string,
};
componentDidMount() {}
componentDidUpdate() {}
render() {
const {id, value, style, checked} = this.props;
return (
<input
type="radio"
id={id}
className={this.getClassName()}
style={style}
value={value}
checked={checked === value}
onChange={e => this.onChange(e.target.value)}
/>
);
}
}
const mapStateToProps = (state, props) => ({
checked: getValue(state, props.model),
hasError: hasError(state, props.model),
});
const mapDispatchToProps = {
setErrors: controlActions.setErrors,
setValue: controlActions.setValue,
};
export default connect(mapStateToProps, mapDispatchToProps)(RadioButton);
## Instruction:
Allow both string and number for the radio button value
## Code After:
import React from 'react';
import PropTypes from 'prop-types';
import Control from './Control';
import controlActions from '../actions/controls';
import {connect} from '../store';
import {getValue, hasError} from '../store/reducers';
class RadioButton extends Control {
static propTypes = {
className: PropTypes.string,
id: PropTypes.string,
model: PropTypes.string.isRequired,
style: PropTypes.string,
value: PropTypes.oneOfType([
PropTypes.string,
PropTypes.number,
]).isRequired,
checked: PropTypes.string,
};
componentDidMount() {}
componentDidUpdate() {}
render() {
const {id, value, style, checked} = this.props;
return (
<input
type="radio"
id={id}
className={this.getClassName()}
style={style}
value={value}
checked={checked === value}
onChange={e => this.onChange(e.target.value)}
/>
);
}
}
const mapStateToProps = (state, props) => ({
checked: getValue(state, props.model),
hasError: hasError(state, props.model),
});
const mapDispatchToProps = {
setErrors: controlActions.setErrors,
setValue: controlActions.setValue,
};
export default connect(mapStateToProps, mapDispatchToProps)(RadioButton);
| import React from 'react';
import PropTypes from 'prop-types';
import Control from './Control';
import controlActions from '../actions/controls';
import {connect} from '../store';
import {getValue, hasError} from '../store/reducers';
class RadioButton extends Control {
static propTypes = {
className: PropTypes.string,
id: PropTypes.string,
model: PropTypes.string.isRequired,
style: PropTypes.string,
- value: PropTypes.string.isRequired,
+ value: PropTypes.oneOfType([
+ PropTypes.string,
+ PropTypes.number,
+ ]).isRequired,
checked: PropTypes.string,
};
componentDidMount() {}
componentDidUpdate() {}
render() {
const {id, value, style, checked} = this.props;
return (
<input
type="radio"
id={id}
className={this.getClassName()}
style={style}
value={value}
checked={checked === value}
onChange={e => this.onChange(e.target.value)}
/>
);
}
}
const mapStateToProps = (state, props) => ({
checked: getValue(state, props.model),
hasError: hasError(state, props.model),
});
const mapDispatchToProps = {
setErrors: controlActions.setErrors,
setValue: controlActions.setValue,
};
export default connect(mapStateToProps, mapDispatchToProps)(RadioButton); | 5 | 0.1 | 4 | 1 |
78a80884fe091a7c6de031573a822d218556bf23 | lib/tasks/setup_exchange.rake | lib/tasks/setup_exchange.rake | desc "Create RabbitMQ exchanges"
task setup_exchange: :environment do
config = YAML.load_file(Rails.root.join("config/rabbitmq.yml"))[Rails.env].symbolize_keys
bunny = Bunny.new(ENV["RABBITMQ_URL"])
channel = bunny.start.create_channel
Bunny::Exchange.new(channel, :topic, config[:exchange])
end
| desc "Create RabbitMQ exchanges"
task setup_exchange: :environment do
config = Rails.application.config_for(:rabbitmq)
bunny = Bunny.new(ENV["RABBITMQ_URL"])
channel = bunny.start.create_channel
Bunny::Exchange.new(channel, :topic, config[:exchange])
end
| Use Rails method to get RabbitMQ config | Use Rails method to get RabbitMQ config
The previous implementation isn't compatible with Ruby 3.1 due to the
upgrade of Psych (YAML library) to 4.0 [1] in stdlib and the change in `load_file`.
A simpler way to replace this is to use the Rails built in method to
load a config file.
[1]: https://github.com/ruby/psych/pull/488
| Ruby | mit | alphagov/publishing-api,alphagov/publishing-api | ruby | ## Code Before:
desc "Create RabbitMQ exchanges"
task setup_exchange: :environment do
config = YAML.load_file(Rails.root.join("config/rabbitmq.yml"))[Rails.env].symbolize_keys
bunny = Bunny.new(ENV["RABBITMQ_URL"])
channel = bunny.start.create_channel
Bunny::Exchange.new(channel, :topic, config[:exchange])
end
## Instruction:
Use Rails method to get RabbitMQ config
The previous implementation isn't compatible with Ruby 3.1 due to the
upgrade of Psych (YAML library) to 4.0 [1] in stdlib and the change in `load_file`.
A simpler way to replace this is to use the Rails built in method to
load a config file.
[1]: https://github.com/ruby/psych/pull/488
## Code After:
desc "Create RabbitMQ exchanges"
task setup_exchange: :environment do
config = Rails.application.config_for(:rabbitmq)
bunny = Bunny.new(ENV["RABBITMQ_URL"])
channel = bunny.start.create_channel
Bunny::Exchange.new(channel, :topic, config[:exchange])
end
| desc "Create RabbitMQ exchanges"
task setup_exchange: :environment do
- config = YAML.load_file(Rails.root.join("config/rabbitmq.yml"))[Rails.env].symbolize_keys
+ config = Rails.application.config_for(:rabbitmq)
bunny = Bunny.new(ENV["RABBITMQ_URL"])
channel = bunny.start.create_channel
Bunny::Exchange.new(channel, :topic, config[:exchange])
end | 2 | 0.25 | 1 | 1 |
7b45067cadda828090af37fb123be2c3a839b7b3 | utils/fetch-assembly.sh | utils/fetch-assembly.sh |
echo ""
show_usage() {
echo
echo "Usage: "
echo " -v version (e.g. 3.3.2)"
echo " -j directory for local jar cache (default = ./)"
echo
}
POSITIONAL=()
while [[ $# -gt 0 ]]
do
key="$1"
case $key in
-v|--version)
VERSION="$2"
shift # past argument
shift # past value
;;
-j|--jar)
CACHE="$2"
shift
shift
;;
*) # unknown option
POSITIONAL+=("$1") # save it in an array for later
shift # past argument
;;
esac
done
set -- "${POSITIONAL[@]}" # restore positional parameters
if [ -z "$VERSION" ]; then
echo "ERROR - Version not specified"
show_usage
exit 0
fi
if [ -z "$CACHE" ]; then
echo "Target not specified, using default './'"
CACHE="./"
fi
JAR="LuciusAPI-assembly-$VERSION.jar"
URI="https://github.com/data-intuitive/LuciusAPI/releases/download/v$VERSION/$JAR"
TARGET="$CACHE$JAR"
echo "Fetching assembly jar for v$VERSION..."
echo "Storing in '$CACHE'"
wget "$URI" -O "$TARGET"
echo "Done"
|
echo ""
show_usage() {
echo
echo "Usage: "
echo " -v version (e.g. 3.3.2)"
echo " -j directory for local jar cache (default = ./)"
echo
}
POSITIONAL=()
while [[ $# -gt 0 ]]
do
key="$1"
case $key in
-v|--version)
VERSION="$2"
shift # past argument
shift # past value
;;
-j|--jar)
CACHE="$2"
shift
shift
;;
*) # unknown option
POSITIONAL+=("$1") # save it in an array for later
shift # past argument
;;
esac
done
set -- "${POSITIONAL[@]}" # restore positional parameters
if [ -z "$VERSION" ]; then
echo "ERROR - Version not specified"
show_usage
exit 0
fi
if [ -z "$CACHE" ]; then
echo "Target not specified, using default './'"
CACHE="./"
fi
JAR="LuciusAPI-assembly-$VERSION.jar"
URI="https://github.com/data-intuitive/LuciusAPI/releases/download/v$VERSION/$JAR"
# Makes sure a trailing slash is present on the CACHE path
CACHE=`echo -ne $CACHE | sed 's;/*$;/;'`
TARGET="$CACHE$JAR"
echo "Fetching assembly jar for v$VERSION..."
echo "Storing in '$CACHE'"
wget "$URI" -O "$TARGET"
echo "Done"
| Improve parsing of jar cache | Improve parsing of jar cache
| Shell | apache-2.0 | data-intuitive/LuciusAPI | shell | ## Code Before:
echo ""
show_usage() {
echo
echo "Usage: "
echo " -v version (e.g. 3.3.2)"
echo " -j directory for local jar cache (default = ./)"
echo
}
POSITIONAL=()
while [[ $# -gt 0 ]]
do
key="$1"
case $key in
-v|--version)
VERSION="$2"
shift # past argument
shift # past value
;;
-j|--jar)
CACHE="$2"
shift
shift
;;
*) # unknown option
POSITIONAL+=("$1") # save it in an array for later
shift # past argument
;;
esac
done
set -- "${POSITIONAL[@]}" # restore positional parameters
if [ -z "$VERSION" ]; then
echo "ERROR - Version not specified"
show_usage
exit 0
fi
if [ -z "$CACHE" ]; then
echo "Target not specified, using default './'"
CACHE="./"
fi
JAR="LuciusAPI-assembly-$VERSION.jar"
URI="https://github.com/data-intuitive/LuciusAPI/releases/download/v$VERSION/$JAR"
TARGET="$CACHE$JAR"
echo "Fetching assembly jar for v$VERSION..."
echo "Storing in '$CACHE'"
wget "$URI" -O "$TARGET"
echo "Done"
## Instruction:
Improve parsing of jar cache
## Code After:
echo ""
show_usage() {
echo
echo "Usage: "
echo " -v version (e.g. 3.3.2)"
echo " -j directory for local jar cache (default = ./)"
echo
}
POSITIONAL=()
while [[ $# -gt 0 ]]
do
key="$1"
case $key in
-v|--version)
VERSION="$2"
shift # past argument
shift # past value
;;
-j|--jar)
CACHE="$2"
shift
shift
;;
*) # unknown option
POSITIONAL+=("$1") # save it in an array for later
shift # past argument
;;
esac
done
set -- "${POSITIONAL[@]}" # restore positional parameters
if [ -z "$VERSION" ]; then
echo "ERROR - Version not specified"
show_usage
exit 0
fi
if [ -z "$CACHE" ]; then
echo "Target not specified, using default './'"
CACHE="./"
fi
JAR="LuciusAPI-assembly-$VERSION.jar"
URI="https://github.com/data-intuitive/LuciusAPI/releases/download/v$VERSION/$JAR"
# Makes sure a trailing slash is present on the CACHE path
CACHE=`echo -ne $CACHE | sed 's;/*$;/;'`
TARGET="$CACHE$JAR"
echo "Fetching assembly jar for v$VERSION..."
echo "Storing in '$CACHE'"
wget "$URI" -O "$TARGET"
echo "Done"
|
echo ""
show_usage() {
echo
echo "Usage: "
echo " -v version (e.g. 3.3.2)"
echo " -j directory for local jar cache (default = ./)"
echo
}
POSITIONAL=()
while [[ $# -gt 0 ]]
do
key="$1"
case $key in
-v|--version)
VERSION="$2"
shift # past argument
shift # past value
;;
-j|--jar)
CACHE="$2"
shift
shift
;;
*) # unknown option
POSITIONAL+=("$1") # save it in an array for later
shift # past argument
;;
esac
done
set -- "${POSITIONAL[@]}" # restore positional parameters
if [ -z "$VERSION" ]; then
echo "ERROR - Version not specified"
show_usage
exit 0
fi
if [ -z "$CACHE" ]; then
echo "Target not specified, using default './'"
CACHE="./"
fi
JAR="LuciusAPI-assembly-$VERSION.jar"
URI="https://github.com/data-intuitive/LuciusAPI/releases/download/v$VERSION/$JAR"
+ # Makes sure a trailing slash is present on the CACHE path
+ CACHE=`echo -ne $CACHE | sed 's;/*$;/;'`
TARGET="$CACHE$JAR"
echo "Fetching assembly jar for v$VERSION..."
echo "Storing in '$CACHE'"
wget "$URI" -O "$TARGET"
echo "Done" | 2 | 0.035714 | 2 | 0 |
f8b501c9a31a2b9ae729e8ab634c5976a829ee4a | Cargo.toml | Cargo.toml | [package]
name = "pistoncore-input"
version = "0.0.3"
authors = ["bvssvni <bvssvni@gmail.com>"]
keywords = ["keyboard", "mouse", "input"]
description = "A structure for user input"
license = "MIT"
readme = "README.md"
repository = "https://github.com/pistondevelopers/input.git"
homepage = "https://github.com/pistondevelopers/input"
[lib]
name = "input"
path = "src/lib.rs"
[dependencies.rustc-serialize]
git = "https://github.com/rust-lang/rustc-serialize"
| [package]
name = "pistoncore-input"
version = "0.0.3"
authors = ["bvssvni <bvssvni@gmail.com>"]
keywords = ["keyboard", "mouse", "input"]
description = "A structure for user input"
license = "MIT"
readme = "README.md"
repository = "https://github.com/pistondevelopers/input.git"
homepage = "https://github.com/pistondevelopers/input"
[lib]
name = "input"
path = "src/lib.rs"
[dependencies]
rustc-serialize = "*"
| Use any version on crates.io of rustc-serialize | Use any version on crates.io of rustc-serialize
| TOML | mit | PistonDevelopers/input,bvssvni/input,caldwell/piston,bvssvni/piston,KevinFrans3/piston,tempbottle/piston,placrosse/piston,axelmagn/piston,PistonDevelopers/piston,PistonDevelopers/piston,oldmanmike/piston,bvssvni/piston,getynge/piston,mitchmindtree/piston,Potpourri/piston | toml | ## Code Before:
[package]
name = "pistoncore-input"
version = "0.0.3"
authors = ["bvssvni <bvssvni@gmail.com>"]
keywords = ["keyboard", "mouse", "input"]
description = "A structure for user input"
license = "MIT"
readme = "README.md"
repository = "https://github.com/pistondevelopers/input.git"
homepage = "https://github.com/pistondevelopers/input"
[lib]
name = "input"
path = "src/lib.rs"
[dependencies.rustc-serialize]
git = "https://github.com/rust-lang/rustc-serialize"
## Instruction:
Use any version on crates.io of rustc-serialize
## Code After:
[package]
name = "pistoncore-input"
version = "0.0.3"
authors = ["bvssvni <bvssvni@gmail.com>"]
keywords = ["keyboard", "mouse", "input"]
description = "A structure for user input"
license = "MIT"
readme = "README.md"
repository = "https://github.com/pistondevelopers/input.git"
homepage = "https://github.com/pistondevelopers/input"
[lib]
name = "input"
path = "src/lib.rs"
[dependencies]
rustc-serialize = "*"
| [package]
name = "pistoncore-input"
version = "0.0.3"
authors = ["bvssvni <bvssvni@gmail.com>"]
keywords = ["keyboard", "mouse", "input"]
description = "A structure for user input"
license = "MIT"
readme = "README.md"
repository = "https://github.com/pistondevelopers/input.git"
homepage = "https://github.com/pistondevelopers/input"
[lib]
name = "input"
path = "src/lib.rs"
- [dependencies.rustc-serialize]
+ [dependencies]
- git = "https://github.com/rust-lang/rustc-serialize"
+ rustc-serialize = "*"
| 4 | 0.190476 | 2 | 2 |
19166683a6b3bf56683071e31409ecccb3c457a1 | lib/underskog/response/parse_json.rb | lib/underskog/response/parse_json.rb | require 'faraday'
require 'multi_json'
module Underskog
module Response
class ParseJson < Faraday::Response::Middleware
def parse(body)
case body
when ''
nil
when 'true'
true
when 'false'
false
else
MultiJson.load(body)
end
end
def on_complete(env)
if respond_to? :parse
env[:body] = parse(env[:body]) unless env[:request][:raw] or [204,304].index env[:status]
end
end
end
end
end
| require 'faraday'
require 'multi_json'
module Underskog
module Response
class ParseJson < Faraday::Response::Middleware
def parse(body)
case body
when ''
nil
when 'true'
true
when 'false'
false
else
json = MultiJson.load(body)
return json['data'] if json.is_a?(Hash) and json['data']
json
end
end
def on_complete(env)
if respond_to? :parse
env[:body] = parse(env[:body]) unless env[:request][:raw] or [204,304].index env[:status]
end
end
end
end
end
| Use data field in json response as data | Use data field in json response as data
| Ruby | mit | bengler/underskog_api_client | ruby | ## Code Before:
require 'faraday'
require 'multi_json'
module Underskog
module Response
class ParseJson < Faraday::Response::Middleware
def parse(body)
case body
when ''
nil
when 'true'
true
when 'false'
false
else
MultiJson.load(body)
end
end
def on_complete(env)
if respond_to? :parse
env[:body] = parse(env[:body]) unless env[:request][:raw] or [204,304].index env[:status]
end
end
end
end
end
## Instruction:
Use data field in json response as data
## Code After:
require 'faraday'
require 'multi_json'
module Underskog
module Response
class ParseJson < Faraday::Response::Middleware
def parse(body)
case body
when ''
nil
when 'true'
true
when 'false'
false
else
json = MultiJson.load(body)
return json['data'] if json.is_a?(Hash) and json['data']
json
end
end
def on_complete(env)
if respond_to? :parse
env[:body] = parse(env[:body]) unless env[:request][:raw] or [204,304].index env[:status]
end
end
end
end
end
| require 'faraday'
require 'multi_json'
module Underskog
module Response
class ParseJson < Faraday::Response::Middleware
def parse(body)
case body
when ''
nil
when 'true'
true
when 'false'
false
else
- MultiJson.load(body)
+ json = MultiJson.load(body)
? +++++++
+ return json['data'] if json.is_a?(Hash) and json['data']
+ json
end
end
def on_complete(env)
if respond_to? :parse
env[:body] = parse(env[:body]) unless env[:request][:raw] or [204,304].index env[:status]
end
end
end
end
end | 4 | 0.142857 | 3 | 1 |
79dd9b7bc8e7520ecec88b1817c38db6d5cb9326 | book.json | book.json | {
"gitbook": "3.1.1",
"title": "Relazione cplusplus.com",
"author": "Andrea Mantovani",
"language": "it"
}
| {
"gitbook": "3.1.1",
"title": "Relazione cplusplus.com",
"author": "Andrea Mantovani",
"language": "it",
"plugins": ["image-captions"],
"pluginsConfig": {
"image-captions": {
"caption": "_CAPTION_"
}
}
}
| Add plugin to insert caption below the images | Add plugin to insert caption below the images
| JSON | mit | korut94/RelazioneTecWeb2,korut94/RelazioneTecWeb2 | json | ## Code Before:
{
"gitbook": "3.1.1",
"title": "Relazione cplusplus.com",
"author": "Andrea Mantovani",
"language": "it"
}
## Instruction:
Add plugin to insert caption below the images
## Code After:
{
"gitbook": "3.1.1",
"title": "Relazione cplusplus.com",
"author": "Andrea Mantovani",
"language": "it",
"plugins": ["image-captions"],
"pluginsConfig": {
"image-captions": {
"caption": "_CAPTION_"
}
}
}
| {
"gitbook": "3.1.1",
"title": "Relazione cplusplus.com",
"author": "Andrea Mantovani",
- "language": "it"
+ "language": "it",
? +
+ "plugins": ["image-captions"],
+ "pluginsConfig": {
+ "image-captions": {
+ "caption": "_CAPTION_"
+ }
+ }
} | 8 | 1.333333 | 7 | 1 |
39b74fc0dde618652aafaf2abe9c7599b8a76583 | Resources/config/redis.yml | Resources/config/redis.yml | framework:
annotations:
cache: oro.cache.annotations
session:
handler_id: snc_redis.session.handler
doctrine:
orm:
entity_managers:
default:
metadata_cache_driver:
type: service
id: oro.cache.doctrine.metadata
query_cache_driver:
type: service
id: oro.cache.doctrine.query
config:
metadata_cache_driver:
type: service
id: oro.cache.doctrine.metadata.config
query_cache_driver:
type: service
id: oro.cache.doctrine.query.config
search:
metadata_cache_driver:
type: service
id: oro.cache.doctrine.metadata.search
query_cache_driver:
type: service
id: oro.cache.doctrine.query.search
snc_redis:
clients: # configure phpredis clients
default:
type: predis
alias: default
dsn: '%redis_dsn_cache%'
doctrine:
type: predis
alias: doctrine
dsn: '%redis_dsn_doctrine%'
session:
type: predis
alias: session
dsn: '%redis_dsn_session%'
session: # configure sessions
client: session
prefix: session
use_as_default: true
jms_serializer:
metadata:
cache: Metadata\Cache\DoctrineCacheAdapter
| framework:
annotations:
cache: oro.cache.annotations
doctrine:
orm:
entity_managers:
default:
metadata_cache_driver:
type: service
id: oro.cache.doctrine.metadata
query_cache_driver:
type: service
id: oro.cache.doctrine.query
config:
metadata_cache_driver:
type: service
id: oro.cache.doctrine.metadata.config
query_cache_driver:
type: service
id: oro.cache.doctrine.query.config
search:
metadata_cache_driver:
type: service
id: oro.cache.doctrine.metadata.search
query_cache_driver:
type: service
id: oro.cache.doctrine.query.search
snc_redis:
clients: # configure phpredis clients
default:
type: predis
alias: default
dsn: '%redis_dsn_cache%'
doctrine:
type: predis
alias: doctrine
dsn: '%redis_dsn_doctrine%'
session:
type: predis
alias: session
dsn: '%redis_dsn_session%'
session: # configure sessions
client: session
prefix: session
use_as_default: true
jms_serializer:
metadata:
cache: Metadata\Cache\DoctrineCacheAdapter
| Remove hard coded session handler | Remove hard coded session handler
In case if user wants to use Redis as cache storage but don't for session storage we should be able setup it through parameters.yml. But due to hard coded config we can't. | YAML | mit | orocrm/redis-config | yaml | ## Code Before:
framework:
annotations:
cache: oro.cache.annotations
session:
handler_id: snc_redis.session.handler
doctrine:
orm:
entity_managers:
default:
metadata_cache_driver:
type: service
id: oro.cache.doctrine.metadata
query_cache_driver:
type: service
id: oro.cache.doctrine.query
config:
metadata_cache_driver:
type: service
id: oro.cache.doctrine.metadata.config
query_cache_driver:
type: service
id: oro.cache.doctrine.query.config
search:
metadata_cache_driver:
type: service
id: oro.cache.doctrine.metadata.search
query_cache_driver:
type: service
id: oro.cache.doctrine.query.search
snc_redis:
clients: # configure phpredis clients
default:
type: predis
alias: default
dsn: '%redis_dsn_cache%'
doctrine:
type: predis
alias: doctrine
dsn: '%redis_dsn_doctrine%'
session:
type: predis
alias: session
dsn: '%redis_dsn_session%'
session: # configure sessions
client: session
prefix: session
use_as_default: true
jms_serializer:
metadata:
cache: Metadata\Cache\DoctrineCacheAdapter
## Instruction:
Remove hard coded session handler
In case if user wants to use Redis as cache storage but don't for session storage we should be able setup it through parameters.yml. But due to hard coded config we can't.
## Code After:
framework:
annotations:
cache: oro.cache.annotations
doctrine:
orm:
entity_managers:
default:
metadata_cache_driver:
type: service
id: oro.cache.doctrine.metadata
query_cache_driver:
type: service
id: oro.cache.doctrine.query
config:
metadata_cache_driver:
type: service
id: oro.cache.doctrine.metadata.config
query_cache_driver:
type: service
id: oro.cache.doctrine.query.config
search:
metadata_cache_driver:
type: service
id: oro.cache.doctrine.metadata.search
query_cache_driver:
type: service
id: oro.cache.doctrine.query.search
snc_redis:
clients: # configure phpredis clients
default:
type: predis
alias: default
dsn: '%redis_dsn_cache%'
doctrine:
type: predis
alias: doctrine
dsn: '%redis_dsn_doctrine%'
session:
type: predis
alias: session
dsn: '%redis_dsn_session%'
session: # configure sessions
client: session
prefix: session
use_as_default: true
jms_serializer:
metadata:
cache: Metadata\Cache\DoctrineCacheAdapter
| framework:
annotations:
cache: oro.cache.annotations
- session:
- handler_id: snc_redis.session.handler
doctrine:
orm:
entity_managers:
default:
metadata_cache_driver:
type: service
id: oro.cache.doctrine.metadata
query_cache_driver:
type: service
id: oro.cache.doctrine.query
config:
metadata_cache_driver:
type: service
id: oro.cache.doctrine.metadata.config
query_cache_driver:
type: service
id: oro.cache.doctrine.query.config
search:
metadata_cache_driver:
type: service
id: oro.cache.doctrine.metadata.search
query_cache_driver:
type: service
id: oro.cache.doctrine.query.search
snc_redis:
clients: # configure phpredis clients
default:
type: predis
alias: default
dsn: '%redis_dsn_cache%'
doctrine:
type: predis
alias: doctrine
dsn: '%redis_dsn_doctrine%'
session:
type: predis
alias: session
dsn: '%redis_dsn_session%'
session: # configure sessions
client: session
prefix: session
use_as_default: true
jms_serializer:
metadata:
cache: Metadata\Cache\DoctrineCacheAdapter | 2 | 0.039216 | 0 | 2 |
b11ced7758e7e469a10c6fe331b1273782d4eb88 | TODO.md | TODO.md | - [x] Start with a fully connected, but minimal, topology.
- [x] Start with x-axis seed nodes.
- [ ] Choose a higher mutation rate for individuals which have a worse fitness than
the avergage.
- [ ] Adapt the mutation rate according to the average behavioral distance
- [ ] Weight mutation: Change until the behavioral distance to the original individual changes by
some percentage.
- [ ] Implement a Conrod GUI for experimenting with setting configuration options
during the simulation run.
- [ ] Embed a RNG into every Genome.
- [ ] Record statistics, like number of mutations.
- [ ] Experiment with several different graphs.
- [ ] Make probability of structural mutation dependent on the complexity
(number of nodes, number of links) of the genome.
- [ ] Substrate: Different placement
- [ ] Make weight mutation probability dependent on the current generation
- [ ] Make structural mutation dependent on the average node degree.
For example, if there is a low connectivity of nodes, adding a new node is
not a good thing.
- [ ] Add symmetric links, which, when updated, also update their counterpart.
- [ ] Add a fourth objective: Mutation work, which describes how much mutation has happened
since the beginning for that individual.
- [ ] When adding a link, use a fixed weight for the second link
| - [x] Start with a fully connected, but minimal, topology.
- [x] Start with x-axis seed nodes.
- [ ] Choose a higher mutation rate for individuals which have a worse fitness than
the avergage.
- [ ] Adapt the mutation rate according to the average behavioral distance
- [ ] Weight mutation: Change until the behavioral distance to the original individual changes by
some percentage.
- [ ] Implement a Conrod GUI for experimenting with setting configuration options
during the simulation run.
- [ ] Embed a RNG into every Genome.
- [ ] Record statistics, like number of mutations.
- [ ] Experiment with several different graphs.
- [ ] Make probability of structural mutation dependent on the complexity
(number of nodes, number of links) of the genome.
- [ ] Substrate: Different placement
- [ ] Make weight mutation probability dependent on the current generation
- [ ] Make structural mutation dependent on the average node degree.
For example, if there is a low connectivity of nodes, adding a new node is
not a good thing.
- [ ] Add symmetric links, which, when updated, also update their counterpart.
- [ ] Add a fourth objective: Mutation work, which describes how much mutation has happened
since the beginning for that individual.
- [ ] When adding a link, use a fixed weight for the second link
- [ ] The CPPNs we use, sum all inputs. This way, we cannot
represent e.g. ```sin(x) * sin(y)```. Add aggregation functions/nodes,
which can specify arbitrary functions on the inputs.
| Add todo: CPPNs with aggregation functions | Add todo: CPPNs with aggregation functions
| Markdown | mit | mneumann/hypernsga,mneumann/hypernsga | markdown | ## Code Before:
- [x] Start with a fully connected, but minimal, topology.
- [x] Start with x-axis seed nodes.
- [ ] Choose a higher mutation rate for individuals which have a worse fitness than
the avergage.
- [ ] Adapt the mutation rate according to the average behavioral distance
- [ ] Weight mutation: Change until the behavioral distance to the original individual changes by
some percentage.
- [ ] Implement a Conrod GUI for experimenting with setting configuration options
during the simulation run.
- [ ] Embed a RNG into every Genome.
- [ ] Record statistics, like number of mutations.
- [ ] Experiment with several different graphs.
- [ ] Make probability of structural mutation dependent on the complexity
(number of nodes, number of links) of the genome.
- [ ] Substrate: Different placement
- [ ] Make weight mutation probability dependent on the current generation
- [ ] Make structural mutation dependent on the average node degree.
For example, if there is a low connectivity of nodes, adding a new node is
not a good thing.
- [ ] Add symmetric links, which, when updated, also update their counterpart.
- [ ] Add a fourth objective: Mutation work, which describes how much mutation has happened
since the beginning for that individual.
- [ ] When adding a link, use a fixed weight for the second link
## Instruction:
Add todo: CPPNs with aggregation functions
## Code After:
- [x] Start with a fully connected, but minimal, topology.
- [x] Start with x-axis seed nodes.
- [ ] Choose a higher mutation rate for individuals which have a worse fitness than
the avergage.
- [ ] Adapt the mutation rate according to the average behavioral distance
- [ ] Weight mutation: Change until the behavioral distance to the original individual changes by
some percentage.
- [ ] Implement a Conrod GUI for experimenting with setting configuration options
during the simulation run.
- [ ] Embed a RNG into every Genome.
- [ ] Record statistics, like number of mutations.
- [ ] Experiment with several different graphs.
- [ ] Make probability of structural mutation dependent on the complexity
(number of nodes, number of links) of the genome.
- [ ] Substrate: Different placement
- [ ] Make weight mutation probability dependent on the current generation
- [ ] Make structural mutation dependent on the average node degree.
For example, if there is a low connectivity of nodes, adding a new node is
not a good thing.
- [ ] Add symmetric links, which, when updated, also update their counterpart.
- [ ] Add a fourth objective: Mutation work, which describes how much mutation has happened
since the beginning for that individual.
- [ ] When adding a link, use a fixed weight for the second link
- [ ] The CPPNs we use, sum all inputs. This way, we cannot
represent e.g. ```sin(x) * sin(y)```. Add aggregation functions/nodes,
which can specify arbitrary functions on the inputs.
| - [x] Start with a fully connected, but minimal, topology.
- [x] Start with x-axis seed nodes.
- [ ] Choose a higher mutation rate for individuals which have a worse fitness than
the avergage.
- [ ] Adapt the mutation rate according to the average behavioral distance
- [ ] Weight mutation: Change until the behavioral distance to the original individual changes by
some percentage.
- [ ] Implement a Conrod GUI for experimenting with setting configuration options
during the simulation run.
- [ ] Embed a RNG into every Genome.
- [ ] Record statistics, like number of mutations.
- [ ] Experiment with several different graphs.
- [ ] Make probability of structural mutation dependent on the complexity
(number of nodes, number of links) of the genome.
- [ ] Substrate: Different placement
- [ ] Make weight mutation probability dependent on the current generation
- [ ] Make structural mutation dependent on the average node degree.
For example, if there is a low connectivity of nodes, adding a new node is
not a good thing.
- [ ] Add symmetric links, which, when updated, also update their counterpart.
- [ ] Add a fourth objective: Mutation work, which describes how much mutation has happened
since the beginning for that individual.
- [ ] When adding a link, use a fixed weight for the second link
+ - [ ] The CPPNs we use, sum all inputs. This way, we cannot
+ represent e.g. ```sin(x) * sin(y)```. Add aggregation functions/nodes,
+ which can specify arbitrary functions on the inputs. | 3 | 0.130435 | 3 | 0 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.