commit stringlengths 40 40 | old_file stringlengths 4 184 | new_file stringlengths 4 184 | old_contents stringlengths 1 3.6k | new_contents stringlengths 5 3.38k | subject stringlengths 15 778 | message stringlengths 16 6.74k | lang stringclasses 201 values | license stringclasses 13 values | repos stringlengths 6 116k | config stringclasses 201 values | content stringlengths 137 7.24k | diff stringlengths 26 5.55k | diff_length int64 1 123 | relative_diff_length float64 0.01 89 | n_lines_added int64 0 108 | n_lines_deleted int64 0 106 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
30f290dac339d621972043be70ae520de128a0d5 | test/FrontendC/2005-07-20-SqrtNoErrno.c | test/FrontendC/2005-07-20-SqrtNoErrno.c | // RUN: %llvmgcc %s -S -o - -fno-math-errno | grep llvm.sqrt
#include <math.h>
float foo(float X) {
// Check that this compiles to llvm.sqrt when errno is ignored.
return sqrtf(X);
}
| // RUN: %llvmgcc %s -S -o - -fno-math-errno | grep llvm.sqrt
// llvm.sqrt has undefined behavior on negative inputs, so it is
// inappropriate to translate C/C++ sqrt to this.
// XFAIL: *
#include <math.h>
float foo(float X) {
// Check that this compiles to llvm.sqrt when errno is ignored.
return sqrtf(X);
}
| Disable test; what it's testing for is wrong. | Disable test; what it's testing for is wrong.
git-svn-id: 0ff597fd157e6f4fc38580e8d64ab130330d2411@82658 91177308-0d34-0410-b5e6-96231b3b80d8
| C | apache-2.0 | GPUOpen-Drivers/llvm,apple/swift-llvm,GPUOpen-Drivers/llvm,dslab-epfl/asap,apple/swift-llvm,dslab-epfl/asap,chubbymaggie/asap,GPUOpen-Drivers/llvm,apple/swift-llvm,chubbymaggie/asap,apple/swift-llvm,dslab-epfl/asap,GPUOpen-Drivers/llvm,llvm-mirror/llvm,apple/swift-llvm,dslab-epfl/asap,GPUOpen-Drivers/llvm,llvm-mirror/llvm,llvm-mirror/llvm,dslab-epfl/asap,GPUOpen-Drivers/llvm,chubbymaggie/asap,GPUOpen-Drivers/llvm,chubbymaggie/asap,llvm-mirror/llvm,chubbymaggie/asap,dslab-epfl/asap,llvm-mirror/llvm,llvm-mirror/llvm,GPUOpen-Drivers/llvm,llvm-mirror/llvm,apple/swift-llvm,dslab-epfl/asap,chubbymaggie/asap,llvm-mirror/llvm,apple/swift-llvm,apple/swift-llvm,llvm-mirror/llvm | c | ## Code Before:
// RUN: %llvmgcc %s -S -o - -fno-math-errno | grep llvm.sqrt
#include <math.h>
float foo(float X) {
// Check that this compiles to llvm.sqrt when errno is ignored.
return sqrtf(X);
}
## Instruction:
Disable test; what it's testing for is wrong.
git-svn-id: 0ff597fd157e6f4fc38580e8d64ab130330d2411@82658 91177308-0d34-0410-b5e6-96231b3b80d8
## Code After:
// RUN: %llvmgcc %s -S -o - -fno-math-errno | grep llvm.sqrt
// llvm.sqrt has undefined behavior on negative inputs, so it is
// inappropriate to translate C/C++ sqrt to this.
// XFAIL: *
#include <math.h>
float foo(float X) {
// Check that this compiles to llvm.sqrt when errno is ignored.
return sqrtf(X);
}
| // RUN: %llvmgcc %s -S -o - -fno-math-errno | grep llvm.sqrt
+ // llvm.sqrt has undefined behavior on negative inputs, so it is
+ // inappropriate to translate C/C++ sqrt to this.
+ // XFAIL: *
#include <math.h>
float foo(float X) {
// Check that this compiles to llvm.sqrt when errno is ignored.
return sqrtf(X);
} | 3 | 0.428571 | 3 | 0 |
e6927549fa00d4cc7c4d8d85ebeafbe56a89113e | requirements.txt | requirements.txt | PyYAML==3.11
Twisted==14.0.2
argparse==1.2.2
enum34==1.0.4
pytimeparse>=1.1.0
cobe==2.1.1 | PyYAML==3.11
Twisted==15.0.0
enum34==1.0.4
pytimeparse>=1.1.0
cobe==2.1.1
| Update Twisted and remove argparse | Update Twisted and remove argparse
| Text | mit | HubbeKing/Hubbot_Twisted | text | ## Code Before:
PyYAML==3.11
Twisted==14.0.2
argparse==1.2.2
enum34==1.0.4
pytimeparse>=1.1.0
cobe==2.1.1
## Instruction:
Update Twisted and remove argparse
## Code After:
PyYAML==3.11
Twisted==15.0.0
enum34==1.0.4
pytimeparse>=1.1.0
cobe==2.1.1
| PyYAML==3.11
- Twisted==14.0.2
? ^ ^
+ Twisted==15.0.0
? ^ ^
- argparse==1.2.2
enum34==1.0.4
pytimeparse>=1.1.0
cobe==2.1.1 | 3 | 0.5 | 1 | 2 |
952c916e33f89f356691504690f948ae26a67935 | src/main/scala/gitbucket/core/servlet/PluginControllerFilter.scala | src/main/scala/gitbucket/core/servlet/PluginControllerFilter.scala | package gitbucket.core.servlet
import javax.servlet._
import javax.servlet.http.HttpServletRequest
import gitbucket.core.controller.ControllerBase
import gitbucket.core.plugin.PluginRegistry
class PluginControllerFilter extends Filter {
private var filterConfig: FilterConfig = null
override def init(filterConfig: FilterConfig): Unit = {
this.filterConfig = filterConfig
}
override def destroy(): Unit = {
PluginRegistry().getControllers().foreach { case (controller, _) =>
controller.destroy()
}
}
override def doFilter(request: ServletRequest, response: ServletResponse, chain: FilterChain): Unit = {
val requestUri = request.asInstanceOf[HttpServletRequest].getRequestURI
PluginRegistry().getControllers()
.filter { case (_, path) =>
val start = path.replaceFirst("/\\*$", "/")
(requestUri + "/").startsWith(start)
}
.foreach { case (controller, _) =>
controller match {
case x: ControllerBase if(x.config == null) => x.init(filterConfig)
case _ => ()
}
val mockChain = new MockFilterChain()
controller.doFilter(request, response, mockChain)
if(mockChain.continue == false){
return ()
}
}
chain.doFilter(request, response)
}
}
| package gitbucket.core.servlet
import javax.servlet._
import javax.servlet.http.HttpServletRequest
import gitbucket.core.controller.ControllerBase
import gitbucket.core.plugin.PluginRegistry
class PluginControllerFilter extends Filter {
private var filterConfig: FilterConfig = null
override def init(filterConfig: FilterConfig): Unit = {
this.filterConfig = filterConfig
}
override def destroy(): Unit = {
PluginRegistry().getControllers().foreach { case (controller, _) =>
controller.destroy()
}
}
override def doFilter(request: ServletRequest, response: ServletResponse, chain: FilterChain): Unit = {
val contextPath = request.getServletContext.getContextPath
val requestUri = request.asInstanceOf[HttpServletRequest].getRequestURI.substring(contextPath.length)
PluginRegistry().getControllers()
.filter { case (_, path) =>
val start = path.replaceFirst("/\\*$", "/")
(requestUri + "/").startsWith(start)
}
.foreach { case (controller, _) =>
controller match {
case x: ControllerBase if(x.config == null) => x.init(filterConfig)
case _ => ()
}
val mockChain = new MockFilterChain()
controller.doFilter(request, response, mockChain)
if(mockChain.continue == false){
return ()
}
}
chain.doFilter(request, response)
}
}
| Remove context path from request uri in plugin routing | Remove context path from request uri in plugin routing
| Scala | apache-2.0 | imeszaros/gitbucket,takezoe/gitbucket,xuwei-k/gitbucket,mann-ed/gitbucket,superhj1987/gitbucket,gitbucket/gitbucket,superhj1987/gitbucket,imeszaros/gitbucket,McFoggy/gitbucket,takezoe/gitbucket,kounoike/gitbucket,kounoike/gitbucket,kounoike/gitbucket,McFoggy/gitbucket,imeszaros/gitbucket,imeszaros/gitbucket,takezoe/gitbucket,mann-ed/gitbucket,superhj1987/gitbucket,x-way/gitbucket,gitbucket/gitbucket,superhj1987/gitbucket,x-way/gitbucket,gitbucket/gitbucket,McFoggy/gitbucket,xuwei-k/gitbucket,McFoggy/gitbucket,xuwei-k/gitbucket,mann-ed/gitbucket,takezoe/gitbucket,x-way/gitbucket,xuwei-k/gitbucket,x-way/gitbucket,gitbucket/gitbucket,kounoike/gitbucket,mann-ed/gitbucket | scala | ## Code Before:
package gitbucket.core.servlet
import javax.servlet._
import javax.servlet.http.HttpServletRequest
import gitbucket.core.controller.ControllerBase
import gitbucket.core.plugin.PluginRegistry
class PluginControllerFilter extends Filter {
private var filterConfig: FilterConfig = null
override def init(filterConfig: FilterConfig): Unit = {
this.filterConfig = filterConfig
}
override def destroy(): Unit = {
PluginRegistry().getControllers().foreach { case (controller, _) =>
controller.destroy()
}
}
override def doFilter(request: ServletRequest, response: ServletResponse, chain: FilterChain): Unit = {
val requestUri = request.asInstanceOf[HttpServletRequest].getRequestURI
PluginRegistry().getControllers()
.filter { case (_, path) =>
val start = path.replaceFirst("/\\*$", "/")
(requestUri + "/").startsWith(start)
}
.foreach { case (controller, _) =>
controller match {
case x: ControllerBase if(x.config == null) => x.init(filterConfig)
case _ => ()
}
val mockChain = new MockFilterChain()
controller.doFilter(request, response, mockChain)
if(mockChain.continue == false){
return ()
}
}
chain.doFilter(request, response)
}
}
## Instruction:
Remove context path from request uri in plugin routing
## Code After:
package gitbucket.core.servlet
import javax.servlet._
import javax.servlet.http.HttpServletRequest
import gitbucket.core.controller.ControllerBase
import gitbucket.core.plugin.PluginRegistry
class PluginControllerFilter extends Filter {
private var filterConfig: FilterConfig = null
override def init(filterConfig: FilterConfig): Unit = {
this.filterConfig = filterConfig
}
override def destroy(): Unit = {
PluginRegistry().getControllers().foreach { case (controller, _) =>
controller.destroy()
}
}
override def doFilter(request: ServletRequest, response: ServletResponse, chain: FilterChain): Unit = {
val contextPath = request.getServletContext.getContextPath
val requestUri = request.asInstanceOf[HttpServletRequest].getRequestURI.substring(contextPath.length)
PluginRegistry().getControllers()
.filter { case (_, path) =>
val start = path.replaceFirst("/\\*$", "/")
(requestUri + "/").startsWith(start)
}
.foreach { case (controller, _) =>
controller match {
case x: ControllerBase if(x.config == null) => x.init(filterConfig)
case _ => ()
}
val mockChain = new MockFilterChain()
controller.doFilter(request, response, mockChain)
if(mockChain.continue == false){
return ()
}
}
chain.doFilter(request, response)
}
}
| package gitbucket.core.servlet
import javax.servlet._
import javax.servlet.http.HttpServletRequest
import gitbucket.core.controller.ControllerBase
import gitbucket.core.plugin.PluginRegistry
class PluginControllerFilter extends Filter {
private var filterConfig: FilterConfig = null
override def init(filterConfig: FilterConfig): Unit = {
this.filterConfig = filterConfig
}
override def destroy(): Unit = {
PluginRegistry().getControllers().foreach { case (controller, _) =>
controller.destroy()
}
}
override def doFilter(request: ServletRequest, response: ServletResponse, chain: FilterChain): Unit = {
+ val contextPath = request.getServletContext.getContextPath
- val requestUri = request.asInstanceOf[HttpServletRequest].getRequestURI
+ val requestUri = request.asInstanceOf[HttpServletRequest].getRequestURI.substring(contextPath.length)
? ++++++++++++++++++++++++++++++
PluginRegistry().getControllers()
.filter { case (_, path) =>
val start = path.replaceFirst("/\\*$", "/")
(requestUri + "/").startsWith(start)
}
.foreach { case (controller, _) =>
controller match {
case x: ControllerBase if(x.config == null) => x.init(filterConfig)
case _ => ()
}
val mockChain = new MockFilterChain()
controller.doFilter(request, response, mockChain)
if(mockChain.continue == false){
return ()
}
}
chain.doFilter(request, response)
}
} | 3 | 0.06383 | 2 | 1 |
3e1bdc0bcb4c8c648129c14a833c26c67b91ef86 | Amazon_Cloud_Player_tweak.user.css | Amazon_Cloud_Player_tweak.user.css | /* Amazon Cloud Player tweak */
@namespace url(http://www.w3.org/1999/xhtml);
@-moz-document url-prefix("https://www.amazon.co.jp/gp/dmusic/cloudplayer/") {
.recFooter, .bucket .title,
.currentSongActions, .optionCell > a, .actionButtons > a[href*="#remove"],
#accountOptions .setting {
display: none !important;
}
.countText { display: none !important; }
.countNumber::after { content: " song(s)" }
#nowPlayingSection .timer { font-size: 2.2rem !important; top: -2.8rem !important; }
#currentDuration { font-size: 2.8rem !important; font-weight: bold !important; }
}
| /* Amazon Cloud Player tweak */
@namespace url(http://www.w3.org/1999/xhtml);
@-moz-document url-prefix("https://www.amazon.co.jp/gp/dmusic/cloudplayer/") {
.recFooter, /* Not interested */
.recDescription > span[action="buy"], /* Buy */
#optionPanel li:nth-last-of-type(-n+2), /* Share */
.headerActions > .grey, /* Delete */
.ractiveContextMenu > li:nth-child(3) /* Remove from library */
{
display: none !important;
}
.listViewDuration {
font-weight: 300 !important;
font-size: 1.8rem !important;
}
}
| Support major design update of Amazon Cloud Player | Support major design update of Amazon Cloud Player
| CSS | unlicense | curipha/userstyles | css | ## Code Before:
/* Amazon Cloud Player tweak */
@namespace url(http://www.w3.org/1999/xhtml);
@-moz-document url-prefix("https://www.amazon.co.jp/gp/dmusic/cloudplayer/") {
.recFooter, .bucket .title,
.currentSongActions, .optionCell > a, .actionButtons > a[href*="#remove"],
#accountOptions .setting {
display: none !important;
}
.countText { display: none !important; }
.countNumber::after { content: " song(s)" }
#nowPlayingSection .timer { font-size: 2.2rem !important; top: -2.8rem !important; }
#currentDuration { font-size: 2.8rem !important; font-weight: bold !important; }
}
## Instruction:
Support major design update of Amazon Cloud Player
## Code After:
/* Amazon Cloud Player tweak */
@namespace url(http://www.w3.org/1999/xhtml);
@-moz-document url-prefix("https://www.amazon.co.jp/gp/dmusic/cloudplayer/") {
.recFooter, /* Not interested */
.recDescription > span[action="buy"], /* Buy */
#optionPanel li:nth-last-of-type(-n+2), /* Share */
.headerActions > .grey, /* Delete */
.ractiveContextMenu > li:nth-child(3) /* Remove from library */
{
display: none !important;
}
.listViewDuration {
font-weight: 300 !important;
font-size: 1.8rem !important;
}
}
| /* Amazon Cloud Player tweak */
@namespace url(http://www.w3.org/1999/xhtml);
@-moz-document url-prefix("https://www.amazon.co.jp/gp/dmusic/cloudplayer/") {
- .recFooter, .bucket .title,
- .currentSongActions, .optionCell > a, .actionButtons > a[href*="#remove"],
- #accountOptions .setting {
+ .recFooter, /* Not interested */
+ .recDescription > span[action="buy"], /* Buy */
+ #optionPanel li:nth-last-of-type(-n+2), /* Share */
+ .headerActions > .grey, /* Delete */
+ .ractiveContextMenu > li:nth-child(3) /* Remove from library */
+ {
display: none !important;
}
+ .listViewDuration {
+ font-weight: 300 !important;
+ font-size: 1.8rem !important;
+ }
- .countText { display: none !important; }
- .countNumber::after { content: " song(s)" }
-
- #nowPlayingSection .timer { font-size: 2.2rem !important; top: -2.8rem !important; }
- #currentDuration { font-size: 2.8rem !important; font-weight: bold !important; }
} | 18 | 1.2 | 10 | 8 |
c4e86f155922e3c40b1a71f5823310298426bc66 | lib/timerage/time_interval.rb | lib/timerage/time_interval.rb | require "delegate"
module Timerage
# A range of time. The exposes the Range like interface.
class TimeInterval < DelegateClass(Range)
def initialize(*args)
rng = if rangeish?(args.first)
args.first
else
Range.new(*args)
end
super rng
end
alias_method :to_time, :begin
def step(n, &blk)
if block_given?
time_enumerator(n).each(&blk)
else
time_enumerator(n)
end
end
protected
def rangeish?(an_obj)
an_obj.respond_to?(:begin) &&
an_obj.respond_to?(:end)
end
def time_enumerator(step)
not_done = if exclude_end?
->(nxt) { nxt < self.end }
else
->(nxt) { nxt <= self.end }
end
Enumerator.new do |y|
nxt = self.begin
while not_done.call(nxt) do
y << nxt
nxt += step
end
end
end
end
end
| require "delegate"
module Timerage
# A range of time. The exposes the Range like interface.
class TimeInterval < DelegateClass(Range)
def initialize(*args)
rng = if rangeish?(args.first)
args.first
else
Range.new(*args)
end
super rng
end
alias_method :to_time, :begin
def step(n, &blk)
if block_given?
time_enumerator(n).each(&blk)
else
time_enumerator(n)
end
end
protected
def rangeish?(an_obj)
an_obj.respond_to?(:begin) &&
an_obj.respond_to?(:end)
end
# ---
#
# This is implemented in a slightly more procedural style than i
# prefer because we want to work well with ActiveSupport::Duration
# steps. Adding a Duration to a time uses the timezone (dst, etc),
# leap second and leap day aware `#advance` method in
# ActiveSupport. However, multiplying a Duration by a number
# returns a number, rather than a duration. This, in turn, means
# that adding a duration times a number to a time results in
# Timely incorrect results. So we do it the hard way.
def time_enumerator(step)
count = (self.end - self.begin).div(step)
count += 1 if !exclude_end? and (self.end - self.begin) % step == 0
# We've included our end if it should be
Enumerator.new do |y|
y << last = self.begin
(count-1).times do
y << last = last + step
end
end
end
end
end
| Reduce the number of comparisons during iteration | Reduce the number of comparisons during iteration
| Ruby | mit | pezra/timerage,cschneid/timerage | ruby | ## Code Before:
require "delegate"
module Timerage
# A range of time. The exposes the Range like interface.
class TimeInterval < DelegateClass(Range)
def initialize(*args)
rng = if rangeish?(args.first)
args.first
else
Range.new(*args)
end
super rng
end
alias_method :to_time, :begin
def step(n, &blk)
if block_given?
time_enumerator(n).each(&blk)
else
time_enumerator(n)
end
end
protected
def rangeish?(an_obj)
an_obj.respond_to?(:begin) &&
an_obj.respond_to?(:end)
end
def time_enumerator(step)
not_done = if exclude_end?
->(nxt) { nxt < self.end }
else
->(nxt) { nxt <= self.end }
end
Enumerator.new do |y|
nxt = self.begin
while not_done.call(nxt) do
y << nxt
nxt += step
end
end
end
end
end
## Instruction:
Reduce the number of comparisons during iteration
## Code After:
require "delegate"
module Timerage
# A range of time. The exposes the Range like interface.
class TimeInterval < DelegateClass(Range)
def initialize(*args)
rng = if rangeish?(args.first)
args.first
else
Range.new(*args)
end
super rng
end
alias_method :to_time, :begin
def step(n, &blk)
if block_given?
time_enumerator(n).each(&blk)
else
time_enumerator(n)
end
end
protected
def rangeish?(an_obj)
an_obj.respond_to?(:begin) &&
an_obj.respond_to?(:end)
end
# ---
#
# This is implemented in a slightly more procedural style than i
# prefer because we want to work well with ActiveSupport::Duration
# steps. Adding a Duration to a time uses the timezone (dst, etc),
# leap second and leap day aware `#advance` method in
# ActiveSupport. However, multiplying a Duration by a number
# returns a number, rather than a duration. This, in turn, means
# that adding a duration times a number to a time results in
# Timely incorrect results. So we do it the hard way.
def time_enumerator(step)
count = (self.end - self.begin).div(step)
count += 1 if !exclude_end? and (self.end - self.begin) % step == 0
# We've included our end if it should be
Enumerator.new do |y|
y << last = self.begin
(count-1).times do
y << last = last + step
end
end
end
end
end
| require "delegate"
module Timerage
# A range of time. The exposes the Range like interface.
class TimeInterval < DelegateClass(Range)
def initialize(*args)
rng = if rangeish?(args.first)
args.first
else
Range.new(*args)
end
super rng
end
alias_method :to_time, :begin
def step(n, &blk)
if block_given?
time_enumerator(n).each(&blk)
else
time_enumerator(n)
end
end
protected
def rangeish?(an_obj)
an_obj.respond_to?(:begin) &&
an_obj.respond_to?(:end)
end
+ # ---
+ #
+ # This is implemented in a slightly more procedural style than i
+ # prefer because we want to work well with ActiveSupport::Duration
+ # steps. Adding a Duration to a time uses the timezone (dst, etc),
+ # leap second and leap day aware `#advance` method in
+ # ActiveSupport. However, multiplying a Duration by a number
+ # returns a number, rather than a duration. This, in turn, means
+ # that adding a duration times a number to a time results in
+ # Timely incorrect results. So we do it the hard way.
def time_enumerator(step)
+ count = (self.end - self.begin).div(step)
+ count += 1 if !exclude_end? and (self.end - self.begin) % step == 0
+ # We've included our end if it should be
- not_done = if exclude_end?
- ->(nxt) { nxt < self.end }
- else
- ->(nxt) { nxt <= self.end }
- end
Enumerator.new do |y|
- nxt = self.begin
? ^^
+ y << last = self.begin
? ^^^^^^^^
+ (count-1).times do
+ y << last = last + step
- while not_done.call(nxt) do
- y << nxt
-
- nxt += step
end
end
end
end
end | 26 | 0.509804 | 16 | 10 |
d0d079fef008aca627265736115ef8841f3e9e77 | README.md | README.md |
[](https://travis-ci.org/intellij-purescript/intellij-purescript)
[](https://opensource.org/licenses/BSD-3-Clause)
[](https://gitter.im/intellj-purescript/Lobby?utm_source=share-link&utm_medium=link&utm_campaign=share-link)
## Status
This plugin is still in the **early stages of development** so only has a handful of features. But over time things will stabilize and more features will be added.
## Usage
Visit [intellij-purescript.github.io] to find documentation about installation and features.

## FAQ
coming soon
## Compatible Jetbrains Products
| IntelliJ |
|---------------------------|
| 2017.1.2 |
| 2017.1.1 |
| 2017.1 |
| Latest EAP |
## Contributing
coming soon
### Dev
1. clone the repo
2. ./gradlew idea
3. ./generate_parser.sh
4. ./gradlew build
5. open in intellij |
[](https://travis-ci.org/intellij-purescript/intellij-purescript)
[](https://opensource.org/licenses/BSD-3-Clause)
[](https://gitter.im/intellj-purescript/Lobby?utm_source=share-link&utm_medium=link&utm_campaign=share-link)
## Status
This plugin is still in the **early stages of development** so only has a handful of features. But over time things will stabilize and more features will be added.
## Usage
Visit [intellij-purescript.github.io] to find documentation about installation and features.

## FAQ
coming soon
## Compatible Jetbrains Products
| IntelliJ |
|---------------------------|
| 2017.1.2 |
| 2017.1.1 |
| 2017.1 |
| Latest EAP |
## Contributing
coming soon
### Dev
1. clone the repo
2. ./gradlew idea
3. ./generate_parser.sh
4. ./gradlew build
5. open in intellij | Use badge form github actions instead of travis | Use badge form github actions instead of travis
| Markdown | bsd-3-clause | intellij-purescript/intellij-purescript,intellij-purescript/intellij-purescript | markdown | ## Code Before:
[](https://travis-ci.org/intellij-purescript/intellij-purescript)
[](https://opensource.org/licenses/BSD-3-Clause)
[](https://gitter.im/intellj-purescript/Lobby?utm_source=share-link&utm_medium=link&utm_campaign=share-link)
## Status
This plugin is still in the **early stages of development** so only has a handful of features. But over time things will stabilize and more features will be added.
## Usage
Visit [intellij-purescript.github.io] to find documentation about installation and features.

## FAQ
coming soon
## Compatible Jetbrains Products
| IntelliJ |
|---------------------------|
| 2017.1.2 |
| 2017.1.1 |
| 2017.1 |
| Latest EAP |
## Contributing
coming soon
### Dev
1. clone the repo
2. ./gradlew idea
3. ./generate_parser.sh
4. ./gradlew build
5. open in intellij
## Instruction:
Use badge form github actions instead of travis
## Code After:
[](https://travis-ci.org/intellij-purescript/intellij-purescript)
[](https://opensource.org/licenses/BSD-3-Clause)
[](https://gitter.im/intellj-purescript/Lobby?utm_source=share-link&utm_medium=link&utm_campaign=share-link)
## Status
This plugin is still in the **early stages of development** so only has a handful of features. But over time things will stabilize and more features will be added.
## Usage
Visit [intellij-purescript.github.io] to find documentation about installation and features.

## FAQ
coming soon
## Compatible Jetbrains Products
| IntelliJ |
|---------------------------|
| 2017.1.2 |
| 2017.1.1 |
| 2017.1 |
| Latest EAP |
## Contributing
coming soon
### Dev
1. clone the repo
2. ./gradlew idea
3. ./generate_parser.sh
4. ./gradlew build
5. open in intellij |
- [](https://travis-ci.org/intellij-purescript/intellij-purescript)
? ^^^^^^ -- ^^ --------------
+ [](https://travis-ci.org/intellij-purescript/intellij-purescript)
? ++ ^^^^ ^ +++++++++++++++++++++++++++++++
[](https://opensource.org/licenses/BSD-3-Clause)
[](https://gitter.im/intellj-purescript/Lobby?utm_source=share-link&utm_medium=link&utm_campaign=share-link)
## Status
This plugin is still in the **early stages of development** so only has a handful of features. But over time things will stabilize and more features will be added.
## Usage
Visit [intellij-purescript.github.io] to find documentation about installation and features.

## FAQ
coming soon
## Compatible Jetbrains Products
| IntelliJ |
|---------------------------|
| 2017.1.2 |
| 2017.1.1 |
| 2017.1 |
| Latest EAP |
## Contributing
coming soon
### Dev
1. clone the repo
2. ./gradlew idea
3. ./generate_parser.sh
4. ./gradlew build
5. open in intellij | 2 | 0.047619 | 1 | 1 |
3092673f6df76a079c6b948e8e81272edf9b14d5 | README.md | README.md |
[](https://travis-ci.org/jemc/0mq)
[](http://badge.fury.io/rb/0mq)
A Ruby-like wrapper for ffi-rzmq-core (ZeroMQ)
## Supported
Supported ZeroMQ (libzmq) versions:
- 3.x
- 4.x
Supported Ruby versions:
- MRI >= 1.9
- Rubinius 2.x
## Feature Requests / Bug Reports
File them as issues or pull requests on [the github repository](https://github.com/jemc/0mq).
## Authors
- Joe McIlvain
- Alex McLain
|
[](https://travis-ci.org/jemc/0mq)
[](http://badge.fury.io/rb/0mq)
A Ruby-like wrapper for ffi-rzmq-core (ZeroMQ)
## Supported
Supported ZeroMQ (libzmq) versions:
- 3.x
- 4.x
Supported Ruby versions:
- MRI >= 1.9
- Rubinius 2.x
## Feature Requests / Bug Reports
File them as issues or pull requests on [the github repository](https://github.com/jemc/0mq).
## Authors
- Joe McIlvain
- Alex McLain
## Installation / Prerequisites
- Requires the libzmq library: http://zeromq.org/intro:get-the-software
- PGM (multicast) requires compiling libzmq with ./configure --with-pgm
- Curve cryptography requires compiling libzmq with libsodium:
https://github.com/jedisct1/libsodium
## ZeroMQ Documentation
- Manual: http://zeromq.org/intro:read-the-manual
- API: http://api.zeromq.org/ | Install and ZeroMQ readme doc. | Install and ZeroMQ readme doc.
| Markdown | mit | jemc/0mq | markdown | ## Code Before:
[](https://travis-ci.org/jemc/0mq)
[](http://badge.fury.io/rb/0mq)
A Ruby-like wrapper for ffi-rzmq-core (ZeroMQ)
## Supported
Supported ZeroMQ (libzmq) versions:
- 3.x
- 4.x
Supported Ruby versions:
- MRI >= 1.9
- Rubinius 2.x
## Feature Requests / Bug Reports
File them as issues or pull requests on [the github repository](https://github.com/jemc/0mq).
## Authors
- Joe McIlvain
- Alex McLain
## Instruction:
Install and ZeroMQ readme doc.
## Code After:
[](https://travis-ci.org/jemc/0mq)
[](http://badge.fury.io/rb/0mq)
A Ruby-like wrapper for ffi-rzmq-core (ZeroMQ)
## Supported
Supported ZeroMQ (libzmq) versions:
- 3.x
- 4.x
Supported Ruby versions:
- MRI >= 1.9
- Rubinius 2.x
## Feature Requests / Bug Reports
File them as issues or pull requests on [the github repository](https://github.com/jemc/0mq).
## Authors
- Joe McIlvain
- Alex McLain
## Installation / Prerequisites
- Requires the libzmq library: http://zeromq.org/intro:get-the-software
- PGM (multicast) requires compiling libzmq with ./configure --with-pgm
- Curve cryptography requires compiling libzmq with libsodium:
https://github.com/jedisct1/libsodium
## ZeroMQ Documentation
- Manual: http://zeromq.org/intro:read-the-manual
- API: http://api.zeromq.org/ |
[](https://travis-ci.org/jemc/0mq)
[](http://badge.fury.io/rb/0mq)
A Ruby-like wrapper for ffi-rzmq-core (ZeroMQ)
## Supported
Supported ZeroMQ (libzmq) versions:
- 3.x
- 4.x
Supported Ruby versions:
- MRI >= 1.9
- Rubinius 2.x
## Feature Requests / Bug Reports
File them as issues or pull requests on [the github repository](https://github.com/jemc/0mq).
## Authors
- Joe McIlvain
- Alex McLain
+
+ ## Installation / Prerequisites
+
+ - Requires the libzmq library: http://zeromq.org/intro:get-the-software
+
+ - PGM (multicast) requires compiling libzmq with ./configure --with-pgm
+
+ - Curve cryptography requires compiling libzmq with libsodium:
+ https://github.com/jedisct1/libsodium
+
+ ## ZeroMQ Documentation
+
+ - Manual: http://zeromq.org/intro:read-the-manual
+
+ - API: http://api.zeromq.org/ | 15 | 0.517241 | 15 | 0 |
589aab1e407557946169ff3fae92748a7255ccdb | open_humans/templates/account/login-form.html | open_humans/templates/account/login-form.html | {% load utilities %}
{# Note - submit button not included. #}
<form class="form-horizontal" role="form" method="POST"
action="{% url 'account_login' %}" id="login-form">
{% csrf_token %}
<input type="hidden" name="next" value="{% next_page %}">
{% if form.errors %}
<div class="alert alert-danger">
<b>Error:</b>
<br>
<p>Your username and password didn't match. Please try again.</p>
</div>
{% endif %}
<div class="form-group">
<label for="login-username"
class="col-sm-2 control-label">Username</label>
<div class="col-sm-10">
<input type="text" class="form-control" name="username"
id="login-username" placeholder="Username"
value="{{ form.username.value|default:'' }}">
</div>
</div>
<div class="form-group">
<label for="login-password"
class="col-sm-2 control-label">Password</label>
<div class="col-sm-10">
<input type="password" class="form-control" name="password"
id="login-password" placeholder="Password"
value="{{ form.password.value|default:'' }}">
</div>
</div>
</form>
| {% load utilities %}
{# Note - submit button not included. #}
<form class="form-horizontal" role="form" method="POST"
action="{% url 'account_login' %}" id="login-form">
{% csrf_token %}
<input type="hidden" name="next" value="{% next_page %}">
{% if form.errors %}
<div class="alert alert-danger">
<p>
<strong>Error:</strong> Your username and password didn't match. Please
try again.
</p>
<p>
Please also verify that you're logging in with your username and
<strong>not your email address</strong>.
</p>
</div>
{% endif %}
<div class="form-group">
<label for="login-username"
class="col-sm-2 control-label">Username</label>
<div class="col-sm-10">
<input type="text" class="form-control" name="username"
id="login-username" placeholder="Username"
value="{{ form.username.value|default:'' }}">
</div>
</div>
<div class="form-group">
<label for="login-password"
class="col-sm-2 control-label">Password</label>
<div class="col-sm-10">
<input type="password" class="form-control" name="password"
id="login-password" placeholder="Password"
value="{{ form.password.value|default:'' }}">
</div>
</div>
</form>
| Add not about logging in with username | Add not about logging in with username
| HTML | mit | OpenHumans/open-humans,OpenHumans/open-humans,PersonalGenomesOrg/open-humans,PersonalGenomesOrg/open-humans,PersonalGenomesOrg/open-humans,PersonalGenomesOrg/open-humans,OpenHumans/open-humans,OpenHumans/open-humans | html | ## Code Before:
{% load utilities %}
{# Note - submit button not included. #}
<form class="form-horizontal" role="form" method="POST"
action="{% url 'account_login' %}" id="login-form">
{% csrf_token %}
<input type="hidden" name="next" value="{% next_page %}">
{% if form.errors %}
<div class="alert alert-danger">
<b>Error:</b>
<br>
<p>Your username and password didn't match. Please try again.</p>
</div>
{% endif %}
<div class="form-group">
<label for="login-username"
class="col-sm-2 control-label">Username</label>
<div class="col-sm-10">
<input type="text" class="form-control" name="username"
id="login-username" placeholder="Username"
value="{{ form.username.value|default:'' }}">
</div>
</div>
<div class="form-group">
<label for="login-password"
class="col-sm-2 control-label">Password</label>
<div class="col-sm-10">
<input type="password" class="form-control" name="password"
id="login-password" placeholder="Password"
value="{{ form.password.value|default:'' }}">
</div>
</div>
</form>
## Instruction:
Add not about logging in with username
## Code After:
{% load utilities %}
{# Note - submit button not included. #}
<form class="form-horizontal" role="form" method="POST"
action="{% url 'account_login' %}" id="login-form">
{% csrf_token %}
<input type="hidden" name="next" value="{% next_page %}">
{% if form.errors %}
<div class="alert alert-danger">
<p>
<strong>Error:</strong> Your username and password didn't match. Please
try again.
</p>
<p>
Please also verify that you're logging in with your username and
<strong>not your email address</strong>.
</p>
</div>
{% endif %}
<div class="form-group">
<label for="login-username"
class="col-sm-2 control-label">Username</label>
<div class="col-sm-10">
<input type="text" class="form-control" name="username"
id="login-username" placeholder="Username"
value="{{ form.username.value|default:'' }}">
</div>
</div>
<div class="form-group">
<label for="login-password"
class="col-sm-2 control-label">Password</label>
<div class="col-sm-10">
<input type="password" class="form-control" name="password"
id="login-password" placeholder="Password"
value="{{ form.password.value|default:'' }}">
</div>
</div>
</form>
| {% load utilities %}
{# Note - submit button not included. #}
<form class="form-horizontal" role="form" method="POST"
action="{% url 'account_login' %}" id="login-form">
{% csrf_token %}
<input type="hidden" name="next" value="{% next_page %}">
{% if form.errors %}
<div class="alert alert-danger">
- <b>Error:</b>
+ <p>
+ <strong>Error:</strong> Your username and password didn't match. Please
+ try again.
+ </p>
- <br>
? ^^
+ <p>
? ^
-
- <p>Your username and password didn't match. Please try again.</p>
+ Please also verify that you're logging in with your username and
+ <strong>not your email address</strong>.
+ </p>
</div>
{% endif %}
<div class="form-group">
<label for="login-username"
class="col-sm-2 control-label">Username</label>
<div class="col-sm-10">
<input type="text" class="form-control" name="username"
id="login-username" placeholder="Username"
value="{{ form.username.value|default:'' }}">
</div>
</div>
<div class="form-group">
<label for="login-password"
class="col-sm-2 control-label">Password</label>
<div class="col-sm-10">
<input type="password" class="form-control" name="password"
id="login-password" placeholder="Password"
value="{{ form.password.value|default:'' }}">
</div>
</div>
</form> | 12 | 0.292683 | 8 | 4 |
55e65be4b5dea5bf4edc79366947ad5b5c0fc326 | packages/pi/pipes-bzip.yaml | packages/pi/pipes-bzip.yaml | homepage: http://github.com/chemist/pipes-bzip#readme
changelog-type: ''
hash: 05f046d013fbcd430f0b11f682c71a064d9db3d017a2d84852e594b95f7b617f
test-bench-deps:
base: -any
pipes-bzip: -any
maintainer: chemistmail@gmail.com
synopsis: Bzip2 compression and decompression for Pipes streams
changelog: ''
basic-deps:
bytestring: -any
bzlib: -any
base: ! '>=4.7 && <5'
pipes-bytestring: -any
pipes: -any
all-versions:
- '0.1.0.0'
author: Smirnov Alexey
latest: '0.1.0.0'
description-type: haddock
description: Please see README.md
license-name: BSD3
| homepage: https://github.com/chemist/pipes-bzip
changelog-type: ''
hash: c3b465f7cb3d989b8bce3f7a1a06b063f6396ec6c9abafbd30c689df25a64980
test-bench-deps:
bytestring: -any
bzlib: -any
MonadRandom: -any
base: ==4.*
pipes-bytestring: -any
hspec: ! '>=1.3'
pipes: -any
random: -any
pipes-bzip: -any
QuickCheck: -any
pipes-safe: -any
directory: -any
maintainer: Alexey Smirnov
synopsis: Streaming compression/decompression via pipes.
changelog: ''
basic-deps:
bytestring: ! '>=0.9 && <0.11'
base: ==4.*
data-default: -any
bindings-DSL: -any
pipes: -any
mtl: ==2.*
pipes-safe: -any
all-versions:
- '0.1.0.0'
- '0.2.0.0'
- '0.2.0.1'
author: Hideyuki Tanaka, Alexey Smirnov
latest: '0.2.0.1'
description-type: haddock
description: Streaming compression/decompression via pipes.
license-name: BSD3
| Update from Hackage at 2016-02-29T12:41:06+0000 | Update from Hackage at 2016-02-29T12:41:06+0000
| YAML | mit | commercialhaskell/all-cabal-metadata | yaml | ## Code Before:
homepage: http://github.com/chemist/pipes-bzip#readme
changelog-type: ''
hash: 05f046d013fbcd430f0b11f682c71a064d9db3d017a2d84852e594b95f7b617f
test-bench-deps:
base: -any
pipes-bzip: -any
maintainer: chemistmail@gmail.com
synopsis: Bzip2 compression and decompression for Pipes streams
changelog: ''
basic-deps:
bytestring: -any
bzlib: -any
base: ! '>=4.7 && <5'
pipes-bytestring: -any
pipes: -any
all-versions:
- '0.1.0.0'
author: Smirnov Alexey
latest: '0.1.0.0'
description-type: haddock
description: Please see README.md
license-name: BSD3
## Instruction:
Update from Hackage at 2016-02-29T12:41:06+0000
## Code After:
homepage: https://github.com/chemist/pipes-bzip
changelog-type: ''
hash: c3b465f7cb3d989b8bce3f7a1a06b063f6396ec6c9abafbd30c689df25a64980
test-bench-deps:
bytestring: -any
bzlib: -any
MonadRandom: -any
base: ==4.*
pipes-bytestring: -any
hspec: ! '>=1.3'
pipes: -any
random: -any
pipes-bzip: -any
QuickCheck: -any
pipes-safe: -any
directory: -any
maintainer: Alexey Smirnov
synopsis: Streaming compression/decompression via pipes.
changelog: ''
basic-deps:
bytestring: ! '>=0.9 && <0.11'
base: ==4.*
data-default: -any
bindings-DSL: -any
pipes: -any
mtl: ==2.*
pipes-safe: -any
all-versions:
- '0.1.0.0'
- '0.2.0.0'
- '0.2.0.1'
author: Hideyuki Tanaka, Alexey Smirnov
latest: '0.2.0.1'
description-type: haddock
description: Streaming compression/decompression via pipes.
license-name: BSD3
| - homepage: http://github.com/chemist/pipes-bzip#readme
? -------
+ homepage: https://github.com/chemist/pipes-bzip
? +
changelog-type: ''
- hash: 05f046d013fbcd430f0b11f682c71a064d9db3d017a2d84852e594b95f7b617f
+ hash: c3b465f7cb3d989b8bce3f7a1a06b063f6396ec6c9abafbd30c689df25a64980
test-bench-deps:
- base: -any
+ bytestring: -any
+ bzlib: -any
+ MonadRandom: -any
+ base: ==4.*
+ pipes-bytestring: -any
+ hspec: ! '>=1.3'
+ pipes: -any
+ random: -any
pipes-bzip: -any
- maintainer: chemistmail@gmail.com
- synopsis: Bzip2 compression and decompression for Pipes streams
+ QuickCheck: -any
+ pipes-safe: -any
+ directory: -any
+ maintainer: Alexey Smirnov
+ synopsis: Streaming compression/decompression via pipes.
changelog: ''
basic-deps:
- bytestring: -any
- bzlib: -any
- base: ! '>=4.7 && <5'
- pipes-bytestring: -any
+ bytestring: ! '>=0.9 && <0.11'
+ base: ==4.*
+ data-default: -any
+ bindings-DSL: -any
pipes: -any
+ mtl: ==2.*
+ pipes-safe: -any
all-versions:
- '0.1.0.0'
- author: Smirnov Alexey
+ - '0.2.0.0'
+ - '0.2.0.1'
+ author: Hideyuki Tanaka, Alexey Smirnov
- latest: '0.1.0.0'
? ^ ^
+ latest: '0.2.0.1'
? ^ ^
description-type: haddock
- description: Please see README.md
+ description: Streaming compression/decompression via pipes.
license-name: BSD3 | 38 | 1.727273 | 26 | 12 |
e13ae3118f938383641f598222fe6780555dfddf | sota.tex | sota.tex | % State of the Art Chapter
Introduce the SOTA here.
| % State of the Art Chapter
\section{Linux Kernel}
Blah.
\subsection{Linux Kernel Modules}
Blah.
\section{Software Composition Analysis}
SCA be here.
\subsection{Static Binary Analysis}
SBA be here.
| Update State of the Art section | Update State of the Art section
| TeX | mit | okuuva/cse-dtyo | tex | ## Code Before:
% State of the Art Chapter
Introduce the SOTA here.
## Instruction:
Update State of the Art section
## Code After:
% State of the Art Chapter
\section{Linux Kernel}
Blah.
\subsection{Linux Kernel Modules}
Blah.
\section{Software Composition Analysis}
SCA be here.
\subsection{Static Binary Analysis}
SBA be here.
| % State of the Art Chapter
- Introduce the SOTA here.
+ \section{Linux Kernel}
+
+ Blah.
+
+ \subsection{Linux Kernel Modules}
+
+ Blah.
+
+ \section{Software Composition Analysis}
+
+ SCA be here.
+
+ \subsection{Static Binary Analysis}
+
+ SBA be here. | 16 | 5.333333 | 15 | 1 |
6f54577608e4905d6f43fe5a3c2cf9f3c14272a9 | config/deploy/production.rb | config/deploy/production.rb |
set :user, "#{application}"
set :is_root_domain, true
set :root_domain, "tracker.eballance.cz"
set :branch, "master"
set :deploy_to, "/home/#{user}/web"
set :rails_env, "production"
set :default_environment, {
"PATH" => "/home/#{user}/.rbenv/shims:/home/#{user}/.rbenv/bin:$PATH"
}
set :normal_symlinks, ["config/database.yml", "db/#{rails_env}.sqlite3"]
require "whenever/capistrano"
set :whenever_environment, defer { stage }
set :whenever_command, "bundle exec whenever"
|
set :user, "#{application}"
set :is_root_domain, false
set :root_domain, ""
set :branch, "master"
set :deploy_to, "/home/#{user}/web"
set :rails_env, "production"
set :default_environment, {
"PATH" => "/home/#{user}/.rbenv/shims:/home/#{user}/.rbenv/bin:$PATH"
}
set :normal_symlinks, ["config/database.yml", "config/config.yml", "db/#{rails_env}.sqlite3"]
require "whenever/capistrano"
set :whenever_environment, defer { stage }
set :whenever_command, "bundle exec whenever"
| Add config.yml to deploy process | Add config.yml to deploy process
| Ruby | mit | eballance/trackerapp,eballance/trackerapp,eballance/trackerapp | ruby | ## Code Before:
set :user, "#{application}"
set :is_root_domain, true
set :root_domain, "tracker.eballance.cz"
set :branch, "master"
set :deploy_to, "/home/#{user}/web"
set :rails_env, "production"
set :default_environment, {
"PATH" => "/home/#{user}/.rbenv/shims:/home/#{user}/.rbenv/bin:$PATH"
}
set :normal_symlinks, ["config/database.yml", "db/#{rails_env}.sqlite3"]
require "whenever/capistrano"
set :whenever_environment, defer { stage }
set :whenever_command, "bundle exec whenever"
## Instruction:
Add config.yml to deploy process
## Code After:
set :user, "#{application}"
set :is_root_domain, false
set :root_domain, ""
set :branch, "master"
set :deploy_to, "/home/#{user}/web"
set :rails_env, "production"
set :default_environment, {
"PATH" => "/home/#{user}/.rbenv/shims:/home/#{user}/.rbenv/bin:$PATH"
}
set :normal_symlinks, ["config/database.yml", "config/config.yml", "db/#{rails_env}.sqlite3"]
require "whenever/capistrano"
set :whenever_environment, defer { stage }
set :whenever_command, "bundle exec whenever"
|
set :user, "#{application}"
- set :is_root_domain, true
? ^^^
+ set :is_root_domain, false
? ^^^^
- set :root_domain, "tracker.eballance.cz"
+ set :root_domain, ""
set :branch, "master"
set :deploy_to, "/home/#{user}/web"
set :rails_env, "production"
set :default_environment, {
"PATH" => "/home/#{user}/.rbenv/shims:/home/#{user}/.rbenv/bin:$PATH"
}
- set :normal_symlinks, ["config/database.yml", "db/#{rails_env}.sqlite3"]
+ set :normal_symlinks, ["config/database.yml", "config/config.yml", "db/#{rails_env}.sqlite3"]
? +++++++++++++++++++++
require "whenever/capistrano"
set :whenever_environment, defer { stage }
set :whenever_command, "bundle exec whenever" | 6 | 0.333333 | 3 | 3 |
2a2bf5a8ea26d64995d79b250ed39a45071a4cb7 | appveyor.yml | appveyor.yml | environment:
matrix:
# node.js
- nodejs_version: "5"
- nodejs_version: "4"
# Install scripts. (runs after repo cloning)
install:
# Get the latest stable version of Node.js or io.js
- ps: Install-Product node $env:nodejs_version
# install modules
- npm install
# Post-install test scripts.
test_script:
# run tests
- npm test
# artifacts:
# - path: ./junit/xunit.xml
# - path: ./xunit.xml
# nothing to compile in this project
build: off
deploy: off
| environment:
matrix:
# node.js
- nodejs_version: "5"
- nodejs_version: "4"
# Install scripts. (runs after repo cloning)
install:
# Get the latest stable version of Node.js or io.js
- ps: Install-Product node $env:nodejs_version
# install modules
- npm install
# remove unused modules from node_modules directory
- npm prune
# Post-install test scripts.
test_script:
# run tests
- npm test
# artifacts:
# - path: ./junit/xunit.xml
# - path: ./xunit.xml
# nothing to compile in this project
build: off
deploy: off
cache:
- node_modules
| Add node_modules caching to AppVeyor | Add node_modules caching to AppVeyor
| YAML | mit | convoyinc/apollo-client,calebmer/apollo-client,apollographql/apollo-client,apollostack/apollo-client,convoyinc/apollo-client,stevewillard/apollo-client,convoyinc/apollo-client,stevewillard/apollo-client,calebmer/apollo-client,cesarsolorzano/apollo-client,cesarsolorzano/apollo-client,cesarsolorzano/apollo-client,apollostack/apollo-client,calebmer/apollo-client,stevewillard/apollo-client,apollostack/apollo-client,apollographql/apollo-client | yaml | ## Code Before:
environment:
matrix:
# node.js
- nodejs_version: "5"
- nodejs_version: "4"
# Install scripts. (runs after repo cloning)
install:
# Get the latest stable version of Node.js or io.js
- ps: Install-Product node $env:nodejs_version
# install modules
- npm install
# Post-install test scripts.
test_script:
# run tests
- npm test
# artifacts:
# - path: ./junit/xunit.xml
# - path: ./xunit.xml
# nothing to compile in this project
build: off
deploy: off
## Instruction:
Add node_modules caching to AppVeyor
## Code After:
environment:
matrix:
# node.js
- nodejs_version: "5"
- nodejs_version: "4"
# Install scripts. (runs after repo cloning)
install:
# Get the latest stable version of Node.js or io.js
- ps: Install-Product node $env:nodejs_version
# install modules
- npm install
# remove unused modules from node_modules directory
- npm prune
# Post-install test scripts.
test_script:
# run tests
- npm test
# artifacts:
# - path: ./junit/xunit.xml
# - path: ./xunit.xml
# nothing to compile in this project
build: off
deploy: off
cache:
- node_modules
| environment:
matrix:
# node.js
- nodejs_version: "5"
- nodejs_version: "4"
# Install scripts. (runs after repo cloning)
install:
# Get the latest stable version of Node.js or io.js
- ps: Install-Product node $env:nodejs_version
# install modules
- npm install
+ # remove unused modules from node_modules directory
+ - npm prune
# Post-install test scripts.
test_script:
# run tests
- npm test
# artifacts:
# - path: ./junit/xunit.xml
# - path: ./xunit.xml
# nothing to compile in this project
build: off
deploy: off
+
+ cache:
+ - node_modules | 5 | 0.2 | 5 | 0 |
469d73255365392a821d701b4df9098d97b7546a | judge/toyojjudge/taskrunner.py | judge/toyojjudge/taskrunner.py | import asyncio
import logging
logger = logging.getLogger(__name__)
class TaskRunner:
def __init__(self, sandbox_pool, languages, checkers):
self.sandbox_pool = sandbox_pool
self.languages = languages
self.checkers = checkers
async def run(self, task):
async with self.sandbox_pool.acquire() as box:
logger.debug("Running %s", task)
lang = self.languages[task.submission.language_name]
check = self.checkers[task.testcase.checker_name]
await lang.run_task(box, task)
if task.verdict is not None:
task.accepted = False
else:
await check.check(box, task)
| import asyncio
import logging
logger = logging.getLogger(__name__)
class TaskRunner:
def __init__(self, sandbox_pool, languages, checkers):
self.sandbox_pool = sandbox_pool
self.languages = languages
self.checkers = checkers
async def run(self, task):
async with self.sandbox_pool.acquire() as box:
language_name = task.submission.language_name
checker_name = task.testcase.checker_name
logger.info("Running %s, language %s, checker %s",
task, language_name, checker_name)
lang = self.languages[language_name]
check = self.checkers[checker_name]
await lang.run_task(box, task)
if task.verdict is not None:
task.accepted = False
else:
await check.check(box, task)
| Print running task, language and checker as INFO | judge: Print running task, language and checker as INFO
| Python | agpl-3.0 | johnchen902/toyoj,johnchen902/toyoj,johnchen902/toyoj,johnchen902/toyoj,johnchen902/toyoj,johnchen902/toyoj | python | ## Code Before:
import asyncio
import logging
logger = logging.getLogger(__name__)
class TaskRunner:
def __init__(self, sandbox_pool, languages, checkers):
self.sandbox_pool = sandbox_pool
self.languages = languages
self.checkers = checkers
async def run(self, task):
async with self.sandbox_pool.acquire() as box:
logger.debug("Running %s", task)
lang = self.languages[task.submission.language_name]
check = self.checkers[task.testcase.checker_name]
await lang.run_task(box, task)
if task.verdict is not None:
task.accepted = False
else:
await check.check(box, task)
## Instruction:
judge: Print running task, language and checker as INFO
## Code After:
import asyncio
import logging
logger = logging.getLogger(__name__)
class TaskRunner:
def __init__(self, sandbox_pool, languages, checkers):
self.sandbox_pool = sandbox_pool
self.languages = languages
self.checkers = checkers
async def run(self, task):
async with self.sandbox_pool.acquire() as box:
language_name = task.submission.language_name
checker_name = task.testcase.checker_name
logger.info("Running %s, language %s, checker %s",
task, language_name, checker_name)
lang = self.languages[language_name]
check = self.checkers[checker_name]
await lang.run_task(box, task)
if task.verdict is not None:
task.accepted = False
else:
await check.check(box, task)
| import asyncio
import logging
logger = logging.getLogger(__name__)
class TaskRunner:
def __init__(self, sandbox_pool, languages, checkers):
self.sandbox_pool = sandbox_pool
self.languages = languages
self.checkers = checkers
async def run(self, task):
async with self.sandbox_pool.acquire() as box:
- logger.debug("Running %s", task)
+ language_name = task.submission.language_name
+ checker_name = task.testcase.checker_name
+ logger.info("Running %s, language %s, checker %s",
+ task, language_name, checker_name)
- lang = self.languages[task.submission.language_name]
? ----------------
+ lang = self.languages[language_name]
- check = self.checkers[task.testcase.checker_name]
? --------------
+ check = self.checkers[checker_name]
await lang.run_task(box, task)
if task.verdict is not None:
task.accepted = False
else:
await check.check(box, task) | 9 | 0.428571 | 6 | 3 |
782de357d2a2d15e033471408674b08e0489400a | .travis.yml | .travis.yml | ---
sudo: required
language: node_js
install:
- sudo pip install -r requirements.txt
- npm install
node_js:
- node
services:
- docker
script:
- yamllint --strict $(git ls-files '*.yaml' '*.yml') || exit 1
- >
find -name "*.md" -not -path "*/node_modules/*"
| xargs markdownlint
|| exit 1
- find . -type f -iname "*.sh" | while read -r line; do echo "Linting $line"; docker run -v "$(pwd)":/mnt --rm koalaman/shellcheck:v0.7.0 "$line" || exit 1; done;
matrix:
fast_finish: true
| ---
sudo: required
language: node_js
install:
- sudo pip install -r requirements.txt
- npm install
node_js:
- node
services:
- docker
script:
- yamllint --strict $(git ls-files '*.yaml' '*.yml') || exit 1
- >
find -name "*.md" -not -path "*/node_modules/*"
| xargs markdownlint
|| exit 1
- >
find . -type f -iname "*.sh"
| while read -r line; do
echo "Linting $line";
docker run -v "$(pwd)":/mnt --rm koalaman/shellcheck:v0.7.0 "$line"
|| exit 1;
done;
matrix:
fast_finish: true
| Split shellcheck command on multiple lines | Split shellcheck command on multiple lines
| YAML | mit | ferrarimarco/home-lab,ferrarimarco/home-lab | yaml | ## Code Before:
---
sudo: required
language: node_js
install:
- sudo pip install -r requirements.txt
- npm install
node_js:
- node
services:
- docker
script:
- yamllint --strict $(git ls-files '*.yaml' '*.yml') || exit 1
- >
find -name "*.md" -not -path "*/node_modules/*"
| xargs markdownlint
|| exit 1
- find . -type f -iname "*.sh" | while read -r line; do echo "Linting $line"; docker run -v "$(pwd)":/mnt --rm koalaman/shellcheck:v0.7.0 "$line" || exit 1; done;
matrix:
fast_finish: true
## Instruction:
Split shellcheck command on multiple lines
## Code After:
---
sudo: required
language: node_js
install:
- sudo pip install -r requirements.txt
- npm install
node_js:
- node
services:
- docker
script:
- yamllint --strict $(git ls-files '*.yaml' '*.yml') || exit 1
- >
find -name "*.md" -not -path "*/node_modules/*"
| xargs markdownlint
|| exit 1
- >
find . -type f -iname "*.sh"
| while read -r line; do
echo "Linting $line";
docker run -v "$(pwd)":/mnt --rm koalaman/shellcheck:v0.7.0 "$line"
|| exit 1;
done;
matrix:
fast_finish: true
| ---
sudo: required
language: node_js
install:
- sudo pip install -r requirements.txt
- npm install
node_js:
- node
services:
- docker
script:
- yamllint --strict $(git ls-files '*.yaml' '*.yml') || exit 1
- >
find -name "*.md" -not -path "*/node_modules/*"
| xargs markdownlint
|| exit 1
- - find . -type f -iname "*.sh" | while read -r line; do echo "Linting $line"; docker run -v "$(pwd)":/mnt --rm koalaman/shellcheck:v0.7.0 "$line" || exit 1; done;
+ - >
+ find . -type f -iname "*.sh"
+ | while read -r line; do
+ echo "Linting $line";
+ docker run -v "$(pwd)":/mnt --rm koalaman/shellcheck:v0.7.0 "$line"
+ || exit 1;
+ done;
matrix:
fast_finish: true | 8 | 0.32 | 7 | 1 |
04f36fab2168fb9cd34d3c6fc7f31533c90b9149 | app/clients/statsd/statsd_client.py | app/clients/statsd/statsd_client.py | from statsd import StatsClient
class StatsdClient(StatsClient):
def init_app(self, app, *args, **kwargs):
self.active = app.config.get('STATSD_ENABLED')
self.namespace = app.config.get('NOTIFY_ENVIRONMENT') + ".notifications.api."
if self.active:
StatsClient.__init__(
self,
app.config.get('STATSD_HOST'),
app.config.get('STATSD_PORT'),
prefix=app.config.get('STATSD_PREFIX')
)
def format_stat_name(self, stat):
return self.namespace + stat
def incr(self, stat, count=1, rate=1):
if self.active:
super(StatsClient, self).incr(self.format_stat_name(stat), count, rate)
def timing(self, stat, delta, rate=1):
if self.active:
super(StatsClient, self).timing(self.format_stat_name(stat), delta, rate)
def timing_with_dates(self, stat, start, end, rate=1):
if self.active:
delta = (start - end).total_seconds()
super(StatsClient, self).timing(stat, delta, rate)
| from statsd import StatsClient
class StatsdClient(StatsClient):
def init_app(self, app, *args, **kwargs):
self.active = app.config.get('STATSD_ENABLED')
self.namespace = app.config.get('NOTIFY_ENVIRONMENT') + ".notifications.api."
if self.active:
StatsClient.__init__(
self,
app.config.get('STATSD_HOST'),
app.config.get('STATSD_PORT'),
prefix=app.config.get('STATSD_PREFIX')
)
def format_stat_name(self, stat):
return self.namespace + stat
def incr(self, stat, count=1, rate=1):
if self.active:
super(StatsClient, self).incr(self.format_stat_name(stat), count, rate)
def timing(self, stat, delta, rate=1):
if self.active:
super(StatsClient, self).timing(self.format_stat_name(stat), delta, rate)
def timing_with_dates(self, stat, start, end, rate=1):
if self.active:
delta = (start - end).total_seconds()
super(StatsClient, self).timing(self.format_stat_name(stat), delta, rate)
| Format the stat name with environmenbt | Format the stat name with environmenbt
| Python | mit | alphagov/notifications-api,alphagov/notifications-api | python | ## Code Before:
from statsd import StatsClient
class StatsdClient(StatsClient):
def init_app(self, app, *args, **kwargs):
self.active = app.config.get('STATSD_ENABLED')
self.namespace = app.config.get('NOTIFY_ENVIRONMENT') + ".notifications.api."
if self.active:
StatsClient.__init__(
self,
app.config.get('STATSD_HOST'),
app.config.get('STATSD_PORT'),
prefix=app.config.get('STATSD_PREFIX')
)
def format_stat_name(self, stat):
return self.namespace + stat
def incr(self, stat, count=1, rate=1):
if self.active:
super(StatsClient, self).incr(self.format_stat_name(stat), count, rate)
def timing(self, stat, delta, rate=1):
if self.active:
super(StatsClient, self).timing(self.format_stat_name(stat), delta, rate)
def timing_with_dates(self, stat, start, end, rate=1):
if self.active:
delta = (start - end).total_seconds()
super(StatsClient, self).timing(stat, delta, rate)
## Instruction:
Format the stat name with environmenbt
## Code After:
from statsd import StatsClient
class StatsdClient(StatsClient):
def init_app(self, app, *args, **kwargs):
self.active = app.config.get('STATSD_ENABLED')
self.namespace = app.config.get('NOTIFY_ENVIRONMENT') + ".notifications.api."
if self.active:
StatsClient.__init__(
self,
app.config.get('STATSD_HOST'),
app.config.get('STATSD_PORT'),
prefix=app.config.get('STATSD_PREFIX')
)
def format_stat_name(self, stat):
return self.namespace + stat
def incr(self, stat, count=1, rate=1):
if self.active:
super(StatsClient, self).incr(self.format_stat_name(stat), count, rate)
def timing(self, stat, delta, rate=1):
if self.active:
super(StatsClient, self).timing(self.format_stat_name(stat), delta, rate)
def timing_with_dates(self, stat, start, end, rate=1):
if self.active:
delta = (start - end).total_seconds()
super(StatsClient, self).timing(self.format_stat_name(stat), delta, rate)
| from statsd import StatsClient
class StatsdClient(StatsClient):
def init_app(self, app, *args, **kwargs):
self.active = app.config.get('STATSD_ENABLED')
self.namespace = app.config.get('NOTIFY_ENVIRONMENT') + ".notifications.api."
if self.active:
StatsClient.__init__(
self,
app.config.get('STATSD_HOST'),
app.config.get('STATSD_PORT'),
prefix=app.config.get('STATSD_PREFIX')
)
def format_stat_name(self, stat):
return self.namespace + stat
def incr(self, stat, count=1, rate=1):
if self.active:
super(StatsClient, self).incr(self.format_stat_name(stat), count, rate)
def timing(self, stat, delta, rate=1):
if self.active:
super(StatsClient, self).timing(self.format_stat_name(stat), delta, rate)
def timing_with_dates(self, stat, start, end, rate=1):
if self.active:
delta = (start - end).total_seconds()
- super(StatsClient, self).timing(stat, delta, rate)
+ super(StatsClient, self).timing(self.format_stat_name(stat), delta, rate)
? ++++++++++++ +++++++++++
| 2 | 0.064516 | 1 | 1 |
fe7720e1c5ec783c503def3e7e4293e35065db98 | packages/fyndiq-component-input/src/invisible-no-form.js | packages/fyndiq-component-input/src/invisible-no-form.js | import React from 'react'
import PropTypes from 'prop-types'
import AutosizeInput from 'react-input-autosize'
import styles from '../input.css'
const InvisibleNoFormInput = ({ onChange, value, ...props }) => (
<AutosizeInput
inputClassName={`
${styles.invisibleInput}
${!value && styles.invisibleInputEmpty}
`}
onChange={e => onChange(e.target.value)}
value={value}
{...props}
/>
)
InvisibleNoFormInput.propTypes = {
onChange: PropTypes.func,
value: PropTypes.string,
}
InvisibleNoFormInput.defaultProps = {
onChange: () => {},
value: '',
}
export default InvisibleNoFormInput
| import React from 'react'
import PropTypes from 'prop-types'
import AutosizeInput from 'react-input-autosize'
import styles from '../input.css'
const InvisibleNoFormInput = ({ onChange, value, ...props }) => (
<AutosizeInput
inputClassName={`
${styles.invisibleInput}
${!value && styles.invisibleInputEmpty}
`}
onChange={e => onChange(e.target.value)}
extraWidth={40}
value={value}
{...props}
/>
)
InvisibleNoFormInput.propTypes = {
onChange: PropTypes.func,
value: PropTypes.string,
}
InvisibleNoFormInput.defaultProps = {
onChange: () => {},
value: '',
}
export default InvisibleNoFormInput
| Add some extra width to the invisibleinputnoform | :wrench: Add some extra width to the invisibleinputnoform
| JavaScript | mit | fyndiq/fyndiq-ui,fyndiq/fyndiq-ui | javascript | ## Code Before:
import React from 'react'
import PropTypes from 'prop-types'
import AutosizeInput from 'react-input-autosize'
import styles from '../input.css'
const InvisibleNoFormInput = ({ onChange, value, ...props }) => (
<AutosizeInput
inputClassName={`
${styles.invisibleInput}
${!value && styles.invisibleInputEmpty}
`}
onChange={e => onChange(e.target.value)}
value={value}
{...props}
/>
)
InvisibleNoFormInput.propTypes = {
onChange: PropTypes.func,
value: PropTypes.string,
}
InvisibleNoFormInput.defaultProps = {
onChange: () => {},
value: '',
}
export default InvisibleNoFormInput
## Instruction:
:wrench: Add some extra width to the invisibleinputnoform
## Code After:
import React from 'react'
import PropTypes from 'prop-types'
import AutosizeInput from 'react-input-autosize'
import styles from '../input.css'
const InvisibleNoFormInput = ({ onChange, value, ...props }) => (
<AutosizeInput
inputClassName={`
${styles.invisibleInput}
${!value && styles.invisibleInputEmpty}
`}
onChange={e => onChange(e.target.value)}
extraWidth={40}
value={value}
{...props}
/>
)
InvisibleNoFormInput.propTypes = {
onChange: PropTypes.func,
value: PropTypes.string,
}
InvisibleNoFormInput.defaultProps = {
onChange: () => {},
value: '',
}
export default InvisibleNoFormInput
| import React from 'react'
import PropTypes from 'prop-types'
import AutosizeInput from 'react-input-autosize'
import styles from '../input.css'
const InvisibleNoFormInput = ({ onChange, value, ...props }) => (
<AutosizeInput
inputClassName={`
${styles.invisibleInput}
${!value && styles.invisibleInputEmpty}
`}
onChange={e => onChange(e.target.value)}
+ extraWidth={40}
value={value}
{...props}
/>
)
InvisibleNoFormInput.propTypes = {
onChange: PropTypes.func,
value: PropTypes.string,
}
InvisibleNoFormInput.defaultProps = {
onChange: () => {},
value: '',
}
export default InvisibleNoFormInput | 1 | 0.034483 | 1 | 0 |
fdeab35e4997c16515efea32d97845786d373990 | tests/test_utilities.h | tests/test_utilities.h |
std::string output_matrices(Eigen::MatrixXd expected, Eigen::MatrixXd actual) {
std::stringstream ss;
ss << "expected:\n" << expected << "\nactual:\n" << actual << std::endl;
return ss.str();
}
|
std::string output_matrices(Eigen::MatrixXd expected, Eigen::MatrixXd actual) {
std::stringstream ss;
ss << "expected:\n" << expected << "\nactual:\n" << actual << std::endl;
return ss.str();
}
/*
* allclose() function to match numpy.allclose
* https://stackoverflow.com/questions/15051367/how-to-compare-vectors-approximately-in-eige://stackoverflow.com/questions/15051367/how-to-compare-vectors-approximately-in-eigen
*/
namespace test {
template<typename DerivedA, typename DerivedB>
bool allclose(const Eigen::DenseBase<DerivedA>& a, const Eigen::DenseBase<DerivedB>& b,
const typename DerivedA::RealScalar& rtol
= Eigen::NumTraits<typename DerivedA::RealScalar>::dummy_precision(),
const typename DerivedA::RealScalar& atol
= Eigen::NumTraits<typename DerivedA::RealScalar>::epsilon()) {
return ((a.derived() - b.derived()).array().abs() <= (atol + rtol * b.derived().array().abs())).all();
}
} // namespace test
| Add allclose function for testing | Add allclose function for testing
Compare relative and absolute tolerence of matrix elements. Eigen
isApprox() functions compare matrix norms.
| C | bsd-2-clause | oliverlee/bicycle,oliverlee/bicycle | c | ## Code Before:
std::string output_matrices(Eigen::MatrixXd expected, Eigen::MatrixXd actual) {
std::stringstream ss;
ss << "expected:\n" << expected << "\nactual:\n" << actual << std::endl;
return ss.str();
}
## Instruction:
Add allclose function for testing
Compare relative and absolute tolerence of matrix elements. Eigen
isApprox() functions compare matrix norms.
## Code After:
std::string output_matrices(Eigen::MatrixXd expected, Eigen::MatrixXd actual) {
std::stringstream ss;
ss << "expected:\n" << expected << "\nactual:\n" << actual << std::endl;
return ss.str();
}
/*
* allclose() function to match numpy.allclose
* https://stackoverflow.com/questions/15051367/how-to-compare-vectors-approximately-in-eige://stackoverflow.com/questions/15051367/how-to-compare-vectors-approximately-in-eigen
*/
namespace test {
template<typename DerivedA, typename DerivedB>
bool allclose(const Eigen::DenseBase<DerivedA>& a, const Eigen::DenseBase<DerivedB>& b,
const typename DerivedA::RealScalar& rtol
= Eigen::NumTraits<typename DerivedA::RealScalar>::dummy_precision(),
const typename DerivedA::RealScalar& atol
= Eigen::NumTraits<typename DerivedA::RealScalar>::epsilon()) {
return ((a.derived() - b.derived()).array().abs() <= (atol + rtol * b.derived().array().abs())).all();
}
} // namespace test
|
std::string output_matrices(Eigen::MatrixXd expected, Eigen::MatrixXd actual) {
std::stringstream ss;
ss << "expected:\n" << expected << "\nactual:\n" << actual << std::endl;
return ss.str();
}
+
+ /*
+ * allclose() function to match numpy.allclose
+ * https://stackoverflow.com/questions/15051367/how-to-compare-vectors-approximately-in-eige://stackoverflow.com/questions/15051367/how-to-compare-vectors-approximately-in-eigen
+ */
+ namespace test {
+
+ template<typename DerivedA, typename DerivedB>
+ bool allclose(const Eigen::DenseBase<DerivedA>& a, const Eigen::DenseBase<DerivedB>& b,
+ const typename DerivedA::RealScalar& rtol
+ = Eigen::NumTraits<typename DerivedA::RealScalar>::dummy_precision(),
+ const typename DerivedA::RealScalar& atol
+ = Eigen::NumTraits<typename DerivedA::RealScalar>::epsilon()) {
+ return ((a.derived() - b.derived()).array().abs() <= (atol + rtol * b.derived().array().abs())).all();
+ }
+
+ } // namespace test | 17 | 2.833333 | 17 | 0 |
46a4d6a9cbbdc451c83beb2d7a66e4d64b533c0e | js/sensor-management.js | js/sensor-management.js | (function(){
$.ajax({
url: 'evanxd/sensors',
})
.done(function(sensors) {
var sensorList = $('#sensor-list ul');
var html = '';
sensors.forEach(function(sensor) {
var html = '<li class="collection-item">' +
'<div>' +
sensor.name + '<a href="sensor-detail.html" class="secondary-content"><i class="material-icons">info</i></a>' +
'</div>' +
'</li>';
sensorList.append(html);
});
})
.fail(function(error) {
console.error(error);
})
})();
| (function(){
$.ajax({
url: 'evanxd/sensors',
})
.done(function(sensors) {
var sensorList = $('#sensor-list ul');
var html = '';
sensors.forEach(function(sensor) {
var html = '<li class="collection-item">' +
'<div>' +
sensor.name + '<a href="sensor-detail.html?id=' + sensor._id +
'" class="secondary-content"><i class="material-icons">info</i></a>' +
'</div>' +
'</li>';
sensorList.append(html);
});
})
.fail(function(error) {
console.error(error);
})
})();
| Add id param for the sensor-detail.html page | Add id param for the sensor-detail.html page
| JavaScript | mit | sensor-web/sensorweb-frontend,sensor-web/sensorweb-frontend,evanxd/sensorweb-frontend,evanxd/sensorweb-frontend | javascript | ## Code Before:
(function(){
$.ajax({
url: 'evanxd/sensors',
})
.done(function(sensors) {
var sensorList = $('#sensor-list ul');
var html = '';
sensors.forEach(function(sensor) {
var html = '<li class="collection-item">' +
'<div>' +
sensor.name + '<a href="sensor-detail.html" class="secondary-content"><i class="material-icons">info</i></a>' +
'</div>' +
'</li>';
sensorList.append(html);
});
})
.fail(function(error) {
console.error(error);
})
})();
## Instruction:
Add id param for the sensor-detail.html page
## Code After:
(function(){
$.ajax({
url: 'evanxd/sensors',
})
.done(function(sensors) {
var sensorList = $('#sensor-list ul');
var html = '';
sensors.forEach(function(sensor) {
var html = '<li class="collection-item">' +
'<div>' +
sensor.name + '<a href="sensor-detail.html?id=' + sensor._id +
'" class="secondary-content"><i class="material-icons">info</i></a>' +
'</div>' +
'</li>';
sensorList.append(html);
});
})
.fail(function(error) {
console.error(error);
})
})();
| (function(){
$.ajax({
url: 'evanxd/sensors',
})
.done(function(sensors) {
var sensorList = $('#sensor-list ul');
var html = '';
sensors.forEach(function(sensor) {
var html = '<li class="collection-item">' +
'<div>' +
+ sensor.name + '<a href="sensor-detail.html?id=' + sensor._id +
- sensor.name + '<a href="sensor-detail.html" class="secondary-content"><i class="material-icons">info</i></a>' +
? -------------- ---------------------------
+ '" class="secondary-content"><i class="material-icons">info</i></a>' +
'</div>' +
'</li>';
sensorList.append(html);
});
})
.fail(function(error) {
console.error(error);
})
})(); | 3 | 0.15 | 2 | 1 |
ad57915e04126c08d3472149c59e9d0db0a77e55 | test/unit/CMakeLists.txt | test/unit/CMakeLists.txt | add_executable(queue queue.c)
add_executable(mhash mhash.c)
add_executable(objc_finally objc_finally.m)
add_executable(objc_catchcxx objc_catchcxx.m)
set_target_properties(mhash PROPERTIES COMPILE_FLAGS "-std=c99")
target_link_libraries(objc_finally ${LIBOBJC_LIB})
target_link_libraries(objc_catchcxx ${LIBOBJC_LIB} ${LUAJIT_LIB})
if (TARGET_OS_LINUX OR TARGET_OS_DEBIAN_FREEBSD)
target_link_libraries(objc_catchcxx dl)
endif()
| add_executable(queue queue.c)
add_executable(mhash mhash.c)
add_executable(objc_finally objc_finally.m)
add_executable(objc_catchcxx objc_catchcxx.m)
add_dependencies(objc_finally build_bundled_libs)
add_dependencies(objc_catchcxx build_bundled_libs)
set_target_properties(mhash PROPERTIES COMPILE_FLAGS "-std=c99")
target_link_libraries(objc_finally ${LIBOBJC_LIB})
target_link_libraries(objc_catchcxx ${LIBOBJC_LIB} ${LUAJIT_LIB})
if (TARGET_OS_LINUX OR TARGET_OS_DEBIAN_FREEBSD)
target_link_libraries(objc_catchcxx dl)
endif()
| Fix a build failure, caused by broken dependencies in unit tests. | Fix a build failure, caused by broken dependencies in unit tests.
| Text | bsd-2-clause | KlonD90/tarantool,nvoron23/tarantool,Sannis/tarantool,nvoron23/tarantool,dkorolev/tarantool,nvoron23/tarantool,dkorolev/tarantool,guard163/tarantool,vasilenkomike/tarantool,rtsisyk/tarantool,rtsisyk/tarantool,KlonD90/tarantool,KlonD90/tarantool,ocelot-inc/tarantool,condor-the-bird/tarantool,mejedi/tarantool,Sannis/tarantool,nvoron23/tarantool,mejedi/tarantool,condor-the-bird/tarantool,guard163/tarantool,ocelot-inc/tarantool,guard163/tarantool,nvoron23/tarantool,Sannis/tarantool,ocelot-inc/tarantool,vasilenkomike/tarantool,dkorolev/tarantool,Sannis/tarantool,Sannis/tarantool,rtsisyk/tarantool,vasilenkomike/tarantool,guard163/tarantool,ocelot-inc/tarantool,rtsisyk/tarantool,vasilenkomike/tarantool,nvoron23/tarantool,mejedi/tarantool,vasilenkomike/tarantool,condor-the-bird/tarantool,KlonD90/tarantool,mejedi/tarantool,guard163/tarantool,KlonD90/tarantool,dkorolev/tarantool,dkorolev/tarantool,condor-the-bird/tarantool,condor-the-bird/tarantool | text | ## Code Before:
add_executable(queue queue.c)
add_executable(mhash mhash.c)
add_executable(objc_finally objc_finally.m)
add_executable(objc_catchcxx objc_catchcxx.m)
set_target_properties(mhash PROPERTIES COMPILE_FLAGS "-std=c99")
target_link_libraries(objc_finally ${LIBOBJC_LIB})
target_link_libraries(objc_catchcxx ${LIBOBJC_LIB} ${LUAJIT_LIB})
if (TARGET_OS_LINUX OR TARGET_OS_DEBIAN_FREEBSD)
target_link_libraries(objc_catchcxx dl)
endif()
## Instruction:
Fix a build failure, caused by broken dependencies in unit tests.
## Code After:
add_executable(queue queue.c)
add_executable(mhash mhash.c)
add_executable(objc_finally objc_finally.m)
add_executable(objc_catchcxx objc_catchcxx.m)
add_dependencies(objc_finally build_bundled_libs)
add_dependencies(objc_catchcxx build_bundled_libs)
set_target_properties(mhash PROPERTIES COMPILE_FLAGS "-std=c99")
target_link_libraries(objc_finally ${LIBOBJC_LIB})
target_link_libraries(objc_catchcxx ${LIBOBJC_LIB} ${LUAJIT_LIB})
if (TARGET_OS_LINUX OR TARGET_OS_DEBIAN_FREEBSD)
target_link_libraries(objc_catchcxx dl)
endif()
| add_executable(queue queue.c)
add_executable(mhash mhash.c)
add_executable(objc_finally objc_finally.m)
add_executable(objc_catchcxx objc_catchcxx.m)
+ add_dependencies(objc_finally build_bundled_libs)
+ add_dependencies(objc_catchcxx build_bundled_libs)
set_target_properties(mhash PROPERTIES COMPILE_FLAGS "-std=c99")
target_link_libraries(objc_finally ${LIBOBJC_LIB})
target_link_libraries(objc_catchcxx ${LIBOBJC_LIB} ${LUAJIT_LIB})
if (TARGET_OS_LINUX OR TARGET_OS_DEBIAN_FREEBSD)
target_link_libraries(objc_catchcxx dl)
endif() | 2 | 0.2 | 2 | 0 |
31a3e8f3a0e11eccf3d3c059ba4fa279cae7fa47 | pages/operations/health-checks/0630-heat-prepare-test.rst | pages/operations/health-checks/0630-heat-prepare-test.rst |
.. _heat-test-prepare:
Preparing Heat for Testing
--------------------------
The platform tests are run in the tenant you've specified in
'OpenStack Settings' tab during OpenStack installation. By default that is
'admin' tenant. Perform the following actions under that tenant to prepare Heat
for testing of its autoscaling feature:
1. Download the following image of Linux Fedora with pre-installed
cloud-init and heat-cfntools packages:
http://fedorapeople.org/groups/heat/prebuilt-jeos-images/F17-x86_64-cfntools.qcow2
2. Then upload the image into OpenStack Image Service (Glance)
into 'admin' tenant and name it 'F17-x86_64-cfntools'.
Now Heat autoscaling is ready for testing. Note that this test creates a stack
with two instances of Linux Fedora and it may fail if Compute node doesn't
have enough resources.
|
.. _heat-test-prepare:
Preparing Heat for Testing
--------------------------
The platform tests are run in the tenant you've specified in
'OpenStack Settings' tab during OpenStack installation. By default that is
'admin' tenant. Perform the following actions under that tenant to prepare Heat
for testing of its autoscaling feature:
1. Download the `image of Linux Fedora with pre-installed cloud-init and heat-cfntools
packages <http://murano-files.mirantis.com/F17-x86_64-cfntools.qcow2>`_.
2. Then upload the image into OpenStack Image Service (Glance)
into 'admin' tenant and name it 'F17-x86_64-cfntools'.
Now Heat autoscaling is ready for testing. Note that this test creates a stack
with two instances of Linux Fedora and it may fail if Compute node doesn't
have enough resources.
| Fix URL for Heat test image | Fix URL for Heat test image
In "Preparing Heat for Testing" section in FUEL documentation
there is a link with special Fedora ISO.
This link now is invalid and I updated it.
Change-Id: I7f85840fe2b5214c157370eb55a19ea2b65b38a9
Closes-Bug: #1401940
| reStructuredText | apache-2.0 | ogelbukh/fuel-docs,phillipsio/fuel-docs,phillipsio/fuel-docs,phillipsio/fuel-docs,phillipsio/fuel-docs,stackforge/fuel-docs,ogelbukh/fuel-docs,ogelbukh/fuel-docs,stackforge/fuel-docs,ogelbukh/fuel-docs | restructuredtext | ## Code Before:
.. _heat-test-prepare:
Preparing Heat for Testing
--------------------------
The platform tests are run in the tenant you've specified in
'OpenStack Settings' tab during OpenStack installation. By default that is
'admin' tenant. Perform the following actions under that tenant to prepare Heat
for testing of its autoscaling feature:
1. Download the following image of Linux Fedora with pre-installed
cloud-init and heat-cfntools packages:
http://fedorapeople.org/groups/heat/prebuilt-jeos-images/F17-x86_64-cfntools.qcow2
2. Then upload the image into OpenStack Image Service (Glance)
into 'admin' tenant and name it 'F17-x86_64-cfntools'.
Now Heat autoscaling is ready for testing. Note that this test creates a stack
with two instances of Linux Fedora and it may fail if Compute node doesn't
have enough resources.
## Instruction:
Fix URL for Heat test image
In "Preparing Heat for Testing" section in FUEL documentation
there is a link with special Fedora ISO.
This link now is invalid and I updated it.
Change-Id: I7f85840fe2b5214c157370eb55a19ea2b65b38a9
Closes-Bug: #1401940
## Code After:
.. _heat-test-prepare:
Preparing Heat for Testing
--------------------------
The platform tests are run in the tenant you've specified in
'OpenStack Settings' tab during OpenStack installation. By default that is
'admin' tenant. Perform the following actions under that tenant to prepare Heat
for testing of its autoscaling feature:
1. Download the `image of Linux Fedora with pre-installed cloud-init and heat-cfntools
packages <http://murano-files.mirantis.com/F17-x86_64-cfntools.qcow2>`_.
2. Then upload the image into OpenStack Image Service (Glance)
into 'admin' tenant and name it 'F17-x86_64-cfntools'.
Now Heat autoscaling is ready for testing. Note that this test creates a stack
with two instances of Linux Fedora and it may fail if Compute node doesn't
have enough resources.
|
.. _heat-test-prepare:
Preparing Heat for Testing
--------------------------
The platform tests are run in the tenant you've specified in
'OpenStack Settings' tab during OpenStack installation. By default that is
'admin' tenant. Perform the following actions under that tenant to prepare Heat
for testing of its autoscaling feature:
+ 1. Download the `image of Linux Fedora with pre-installed cloud-init and heat-cfntools
+ packages <http://murano-files.mirantis.com/F17-x86_64-cfntools.qcow2>`_.
- 1. Download the following image of Linux Fedora with pre-installed
- cloud-init and heat-cfntools packages:
-
- http://fedorapeople.org/groups/heat/prebuilt-jeos-images/F17-x86_64-cfntools.qcow2
2. Then upload the image into OpenStack Image Service (Glance)
into 'admin' tenant and name it 'F17-x86_64-cfntools'.
Now Heat autoscaling is ready for testing. Note that this test creates a stack
with two instances of Linux Fedora and it may fail if Compute node doesn't
have enough resources.
| 6 | 0.26087 | 2 | 4 |
4c6b151d45a6be4fa136442aeacc742c1cc5367f | .zsh/prompt.zsh | .zsh/prompt.zsh | parse_git_dirty() {
if [[ -n $(git status -s --ignore-submodules=dirty 2> /dev/null) ]]; then
echo "%{$fg[red]%}"
else
echo "%{$fg[green]%}"
fi
}
function git_prompt_info() {
# Ignore directorys that aren't in git
if ! git ls-files >& /dev/null; then
return;
fi;
# Ignore the home directory
if [ $(git rev-parse --quiet --show-toplevel) '==' $HOME ]; then
return;
fi;
# Get the branch name and color
ref=$(git symbolic-ref HEAD 2> /dev/null) || return
echo " $(parse_git_dirty)${ref#refs/heads/}%{$reset_color%}"
}
PROMPT='%{$fg[blue]%}%c%{$reset_color%}$ '
RPROMPT='$(git_prompt_info)'
| parse_git_dirty() {
if [[ -n $(git status -s --ignore-submodules=dirty 2> /dev/null) ]]; then
echo "%{$fg[red]%}"
else
echo "%{$fg[green]%}"
fi
}
function git_prompt_info() {
# Ignore directorys that aren't in git
if ! git ls-files >& /dev/null; then
return;
fi;
# Ignore the home directory
if [ $(git rev-parse --quiet --show-toplevel) '==' $HOME ]; then
return;
fi;
# Get the branch name and color
ref=$(git symbolic-ref HEAD 2> /dev/null) || return
echo " $(parse_git_dirty)${ref#refs/heads/}%{$reset_color%}"
}
PROMPT='%{$fg[blue]%}%c%{$reset_color%}$ '
RPROMPT='$(git_prompt_info)'
# Timing App
PROMPT_TITLE='echo -ne "\033]0;${USER}@${HOSTNAME%%.*}:${PWD/#$HOME/~}\007"'
export PROMPT_COMMAND="${PROMPT_COMMAND} ${PROMPT_TITLE}; "
| Add Timing App Terminal support | Add Timing App Terminal support
| Shell | mit | soffes/dotfiles,soffes/dotfiles | shell | ## Code Before:
parse_git_dirty() {
if [[ -n $(git status -s --ignore-submodules=dirty 2> /dev/null) ]]; then
echo "%{$fg[red]%}"
else
echo "%{$fg[green]%}"
fi
}
function git_prompt_info() {
# Ignore directorys that aren't in git
if ! git ls-files >& /dev/null; then
return;
fi;
# Ignore the home directory
if [ $(git rev-parse --quiet --show-toplevel) '==' $HOME ]; then
return;
fi;
# Get the branch name and color
ref=$(git symbolic-ref HEAD 2> /dev/null) || return
echo " $(parse_git_dirty)${ref#refs/heads/}%{$reset_color%}"
}
PROMPT='%{$fg[blue]%}%c%{$reset_color%}$ '
RPROMPT='$(git_prompt_info)'
## Instruction:
Add Timing App Terminal support
## Code After:
parse_git_dirty() {
if [[ -n $(git status -s --ignore-submodules=dirty 2> /dev/null) ]]; then
echo "%{$fg[red]%}"
else
echo "%{$fg[green]%}"
fi
}
function git_prompt_info() {
# Ignore directorys that aren't in git
if ! git ls-files >& /dev/null; then
return;
fi;
# Ignore the home directory
if [ $(git rev-parse --quiet --show-toplevel) '==' $HOME ]; then
return;
fi;
# Get the branch name and color
ref=$(git symbolic-ref HEAD 2> /dev/null) || return
echo " $(parse_git_dirty)${ref#refs/heads/}%{$reset_color%}"
}
PROMPT='%{$fg[blue]%}%c%{$reset_color%}$ '
RPROMPT='$(git_prompt_info)'
# Timing App
PROMPT_TITLE='echo -ne "\033]0;${USER}@${HOSTNAME%%.*}:${PWD/#$HOME/~}\007"'
export PROMPT_COMMAND="${PROMPT_COMMAND} ${PROMPT_TITLE}; "
| parse_git_dirty() {
if [[ -n $(git status -s --ignore-submodules=dirty 2> /dev/null) ]]; then
echo "%{$fg[red]%}"
else
echo "%{$fg[green]%}"
fi
}
function git_prompt_info() {
# Ignore directorys that aren't in git
if ! git ls-files >& /dev/null; then
return;
fi;
# Ignore the home directory
if [ $(git rev-parse --quiet --show-toplevel) '==' $HOME ]; then
return;
fi;
# Get the branch name and color
ref=$(git symbolic-ref HEAD 2> /dev/null) || return
echo " $(parse_git_dirty)${ref#refs/heads/}%{$reset_color%}"
}
PROMPT='%{$fg[blue]%}%c%{$reset_color%}$ '
RPROMPT='$(git_prompt_info)'
+
+ # Timing App
+ PROMPT_TITLE='echo -ne "\033]0;${USER}@${HOSTNAME%%.*}:${PWD/#$HOME/~}\007"'
+ export PROMPT_COMMAND="${PROMPT_COMMAND} ${PROMPT_TITLE}; " | 4 | 0.153846 | 4 | 0 |
87bbd05504697f8b743850de9880cf96a0ff5738 | spec/test_app/db/migrate/20170815215418_create_articles.rb | spec/test_app/db/migrate/20170815215418_create_articles.rb | class CreateArticles < ActiveRecord::Migration[5.1]
def change
create_table :articles do |t|
t.string :title
t.text :text
t.timestamps
end
end
end
| superclass = if Gem::Version.new(Rails.version) >= Gem::Version.new(5)
ActiveRecord::Migration[5.0]
else
ActiveRecord::Migration
end
class CreateArticles < superclass
def change
create_table :articles do |t|
t.string :title
t.text :text
t.timestamps
end
end
end
| Fix migration class to be compatible with Rails versions < 5 | Fix migration class to be compatible with Rails versions < 5
| Ruby | apache-2.0 | iaintshine/ruby-rails-tracer,iaintshine/ruby-rails-tracer,iaintshine/ruby-rails-tracer,iaintshine/ruby-rails-tracer | ruby | ## Code Before:
class CreateArticles < ActiveRecord::Migration[5.1]
def change
create_table :articles do |t|
t.string :title
t.text :text
t.timestamps
end
end
end
## Instruction:
Fix migration class to be compatible with Rails versions < 5
## Code After:
superclass = if Gem::Version.new(Rails.version) >= Gem::Version.new(5)
ActiveRecord::Migration[5.0]
else
ActiveRecord::Migration
end
class CreateArticles < superclass
def change
create_table :articles do |t|
t.string :title
t.text :text
t.timestamps
end
end
end
| - class CreateArticles < ActiveRecord::Migration[5.1]
+ superclass = if Gem::Version.new(Rails.version) >= Gem::Version.new(5)
+ ActiveRecord::Migration[5.0]
+ else
+ ActiveRecord::Migration
+ end
+
+ class CreateArticles < superclass
def change
create_table :articles do |t|
t.string :title
t.text :text
t.timestamps
end
end
end | 8 | 0.8 | 7 | 1 |
31619ed800459c0615ea78979369cd856b1e24f5 | test/controllers/store_controller_test.rb | test/controllers/store_controller_test.rb | require 'test_helper'
class StoreControllerTest < ActionController::TestCase
test "should get index" do
get :index
assert_response :success
end
end
| require 'test_helper'
class StoreControllerTest < ActionController::TestCase
test "should get index" do
get :index
assert_response :success
assert_select '#columns #side a', minimum: 4
assert_select '#main .entry', 3
assert_select 'h3', 'Programming Ruby 1.9'
assert_select '.price', /\$[,\d]+\.\d\d/
end
end
| Add functional test for store_controller | Add functional test for store_controller
| Ruby | mit | bdisney/eShop,bdisney/eShop,bdisney/eShop | ruby | ## Code Before:
require 'test_helper'
class StoreControllerTest < ActionController::TestCase
test "should get index" do
get :index
assert_response :success
end
end
## Instruction:
Add functional test for store_controller
## Code After:
require 'test_helper'
class StoreControllerTest < ActionController::TestCase
test "should get index" do
get :index
assert_response :success
assert_select '#columns #side a', minimum: 4
assert_select '#main .entry', 3
assert_select 'h3', 'Programming Ruby 1.9'
assert_select '.price', /\$[,\d]+\.\d\d/
end
end
| require 'test_helper'
class StoreControllerTest < ActionController::TestCase
test "should get index" do
get :index
assert_response :success
+ assert_select '#columns #side a', minimum: 4
+ assert_select '#main .entry', 3
+ assert_select 'h3', 'Programming Ruby 1.9'
+ assert_select '.price', /\$[,\d]+\.\d\d/
end
end | 4 | 0.444444 | 4 | 0 |
0fba3bb61de2d701499500c090566a2e11752fc4 | zsh/versioncontrol.zsh | zsh/versioncontrol.zsh | if [[ -x `which git` ]]; then
export GIT_PAGER="`which less`"
fi
# I am a lazy bum
if [[ -x `which git` ]]; then
alias g='git'
fi
| if [[ -x `which git` ]]; then
export GIT_PAGER=less
fi
# I am a lazy bum
if [[ -x `which git` ]]; then
alias g='git'
fi
| Fix bug 'less: aliased to /usr/bin/less' output on git diff | Fix bug 'less: aliased to /usr/bin/less' output on git diff
| Shell | mit | pygospa/dotfiles,pygospa/dotfiles,pygospa/dotfiles,pygospa/dotfiles | shell | ## Code Before:
if [[ -x `which git` ]]; then
export GIT_PAGER="`which less`"
fi
# I am a lazy bum
if [[ -x `which git` ]]; then
alias g='git'
fi
## Instruction:
Fix bug 'less: aliased to /usr/bin/less' output on git diff
## Code After:
if [[ -x `which git` ]]; then
export GIT_PAGER=less
fi
# I am a lazy bum
if [[ -x `which git` ]]; then
alias g='git'
fi
| if [[ -x `which git` ]]; then
- export GIT_PAGER="`which less`"
? -------- --
+ export GIT_PAGER=less
fi
# I am a lazy bum
if [[ -x `which git` ]]; then
alias g='git'
fi | 2 | 0.25 | 1 | 1 |
0205adcedb596312a68eb796e0ecd57e42b5bf75 | npc/json.go | npc/json.go | package npc
import (
"encoding/json"
)
func js(v interface{}) string {
a, _ := json.Marshal(v)
return string(a)
}
| package npc
import (
"encoding/json"
)
// Return JSON suitable for logging an API object.
func js(v interface{}) string {
// Get the raw JSON
a, _ := json.Marshal(v)
// Convert this back into a tree of key-value maps
var m map[string]interface{}
if err := json.Unmarshal(a, &m); err != nil {
// If that didn't work, just return the raw version
return string(a)
}
// Trim some bulk, and potentially sensitive areas
withMap(m["metadata"], func(status map[string]interface{}) {
delete(status, "ownerReferences")
})
withMap(m["spec"], func(spec map[string]interface{}) {
delete(spec, "tolerations")
delete(spec, "volumes")
rangeSlice(spec["containers"], func(container map[string]interface{}) {
delete(container, "args")
delete(container, "command")
delete(container, "env")
delete(container, "livenessProbe")
delete(container, "resources")
delete(container, "securityContext")
delete(container, "volumeMounts")
})
})
withMap(m["status"], func(status map[string]interface{}) {
delete(status, "containerStatuses")
})
// Now marshall what's left to JSON
a, _ = json.Marshal(m)
return string(a)
}
// Helper function: operate on a map node from a tree of key-value maps
func withMap(m interface{}, f func(map[string]interface{})) {
if v, ok := m.(map[string]interface{}); ok {
f(v)
}
}
// Helper function: operate on all nodes under i which is a slice in a
// tree of key-value maps
func rangeSlice(i interface{}, f func(map[string]interface{})) {
if s, ok := i.([]interface{}); ok {
for _, v := range s {
if m, ok := v.(map[string]interface{}); ok {
f(m)
}
}
}
}
| Trim the JSON logged for Kubernetes objects by weave-npc | Trim the JSON logged for Kubernetes objects by weave-npc
Remove things like "args" and "env" which bulk the logs, aren't
relevant to the function of weave-npc, and may include sensitive
information.
Use a black-list rather than a white-list because we can't be sure
what information will be necessary in future.
| Go | apache-2.0 | weaveworks/weave,weaveworks/weave,weaveworks/weave,weaveworks/weave | go | ## Code Before:
package npc
import (
"encoding/json"
)
func js(v interface{}) string {
a, _ := json.Marshal(v)
return string(a)
}
## Instruction:
Trim the JSON logged for Kubernetes objects by weave-npc
Remove things like "args" and "env" which bulk the logs, aren't
relevant to the function of weave-npc, and may include sensitive
information.
Use a black-list rather than a white-list because we can't be sure
what information will be necessary in future.
## Code After:
package npc
import (
"encoding/json"
)
// Return JSON suitable for logging an API object.
func js(v interface{}) string {
// Get the raw JSON
a, _ := json.Marshal(v)
// Convert this back into a tree of key-value maps
var m map[string]interface{}
if err := json.Unmarshal(a, &m); err != nil {
// If that didn't work, just return the raw version
return string(a)
}
// Trim some bulk, and potentially sensitive areas
withMap(m["metadata"], func(status map[string]interface{}) {
delete(status, "ownerReferences")
})
withMap(m["spec"], func(spec map[string]interface{}) {
delete(spec, "tolerations")
delete(spec, "volumes")
rangeSlice(spec["containers"], func(container map[string]interface{}) {
delete(container, "args")
delete(container, "command")
delete(container, "env")
delete(container, "livenessProbe")
delete(container, "resources")
delete(container, "securityContext")
delete(container, "volumeMounts")
})
})
withMap(m["status"], func(status map[string]interface{}) {
delete(status, "containerStatuses")
})
// Now marshall what's left to JSON
a, _ = json.Marshal(m)
return string(a)
}
// Helper function: operate on a map node from a tree of key-value maps
func withMap(m interface{}, f func(map[string]interface{})) {
if v, ok := m.(map[string]interface{}); ok {
f(v)
}
}
// Helper function: operate on all nodes under i which is a slice in a
// tree of key-value maps
func rangeSlice(i interface{}, f func(map[string]interface{})) {
if s, ok := i.([]interface{}); ok {
for _, v := range s {
if m, ok := v.(map[string]interface{}); ok {
f(m)
}
}
}
}
| package npc
import (
"encoding/json"
)
+ // Return JSON suitable for logging an API object.
func js(v interface{}) string {
+ // Get the raw JSON
a, _ := json.Marshal(v)
+ // Convert this back into a tree of key-value maps
+ var m map[string]interface{}
+ if err := json.Unmarshal(a, &m); err != nil {
+ // If that didn't work, just return the raw version
+ return string(a)
+ }
+ // Trim some bulk, and potentially sensitive areas
+ withMap(m["metadata"], func(status map[string]interface{}) {
+ delete(status, "ownerReferences")
+ })
+ withMap(m["spec"], func(spec map[string]interface{}) {
+ delete(spec, "tolerations")
+ delete(spec, "volumes")
+ rangeSlice(spec["containers"], func(container map[string]interface{}) {
+ delete(container, "args")
+ delete(container, "command")
+ delete(container, "env")
+ delete(container, "livenessProbe")
+ delete(container, "resources")
+ delete(container, "securityContext")
+ delete(container, "volumeMounts")
+ })
+ })
+ withMap(m["status"], func(status map[string]interface{}) {
+ delete(status, "containerStatuses")
+ })
+ // Now marshall what's left to JSON
+ a, _ = json.Marshal(m)
return string(a)
}
+
+ // Helper function: operate on a map node from a tree of key-value maps
+ func withMap(m interface{}, f func(map[string]interface{})) {
+ if v, ok := m.(map[string]interface{}); ok {
+ f(v)
+ }
+ }
+
+ // Helper function: operate on all nodes under i which is a slice in a
+ // tree of key-value maps
+ func rangeSlice(i interface{}, f func(map[string]interface{})) {
+ if s, ok := i.([]interface{}); ok {
+ for _, v := range s {
+ if m, ok := v.(map[string]interface{}); ok {
+ f(m)
+ }
+ }
+ }
+ } | 49 | 4.9 | 49 | 0 |
77b6b3423a2e4f095f69cf859d6845eea6f4cc49 | app/helpers/application_helper.rb | app/helpers/application_helper.rb | module ApplicationHelper
def formatted_tasks(tasks)
tasks.split(';').map(&:strip).map(&:capitalize)
end
def pluralize_without_count(count, noun)
count == 1 ? "#{noun}" : "#{noun.pluralize}"
end
end
| module ApplicationHelper
def formatted_tasks(tasks)
tasks.split(';').map(&:strip).map(&:capitalize)
end
def pluralize_without_count(count, noun)
count == 1 ? noun : noun.to_s.pluralize
end
end
| Remove string interpolation from ApplicationHelper | Remove string interpolation from ApplicationHelper | Ruby | apache-2.0 | lm-tools/work-you-could-do,lm-tools/work-you-could-do,lm-tools/work-you-could-do,lm-tools/work-you-could-do | ruby | ## Code Before:
module ApplicationHelper
def formatted_tasks(tasks)
tasks.split(';').map(&:strip).map(&:capitalize)
end
def pluralize_without_count(count, noun)
count == 1 ? "#{noun}" : "#{noun.pluralize}"
end
end
## Instruction:
Remove string interpolation from ApplicationHelper
## Code After:
module ApplicationHelper
def formatted_tasks(tasks)
tasks.split(';').map(&:strip).map(&:capitalize)
end
def pluralize_without_count(count, noun)
count == 1 ? noun : noun.to_s.pluralize
end
end
| module ApplicationHelper
def formatted_tasks(tasks)
tasks.split(';').map(&:strip).map(&:capitalize)
end
def pluralize_without_count(count, noun)
- count == 1 ? "#{noun}" : "#{noun.pluralize}"
? --- -- --- --
+ count == 1 ? noun : noun.to_s.pluralize
? +++++
end
end | 2 | 0.222222 | 1 | 1 |
a581d98fce6f891d0f78e1ab3dcff70b851d9d55 | server/router.js | server/router.js | const express = require('express');
const router = express.Router();
const authController = require('./authController');
const locationsController = require('./locationsController');
const locationTypeController = require('./locationTypeController');
const happyHoursController = require('./happyHoursController');
router.post('/v1/auth', authController.getAuth);
router.post(
'/v1/auth/test',
authController.checkAuth,
authController.testCheckAuth
);
router.post(
'/v1/locations',
authController.checkAuth,
locationsController.addLocation
);
router.get('/v1/locations', locationsController.getLocations);
router.delete(
'/v1/locations/:id',
authController.checkAuth,
locationsController.deleteLocation
);
router.get(
'/v1/locations/:id/happyhours',
happyHoursController.getHappyHoursByLocation
);
router.post(
'/v1/happyhours',
authController.checkAuth,
happyHoursController.addHappyHours
);
router.put(
'/v1/happyhours/:id',
authController.checkAuth,
happyHoursController.updateHappyHours
);
router.delete(
'/v1/happyhours/:id',
authController.checkAuth,
happyHoursController.deleteHappyHours
);
router.get('/v1/locationtypes', locationTypeController.getLocationTypes);
module.exports = router;
| const express = require('express');
const router = express.Router();
const authController = require('./authController');
const locationsController = require('./locationsController');
const locationTypeController = require('./locationTypeController');
const happyHoursController = require('./happyHoursController');
router.post('/v1/auth', authController.getAuth);
router.post(
'/v1/auth/test',
authController.checkAuth,
authController.testCheckAuth
);
router.post(
'/v1/locations',
authController.checkAuth,
locationsController.addLocation
);
router.get('/v1/locations', locationsController.getLocations);
router.get('/v1/locations/:id', locationsController.getLocationById);
router.delete(
'/v1/locations/:id',
authController.checkAuth,
locationsController.deleteLocation
);
router.get(
'/v1/locations/:id/happyhours',
happyHoursController.getHappyHoursByLocation
);
router.post(
'/v1/happyhours',
authController.checkAuth,
happyHoursController.addHappyHours
);
router.put(
'/v1/happyhours/:id',
authController.checkAuth,
happyHoursController.updateHappyHours
);
router.delete(
'/v1/happyhours/:id',
authController.checkAuth,
happyHoursController.deleteHappyHours
);
router.get('/v1/locationtypes', locationTypeController.getLocationTypes);
module.exports = router;
| Add GET location by ID route. | Add GET location by ID route.
| JavaScript | mit | the-oem/happy-hour-power,the-oem/happy-hour-power | javascript | ## Code Before:
const express = require('express');
const router = express.Router();
const authController = require('./authController');
const locationsController = require('./locationsController');
const locationTypeController = require('./locationTypeController');
const happyHoursController = require('./happyHoursController');
router.post('/v1/auth', authController.getAuth);
router.post(
'/v1/auth/test',
authController.checkAuth,
authController.testCheckAuth
);
router.post(
'/v1/locations',
authController.checkAuth,
locationsController.addLocation
);
router.get('/v1/locations', locationsController.getLocations);
router.delete(
'/v1/locations/:id',
authController.checkAuth,
locationsController.deleteLocation
);
router.get(
'/v1/locations/:id/happyhours',
happyHoursController.getHappyHoursByLocation
);
router.post(
'/v1/happyhours',
authController.checkAuth,
happyHoursController.addHappyHours
);
router.put(
'/v1/happyhours/:id',
authController.checkAuth,
happyHoursController.updateHappyHours
);
router.delete(
'/v1/happyhours/:id',
authController.checkAuth,
happyHoursController.deleteHappyHours
);
router.get('/v1/locationtypes', locationTypeController.getLocationTypes);
module.exports = router;
## Instruction:
Add GET location by ID route.
## Code After:
const express = require('express');
const router = express.Router();
const authController = require('./authController');
const locationsController = require('./locationsController');
const locationTypeController = require('./locationTypeController');
const happyHoursController = require('./happyHoursController');
router.post('/v1/auth', authController.getAuth);
router.post(
'/v1/auth/test',
authController.checkAuth,
authController.testCheckAuth
);
router.post(
'/v1/locations',
authController.checkAuth,
locationsController.addLocation
);
router.get('/v1/locations', locationsController.getLocations);
router.get('/v1/locations/:id', locationsController.getLocationById);
router.delete(
'/v1/locations/:id',
authController.checkAuth,
locationsController.deleteLocation
);
router.get(
'/v1/locations/:id/happyhours',
happyHoursController.getHappyHoursByLocation
);
router.post(
'/v1/happyhours',
authController.checkAuth,
happyHoursController.addHappyHours
);
router.put(
'/v1/happyhours/:id',
authController.checkAuth,
happyHoursController.updateHappyHours
);
router.delete(
'/v1/happyhours/:id',
authController.checkAuth,
happyHoursController.deleteHappyHours
);
router.get('/v1/locationtypes', locationTypeController.getLocationTypes);
module.exports = router;
| const express = require('express');
const router = express.Router();
const authController = require('./authController');
const locationsController = require('./locationsController');
const locationTypeController = require('./locationTypeController');
const happyHoursController = require('./happyHoursController');
router.post('/v1/auth', authController.getAuth);
router.post(
'/v1/auth/test',
authController.checkAuth,
authController.testCheckAuth
);
router.post(
'/v1/locations',
authController.checkAuth,
locationsController.addLocation
);
router.get('/v1/locations', locationsController.getLocations);
+ router.get('/v1/locations/:id', locationsController.getLocationById);
router.delete(
'/v1/locations/:id',
authController.checkAuth,
locationsController.deleteLocation
);
router.get(
'/v1/locations/:id/happyhours',
happyHoursController.getHappyHoursByLocation
);
router.post(
'/v1/happyhours',
authController.checkAuth,
happyHoursController.addHappyHours
);
router.put(
'/v1/happyhours/:id',
authController.checkAuth,
happyHoursController.updateHappyHours
);
router.delete(
'/v1/happyhours/:id',
authController.checkAuth,
happyHoursController.deleteHappyHours
);
router.get('/v1/locationtypes', locationTypeController.getLocationTypes);
module.exports = router; | 1 | 0.019608 | 1 | 0 |
87adce0d38a35398ee200484d02d2975a6b4126e | src/Allegro/C.hsc | src/Allegro/C.hsc | {-# LANGUAGE ForeignFunctionInterface #-}
module Allegro.C (al_init) where
import Foreign
import Foreign.C.Types
#include <allegro5/allegro.h>
al_init :: IO Bool
al_init = al_install_system #{const ALLEGRO_VERSION_INT} fp_atexit
foreign import ccall "al_install_system"
al_install_system :: CInt -> FunPtr (Ptr () -> IO CInt) -> IO Bool
foreign import ccall "&atexit"
fp_atexit :: FunPtr (Ptr () -> IO CInt)
| {-# LANGUAGE ForeignFunctionInterface #-}
module Allegro.C (al_init) where
import Foreign
import Foreign.C.Types
#include <allegro5/allegro.h>
al_init :: IO Bool
al_init = al_install_system #{const ALLEGRO_VERSION_INT} fp_atexit
foreign import ccall unsafe "al_install_system"
al_install_system :: CInt -> FunPtr (Ptr () -> IO CInt) -> IO Bool
foreign import ccall unsafe "&atexit"
fp_atexit :: FunPtr (Ptr () -> IO CInt)
| Declare imports as unsafe (they are not calling back) | Declare imports as unsafe (they are not calling back)
| Haskell | bsd-3-clause | yav/allegro | haskell | ## Code Before:
{-# LANGUAGE ForeignFunctionInterface #-}
module Allegro.C (al_init) where
import Foreign
import Foreign.C.Types
#include <allegro5/allegro.h>
al_init :: IO Bool
al_init = al_install_system #{const ALLEGRO_VERSION_INT} fp_atexit
foreign import ccall "al_install_system"
al_install_system :: CInt -> FunPtr (Ptr () -> IO CInt) -> IO Bool
foreign import ccall "&atexit"
fp_atexit :: FunPtr (Ptr () -> IO CInt)
## Instruction:
Declare imports as unsafe (they are not calling back)
## Code After:
{-# LANGUAGE ForeignFunctionInterface #-}
module Allegro.C (al_init) where
import Foreign
import Foreign.C.Types
#include <allegro5/allegro.h>
al_init :: IO Bool
al_init = al_install_system #{const ALLEGRO_VERSION_INT} fp_atexit
foreign import ccall unsafe "al_install_system"
al_install_system :: CInt -> FunPtr (Ptr () -> IO CInt) -> IO Bool
foreign import ccall unsafe "&atexit"
fp_atexit :: FunPtr (Ptr () -> IO CInt)
| {-# LANGUAGE ForeignFunctionInterface #-}
module Allegro.C (al_init) where
import Foreign
import Foreign.C.Types
#include <allegro5/allegro.h>
al_init :: IO Bool
al_init = al_install_system #{const ALLEGRO_VERSION_INT} fp_atexit
- foreign import ccall "al_install_system"
+ foreign import ccall unsafe "al_install_system"
? +++++++
al_install_system :: CInt -> FunPtr (Ptr () -> IO CInt) -> IO Bool
- foreign import ccall "&atexit"
+ foreign import ccall unsafe "&atexit"
? +++++++
fp_atexit :: FunPtr (Ptr () -> IO CInt)
| 4 | 0.190476 | 2 | 2 |
d3b372597697e14d0e4c7984255691b3d5235356 | Cargo.toml | Cargo.toml | [package]
name = "rustfm-scrobble"
version = "0.9.2"
authors = ["David Futcher <david@futcher.io>"]
description = "Last.fm Scrobble library for Rust"
repository = "https://github.com/bobbo/rustfm-scrobble"
keywords = ["lastfm", "last-fm", "scrobble", "api"]
license = "MIT"
readme = "README.md"
categories = ["api-bindings", "multimedia"]
edition = "2018"
[dependencies]
reqwest = "0.9.15"
rust-crypto = "0.2.36"
serde = "1.0.2"
serde_derive = "1.0.2"
serde_json = "1.0.2"
wrapped-vec = "0.2.1"
| [package]
name = "rustfm-scrobble"
version = "0.9.2"
authors = ["David Futcher <david@futcher.io>"]
description = "Last.fm Scrobble library for Rust"
repository = "https://github.com/bobbo/rustfm-scrobble"
keywords = ["lastfm", "last-fm", "scrobble", "api"]
license = "MIT"
readme = "README.md"
categories = ["api-bindings", "multimedia"]
edition = "2018"
[dependencies]
reqwest = "0.9.15"
rust-crypto = "0.2.36"
serde = { version = "1", features = ["derive"] }
serde_json = "1.0.2"
wrapped-vec = "0.2.1"
| Use serde_derive as a serde's feature | Use serde_derive as a serde's feature
| TOML | mit | bobbo/rustfm-scrobble | toml | ## Code Before:
[package]
name = "rustfm-scrobble"
version = "0.9.2"
authors = ["David Futcher <david@futcher.io>"]
description = "Last.fm Scrobble library for Rust"
repository = "https://github.com/bobbo/rustfm-scrobble"
keywords = ["lastfm", "last-fm", "scrobble", "api"]
license = "MIT"
readme = "README.md"
categories = ["api-bindings", "multimedia"]
edition = "2018"
[dependencies]
reqwest = "0.9.15"
rust-crypto = "0.2.36"
serde = "1.0.2"
serde_derive = "1.0.2"
serde_json = "1.0.2"
wrapped-vec = "0.2.1"
## Instruction:
Use serde_derive as a serde's feature
## Code After:
[package]
name = "rustfm-scrobble"
version = "0.9.2"
authors = ["David Futcher <david@futcher.io>"]
description = "Last.fm Scrobble library for Rust"
repository = "https://github.com/bobbo/rustfm-scrobble"
keywords = ["lastfm", "last-fm", "scrobble", "api"]
license = "MIT"
readme = "README.md"
categories = ["api-bindings", "multimedia"]
edition = "2018"
[dependencies]
reqwest = "0.9.15"
rust-crypto = "0.2.36"
serde = { version = "1", features = ["derive"] }
serde_json = "1.0.2"
wrapped-vec = "0.2.1"
| [package]
name = "rustfm-scrobble"
version = "0.9.2"
authors = ["David Futcher <david@futcher.io>"]
description = "Last.fm Scrobble library for Rust"
repository = "https://github.com/bobbo/rustfm-scrobble"
keywords = ["lastfm", "last-fm", "scrobble", "api"]
license = "MIT"
readme = "README.md"
categories = ["api-bindings", "multimedia"]
edition = "2018"
[dependencies]
reqwest = "0.9.15"
rust-crypto = "0.2.36"
+ serde = { version = "1", features = ["derive"] }
- serde = "1.0.2"
- serde_derive = "1.0.2"
serde_json = "1.0.2"
wrapped-vec = "0.2.1" | 3 | 0.157895 | 1 | 2 |
14f28da022fd7b60bbee02f23d956f73f2dda691 | app/validators/gov_uk_url_format_validator.rb | app/validators/gov_uk_url_format_validator.rb | class GovUkUrlFormatValidator < ActiveModel::EachValidator
EXTERNAL_HOST_ALLOW_LIST = %w[.gov.uk .judiciary.uk .nhs.uk .ukri.org .nationalhighways.co.uk].freeze
def validate_each(record, attribute, value)
unless self.class.matches_gov_uk?(value) || matches_allow_list?(value)
record.errors.add(attribute, message: failure_message)
end
end
def self.matches_gov_uk?(value)
%r{\A#{Whitehall.public_protocol}://#{Whitehall.public_host}/}.match?(value)
end
private
def failure_message
options[:message] || "must be in the form of #{Whitehall.public_protocol}://#{Whitehall.public_host}/example"
end
def matches_allow_list?(value)
uri = URI.parse(value)
uri.host&.end_with?(*EXTERNAL_HOST_ALLOW_LIST)
rescue URI::InvalidURIError
false
end
end
| class GovUkUrlFormatValidator < ActiveModel::EachValidator
EXTERNAL_HOST_ALLOW_LIST = %w[.gov.uk .judiciary.uk .nhs.uk .ukri.org .nationalhighways.co.uk .protectuk.police.uk].freeze
def validate_each(record, attribute, value)
unless self.class.matches_gov_uk?(value) || matches_allow_list?(value)
record.errors.add(attribute, message: failure_message)
end
end
def self.matches_gov_uk?(value)
%r{\A#{Whitehall.public_protocol}://#{Whitehall.public_host}/}.match?(value)
end
private
def failure_message
options[:message] || "must be in the form of #{Whitehall.public_protocol}://#{Whitehall.public_host}/example"
end
def matches_allow_list?(value)
uri = URI.parse(value)
uri.host&.end_with?(*EXTERNAL_HOST_ALLOW_LIST)
rescue URI::InvalidURIError
false
end
end
| Add .protectuk.police.uk to redirect allow list | Add .protectuk.police.uk to redirect allow list
This domain has been approved to host content off of GOV.UK. Content designers need to redirect existing content to this external domain. | Ruby | mit | alphagov/whitehall,alphagov/whitehall,alphagov/whitehall,alphagov/whitehall | ruby | ## Code Before:
class GovUkUrlFormatValidator < ActiveModel::EachValidator
EXTERNAL_HOST_ALLOW_LIST = %w[.gov.uk .judiciary.uk .nhs.uk .ukri.org .nationalhighways.co.uk].freeze
def validate_each(record, attribute, value)
unless self.class.matches_gov_uk?(value) || matches_allow_list?(value)
record.errors.add(attribute, message: failure_message)
end
end
def self.matches_gov_uk?(value)
%r{\A#{Whitehall.public_protocol}://#{Whitehall.public_host}/}.match?(value)
end
private
def failure_message
options[:message] || "must be in the form of #{Whitehall.public_protocol}://#{Whitehall.public_host}/example"
end
def matches_allow_list?(value)
uri = URI.parse(value)
uri.host&.end_with?(*EXTERNAL_HOST_ALLOW_LIST)
rescue URI::InvalidURIError
false
end
end
## Instruction:
Add .protectuk.police.uk to redirect allow list
This domain has been approved to host content off of GOV.UK. Content designers need to redirect existing content to this external domain.
## Code After:
class GovUkUrlFormatValidator < ActiveModel::EachValidator
EXTERNAL_HOST_ALLOW_LIST = %w[.gov.uk .judiciary.uk .nhs.uk .ukri.org .nationalhighways.co.uk .protectuk.police.uk].freeze
def validate_each(record, attribute, value)
unless self.class.matches_gov_uk?(value) || matches_allow_list?(value)
record.errors.add(attribute, message: failure_message)
end
end
def self.matches_gov_uk?(value)
%r{\A#{Whitehall.public_protocol}://#{Whitehall.public_host}/}.match?(value)
end
private
def failure_message
options[:message] || "must be in the form of #{Whitehall.public_protocol}://#{Whitehall.public_host}/example"
end
def matches_allow_list?(value)
uri = URI.parse(value)
uri.host&.end_with?(*EXTERNAL_HOST_ALLOW_LIST)
rescue URI::InvalidURIError
false
end
end
| class GovUkUrlFormatValidator < ActiveModel::EachValidator
- EXTERNAL_HOST_ALLOW_LIST = %w[.gov.uk .judiciary.uk .nhs.uk .ukri.org .nationalhighways.co.uk].freeze
+ EXTERNAL_HOST_ALLOW_LIST = %w[.gov.uk .judiciary.uk .nhs.uk .ukri.org .nationalhighways.co.uk .protectuk.police.uk].freeze
? +++++++++++++++++++++
def validate_each(record, attribute, value)
unless self.class.matches_gov_uk?(value) || matches_allow_list?(value)
record.errors.add(attribute, message: failure_message)
end
end
def self.matches_gov_uk?(value)
%r{\A#{Whitehall.public_protocol}://#{Whitehall.public_host}/}.match?(value)
end
private
def failure_message
options[:message] || "must be in the form of #{Whitehall.public_protocol}://#{Whitehall.public_host}/example"
end
def matches_allow_list?(value)
uri = URI.parse(value)
uri.host&.end_with?(*EXTERNAL_HOST_ALLOW_LIST)
rescue URI::InvalidURIError
false
end
end | 2 | 0.076923 | 1 | 1 |
809c60bbcec764ce8068515dac7d853d1d2771a6 | pgf+/include/gf/IOException.h | pgf+/include/gf/IOException.h | //
// IOException.h
// pgf+
//
// Created by Emil Djupfeldt on 2012-06-22.
// Copyright (c) 2012 Chalmers University of Technology. All rights reserved.
//
#ifndef pgf__IOException_h
#define pgf__IOException_h
#include <gf/Exception.h>
namespace gf {
class IOException : public Exception {
private:
public:
IOException();
IOException(const std::string& message);
virtual ~IOException();
};
}
#endif
| //
// IOException.h
// pgf+
//
// Created by Emil Djupfeldt on 2012-06-22.
// Copyright (c) 2012 Chalmers University of Technology. All rights reserved.
//
#ifndef pgf__IOException_h
#define pgf__IOException_h
#include <gf/Exception.h>
namespace gf {
class IOException : public Exception {
private:
public:
IOException();
IOException(const std::string& message);
IOException(int err);
virtual ~IOException();
};
}
#endif
| Add a constructor taking an c error number. | Add a constructor taking an c error number.
| C | bsd-2-clause | egladil/mscthesis,egladil/mscthesis,egladil/mscthesis | c | ## Code Before:
//
// IOException.h
// pgf+
//
// Created by Emil Djupfeldt on 2012-06-22.
// Copyright (c) 2012 Chalmers University of Technology. All rights reserved.
//
#ifndef pgf__IOException_h
#define pgf__IOException_h
#include <gf/Exception.h>
namespace gf {
class IOException : public Exception {
private:
public:
IOException();
IOException(const std::string& message);
virtual ~IOException();
};
}
#endif
## Instruction:
Add a constructor taking an c error number.
## Code After:
//
// IOException.h
// pgf+
//
// Created by Emil Djupfeldt on 2012-06-22.
// Copyright (c) 2012 Chalmers University of Technology. All rights reserved.
//
#ifndef pgf__IOException_h
#define pgf__IOException_h
#include <gf/Exception.h>
namespace gf {
class IOException : public Exception {
private:
public:
IOException();
IOException(const std::string& message);
IOException(int err);
virtual ~IOException();
};
}
#endif
| //
// IOException.h
// pgf+
//
// Created by Emil Djupfeldt on 2012-06-22.
// Copyright (c) 2012 Chalmers University of Technology. All rights reserved.
//
#ifndef pgf__IOException_h
#define pgf__IOException_h
#include <gf/Exception.h>
namespace gf {
class IOException : public Exception {
private:
public:
IOException();
IOException(const std::string& message);
+ IOException(int err);
virtual ~IOException();
};
}
#endif | 1 | 0.037037 | 1 | 0 |
c406ff21396cbc1054ee810a3ee250d75e77cf38 | bench/app-bench.sh | bench/app-bench.sh |
if [ $# -eq 0 ]
then
echo "$0 top-dir"
exit 1
fi
echo "=== app-bench $1 ==="
cd $1
echo "=== git clone ==="
# time -p git clone git@g.csail.mit.edu:fscq
time -p git clone ~/fscq
echo "=== compile xv6 ==="
cd fscq/xv6
time -p make
echo "=== compile lfs bench ==="
cd ../bench/LFStest
time -p make
echo "=== run lfs large ==="
./largefile -f 1 -i 1 $1
echo "=== cleanup ==="
#cd $1
#time -p rm -rf *
|
if [ $# -ne 2 ]
then
echo "$0 fscq-repo top-dir"
exit 1
fi
echo "=== app-bench $1 $2 ==="
cd $2
echo "=== git clone ==="
time -p git clone $1
echo "=== compile xv6 ==="
cd fscq/xv6
time -p make
echo "=== compile lfs bench ==="
cd ../bench/LFStest
time -p make
echo "=== run lfs large ==="
./largefile -f 1 -i 1 $2
echo "=== cleanup ==="
cd $2
time -p rm -rf fscq/*
| Update script to take fscq-repo as an argument. xv6 works now when specifying a pathname to the repo, instead of url. | Update script to take fscq-repo as an argument. xv6 works
now when specifying a pathname to the repo, instead of url.
| Shell | mit | mit-pdos/fscq-impl,mit-pdos/fscq-impl,mit-pdos/fscq-impl,mit-pdos/fscq-impl,mit-pdos/fscq-impl | shell | ## Code Before:
if [ $# -eq 0 ]
then
echo "$0 top-dir"
exit 1
fi
echo "=== app-bench $1 ==="
cd $1
echo "=== git clone ==="
# time -p git clone git@g.csail.mit.edu:fscq
time -p git clone ~/fscq
echo "=== compile xv6 ==="
cd fscq/xv6
time -p make
echo "=== compile lfs bench ==="
cd ../bench/LFStest
time -p make
echo "=== run lfs large ==="
./largefile -f 1 -i 1 $1
echo "=== cleanup ==="
#cd $1
#time -p rm -rf *
## Instruction:
Update script to take fscq-repo as an argument. xv6 works
now when specifying a pathname to the repo, instead of url.
## Code After:
if [ $# -ne 2 ]
then
echo "$0 fscq-repo top-dir"
exit 1
fi
echo "=== app-bench $1 $2 ==="
cd $2
echo "=== git clone ==="
time -p git clone $1
echo "=== compile xv6 ==="
cd fscq/xv6
time -p make
echo "=== compile lfs bench ==="
cd ../bench/LFStest
time -p make
echo "=== run lfs large ==="
./largefile -f 1 -i 1 $2
echo "=== cleanup ==="
cd $2
time -p rm -rf fscq/*
|
- if [ $# -eq 0 ]
? - ^
+ if [ $# -ne 2 ]
? + ^
then
- echo "$0 top-dir"
+ echo "$0 fscq-repo top-dir"
? ++++++++++
exit 1
fi
- echo "=== app-bench $1 ==="
+ echo "=== app-bench $1 $2 ==="
? +++
- cd $1
? ^
+ cd $2
? ^
echo "=== git clone ==="
- # time -p git clone git@g.csail.mit.edu:fscq
- time -p git clone ~/fscq
? ^^^^^^
+ time -p git clone $1
? ^^
echo "=== compile xv6 ==="
cd fscq/xv6
time -p make
echo "=== compile lfs bench ==="
cd ../bench/LFStest
time -p make
echo "=== run lfs large ==="
- ./largefile -f 1 -i 1 $1
? ^
+ ./largefile -f 1 -i 1 $2
? ^
echo "=== cleanup ==="
- #cd $1
+ cd $2
- #time -p rm -rf *
? -
+ time -p rm -rf fscq/*
? +++++
| 17 | 0.586207 | 8 | 9 |
0f7a9eb0a899d5a0b23cda18d6b1d352d9d89e04 | common_config.rb | common_config.rb | require 'bootstrap-sass'
require 'font-awesome-sass'
fonts_dir = "viaduct/static/font"
http_fonts_path = "/static/font"
http_fonts_dir = "/static/font"
environment = :development
css_dir = "viaduct/static/css"
http_css_path = "/static/css"
images_dir = "viaduct/static/img"
http_images_path = "/static/img"
javascript_dir = "viaduct/static/js"
http_javascript_path = "/static/js"
| gem 'bootstrap-sass', '=3.2.0.2'
gem 'font-awesome-sass', '=4.2.0'
require 'bootstrap-sass'
require 'font-awesome-sass'
fonts_dir = "viaduct/static/font"
http_fonts_path = "/static/font"
http_fonts_dir = "/static/font"
environment = :development
css_dir = "viaduct/static/css"
http_css_path = "/static/css"
images_dir = "viaduct/static/img"
http_images_path = "/static/img"
javascript_dir = "viaduct/static/js"
http_javascript_path = "/static/js"
| Fix compass dependencies when more gems are installed | Fix compass dependencies when more gems are installed
| Ruby | mit | viaict/viaduct,viaict/viaduct,viaict/viaduct,viaict/viaduct,viaict/viaduct | ruby | ## Code Before:
require 'bootstrap-sass'
require 'font-awesome-sass'
fonts_dir = "viaduct/static/font"
http_fonts_path = "/static/font"
http_fonts_dir = "/static/font"
environment = :development
css_dir = "viaduct/static/css"
http_css_path = "/static/css"
images_dir = "viaduct/static/img"
http_images_path = "/static/img"
javascript_dir = "viaduct/static/js"
http_javascript_path = "/static/js"
## Instruction:
Fix compass dependencies when more gems are installed
## Code After:
gem 'bootstrap-sass', '=3.2.0.2'
gem 'font-awesome-sass', '=4.2.0'
require 'bootstrap-sass'
require 'font-awesome-sass'
fonts_dir = "viaduct/static/font"
http_fonts_path = "/static/font"
http_fonts_dir = "/static/font"
environment = :development
css_dir = "viaduct/static/css"
http_css_path = "/static/css"
images_dir = "viaduct/static/img"
http_images_path = "/static/img"
javascript_dir = "viaduct/static/js"
http_javascript_path = "/static/js"
| + gem 'bootstrap-sass', '=3.2.0.2'
+ gem 'font-awesome-sass', '=4.2.0'
require 'bootstrap-sass'
require 'font-awesome-sass'
fonts_dir = "viaduct/static/font"
http_fonts_path = "/static/font"
http_fonts_dir = "/static/font"
environment = :development
css_dir = "viaduct/static/css"
http_css_path = "/static/css"
images_dir = "viaduct/static/img"
http_images_path = "/static/img"
javascript_dir = "viaduct/static/js"
http_javascript_path = "/static/js" | 2 | 0.153846 | 2 | 0 |
c7e55bfd8284c4bb6755abc51dd7c940bca9d81a | sensor_consumers/dust_node.py | sensor_consumers/dust_node.py |
from local_settings import *
from utils import SensorConsumerBase
import redis
import datetime
import sys
class DustNode(SensorConsumerBase):
def __init__(self):
SensorConsumerBase.__init__(self, "indoor_air_quality")
def run(self):
self.subscribe("dust-node-pubsub", self.pubsub_callback)
def pubsub_callback(self, data):
if "action" in data:
return
influx_data = {
"measurement": "dustnode",
"timestamp": data["utctimestamp"].isoformat() + "Z",
"fields": {
"room_humidity": data["data"]["room_humidity"],
"room_temperature": round(data["data"]["room_temperature"], 1),
"barometer_temperature": round(data["data"]["barometer_temperature"], 1),
"barometer_pressure": round(data["data"]["barometer_reading"], 1),
"dust_density": round(data["data"]["dust_density"], 5)
}
}
self.insert_into_influx([influx_data])
def main():
item = DustNode()
item.run()
return 0
if __name__ == '__main__':
sys.exit(main())
|
from local_settings import *
from utils import SensorConsumerBase
import redis
import datetime
import sys
class DustNode(SensorConsumerBase):
def __init__(self):
SensorConsumerBase.__init__(self, "indoor_air_quality")
def run(self):
self.subscribe("dust-node-pubsub", self.pubsub_callback)
def pubsub_callback(self, data):
if "action" in data:
return
influx_data = {
"measurement": "dustnode",
"timestamp": data["utctimestamp"].isoformat() + "Z",
"fields": {
"room_humidity": data["data"]["room_humidity"],
"room_temperature": round(data["data"]["room_temperature"], 1),
"barometer_temperature": round(data["data"]["barometer_temperature"], 1),
"barometer_pressure": round(data["data"]["barometer_reading"], 1),
"dust_density": round(data["data"]["dust_density"], 5),
"sound_level": data["data"]["sound_level"],
}
}
self.insert_into_influx([influx_data])
def main():
item = DustNode()
item.run()
return 0
if __name__ == '__main__':
sys.exit(main())
| Add sound level to influx | Add sound level to influx
| Python | bsd-3-clause | ojarva/home-info-display,ojarva/home-info-display,ojarva/home-info-display,ojarva/home-info-display | python | ## Code Before:
from local_settings import *
from utils import SensorConsumerBase
import redis
import datetime
import sys
class DustNode(SensorConsumerBase):
def __init__(self):
SensorConsumerBase.__init__(self, "indoor_air_quality")
def run(self):
self.subscribe("dust-node-pubsub", self.pubsub_callback)
def pubsub_callback(self, data):
if "action" in data:
return
influx_data = {
"measurement": "dustnode",
"timestamp": data["utctimestamp"].isoformat() + "Z",
"fields": {
"room_humidity": data["data"]["room_humidity"],
"room_temperature": round(data["data"]["room_temperature"], 1),
"barometer_temperature": round(data["data"]["barometer_temperature"], 1),
"barometer_pressure": round(data["data"]["barometer_reading"], 1),
"dust_density": round(data["data"]["dust_density"], 5)
}
}
self.insert_into_influx([influx_data])
def main():
item = DustNode()
item.run()
return 0
if __name__ == '__main__':
sys.exit(main())
## Instruction:
Add sound level to influx
## Code After:
from local_settings import *
from utils import SensorConsumerBase
import redis
import datetime
import sys
class DustNode(SensorConsumerBase):
def __init__(self):
SensorConsumerBase.__init__(self, "indoor_air_quality")
def run(self):
self.subscribe("dust-node-pubsub", self.pubsub_callback)
def pubsub_callback(self, data):
if "action" in data:
return
influx_data = {
"measurement": "dustnode",
"timestamp": data["utctimestamp"].isoformat() + "Z",
"fields": {
"room_humidity": data["data"]["room_humidity"],
"room_temperature": round(data["data"]["room_temperature"], 1),
"barometer_temperature": round(data["data"]["barometer_temperature"], 1),
"barometer_pressure": round(data["data"]["barometer_reading"], 1),
"dust_density": round(data["data"]["dust_density"], 5),
"sound_level": data["data"]["sound_level"],
}
}
self.insert_into_influx([influx_data])
def main():
item = DustNode()
item.run()
return 0
if __name__ == '__main__':
sys.exit(main())
|
from local_settings import *
from utils import SensorConsumerBase
import redis
import datetime
import sys
class DustNode(SensorConsumerBase):
def __init__(self):
SensorConsumerBase.__init__(self, "indoor_air_quality")
def run(self):
self.subscribe("dust-node-pubsub", self.pubsub_callback)
def pubsub_callback(self, data):
if "action" in data:
return
influx_data = {
"measurement": "dustnode",
"timestamp": data["utctimestamp"].isoformat() + "Z",
"fields": {
"room_humidity": data["data"]["room_humidity"],
"room_temperature": round(data["data"]["room_temperature"], 1),
"barometer_temperature": round(data["data"]["barometer_temperature"], 1),
"barometer_pressure": round(data["data"]["barometer_reading"], 1),
- "dust_density": round(data["data"]["dust_density"], 5)
+ "dust_density": round(data["data"]["dust_density"], 5),
? +
+ "sound_level": data["data"]["sound_level"],
}
}
self.insert_into_influx([influx_data])
def main():
item = DustNode()
item.run()
return 0
if __name__ == '__main__':
sys.exit(main()) | 3 | 0.076923 | 2 | 1 |
8a50dae571934c50f291555944868ddf8a9e5b84 | .vscode/settings.json | .vscode/settings.json | {
"files.exclude": {
"node_modules": true
}
} | {
"files.exclude": {
"package-lock.json": true,
"node_modules": true
}
} | Exclude NPM generated file from Visual Studio Code | Exclude NPM generated file from Visual Studio Code
| JSON | unlicense | matco/peerchat,matco/peerchat | json | ## Code Before:
{
"files.exclude": {
"node_modules": true
}
}
## Instruction:
Exclude NPM generated file from Visual Studio Code
## Code After:
{
"files.exclude": {
"package-lock.json": true,
"node_modules": true
}
} | {
"files.exclude": {
+ "package-lock.json": true,
"node_modules": true
}
} | 1 | 0.2 | 1 | 0 |
7fa67d4a21e876c0d00667eae90a5c727efe4622 | src/app/installed-packages-config-panel.coffee | src/app/installed-packages-config-panel.coffee | ConfigPanel = require 'config-panel'
PackageConfigView = require 'package-config-view'
### Internal ###
module.exports =
class InstalledPackagesConfigPanel extends ConfigPanel
@content: ->
@div class: 'installed-packages'
initialize: ->
for pack in atom.getLoadedPackages()
@append(new PackageConfigView(pack.metadata))
| _ = require 'underscore'
ConfigPanel = require 'config-panel'
PackageConfigView = require 'package-config-view'
### Internal ###
module.exports =
class InstalledPackagesConfigPanel extends ConfigPanel
@content: ->
@div class: 'installed-packages'
initialize: ->
for pack in _.sortBy(atom.getLoadedPackages(), 'name')
@append(new PackageConfigView(pack.metadata))
| Sort installed packages by name | Sort installed packages by name
| CoffeeScript | mit | devoncarew/atom,mdumrauf/atom,stinsonga/atom,g2p/atom,ali/atom,bcoe/atom,constanzaurzua/atom,rookie125/atom,codex8/atom,MjAbuz/atom,devoncarew/atom,Jandersoft/atom,codex8/atom,KENJU/atom,SlimeQ/atom,batjko/atom,dsandstrom/atom,darwin/atom,n-riesco/atom,woss/atom,me-benni/atom,fedorov/atom,ykeisuke/atom,john-kelly/atom,githubteacher/atom,florianb/atom,acontreras89/atom,russlescai/atom,avdg/atom,fscherwi/atom,folpindo/atom,xream/atom,hellendag/atom,targeter21/atom,Arcanemagus/atom,panuchart/atom,hpham04/atom,amine7536/atom,mostafaeweda/atom,Mokolea/atom,brumm/atom,anuwat121/atom,elkingtonmcb/atom,YunchengLiao/atom,gontadu/atom,isghe/atom,yalexx/atom,splodingsocks/atom,AlexxNica/atom,fredericksilva/atom,tony612/atom,Mokolea/atom,AlisaKiatkongkumthon/atom,darwin/atom,Abdillah/atom,pkdevbox/atom,jtrose2/atom,001szymon/atom,lisonma/atom,h0dgep0dge/atom,mostafaeweda/atom,Ingramz/atom,decaffeinate-examples/atom,me6iaton/atom,fedorov/atom,dsandstrom/atom,Andrey-Pavlov/atom,Jandersolutions/atom,Sangaroonaom/atom,tjkr/atom,basarat/atom,kittens/atom,jlord/atom,isghe/atom,yomybaby/atom,erikhakansson/atom,G-Baby/atom,rookie125/atom,panuchart/atom,lisonma/atom,ObviouslyGreen/atom,stinsonga/atom,AlbertoBarrago/atom,Rychard/atom,mnquintana/atom,bolinfest/atom,constanzaurzua/atom,crazyquark/atom,Hasimir/atom,boomwaiza/atom,gontadu/atom,Klozz/atom,kdheepak89/atom,phord/atom,bryonwinger/atom,woss/atom,GHackAnonymous/atom,GHackAnonymous/atom,basarat/atom,bradgearon/atom,me6iaton/atom,burodepeper/atom,helber/atom,ardeshirj/atom,andrewleverette/atom,vjeux/atom,Hasimir/atom,ilovezy/atom,me6iaton/atom,ardeshirj/atom,sxgao3001/atom,johnrizzo1/atom,transcranial/atom,Shekharrajak/atom,toqz/atom,hpham04/atom,jacekkopecky/atom,g2p/atom,medovob/atom,woss/atom,vinodpanicker/atom,mdumrauf/atom,t9md/atom,FoldingText/atom,woss/atom,abcP9110/atom,execjosh/atom,jjz/atom,avdg/atom,tony612/atom,einarmagnus/atom,Ju2ender/atom,tmunro/atom,isghe/atom,scippio/atom,ironbox360/atom,rmart
in/atom,prembasumatary/atom,Rodjana/atom,AlisaKiatkongkumthon/atom,tisu2tisu/atom,mostafaeweda/atom,PKRoma/atom,acontreras89/atom,beni55/atom,omarhuanca/atom,rsvip/aTom,Jdesk/atom,ali/atom,vjeux/atom,efatsi/atom,Ju2ender/atom,hharchani/atom,stuartquin/atom,cyzn/atom,GHackAnonymous/atom,Andrey-Pavlov/atom,ppamorim/atom,jordanbtucker/atom,fang-yufeng/atom,codex8/atom,chfritz/atom,bradgearon/atom,fscherwi/atom,ardeshirj/atom,Huaraz2/atom,isghe/atom,fang-yufeng/atom,jeremyramin/atom,constanzaurzua/atom,yamhon/atom,lisonma/atom,RobinTec/atom,YunchengLiao/atom,DiogoXRP/atom,bj7/atom,ezeoleaf/atom,brumm/atom,hharchani/atom,rxkit/atom,ReddTea/atom,jacekkopecky/atom,brumm/atom,efatsi/atom,alexandergmann/atom,basarat/atom,mertkahyaoglu/atom,oggy/atom,wiggzz/atom,ezeoleaf/atom,bcoe/atom,ilovezy/atom,prembasumatary/atom,rjattrill/atom,n-riesco/atom,codex8/atom,Jonekee/atom,bsmr-x-script/atom,cyzn/atom,bencolon/atom,mrodalgaard/atom,RobinTec/atom,hakatashi/atom,niklabh/atom,abe33/atom,hagb4rd/atom,ezeoleaf/atom,dijs/atom,palita01/atom,bcoe/atom,bolinfest/atom,ralphtheninja/atom,devoncarew/atom,SlimeQ/atom,lisonma/atom,Galactix/atom,Huaraz2/atom,sillvan/atom,fang-yufeng/atom,devmario/atom,rmartin/atom,Austen-G/BlockBuilder,folpindo/atom,toqz/atom,codex8/atom,liuxiong332/atom,G-Baby/atom,Neron-X5/atom,Andrey-Pavlov/atom,me-benni/atom,tjkr/atom,kaicataldo/atom,Ju2ender/atom,yalexx/atom,KENJU/atom,targeter21/atom,Abdillah/atom,jlord/atom,FoldingText/atom,devoncarew/atom,ezeoleaf/atom,AlisaKiatkongkumthon/atom,Neron-X5/atom,Galactix/atom,fredericksilva/atom,NunoEdgarGub1/atom,kittens/atom,YunchengLiao/atom,nvoron23/atom,charleswhchan/atom,FIT-CSE2410-A-Bombs/atom,svanharmelen/atom,ykeisuke/atom,sebmck/atom,abcP9110/atom,nvoron23/atom,anuwat121/atom,mertkahyaoglu/atom,niklabh/atom,rjattrill/atom,Huaraz2/atom,constanzaurzua/atom,jjz/atom,amine7536/atom,oggy/atom,ashneo76/atom,bryonwinger/atom,beni55/atom,woss/atom,johnrizzo1/atom,crazyquark/atom,Jdesk/atom,deepfox/atom,russlescai/atom,
kandros/atom,lpommers/atom,ashneo76/atom,yangchenghu/atom,scippio/atom,MjAbuz/atom,Hasimir/atom,Neron-X5/atom,Jdesk/atom,ppamorim/atom,t9md/atom,bencolon/atom,scv119/atom,darwin/atom,liuxiong332/atom,BogusCurry/atom,deepfox/atom,liuxiong332/atom,rsvip/aTom,Ju2ender/atom,kjav/atom,Mokolea/atom,oggy/atom,matthewclendening/atom,yomybaby/atom,KENJU/atom,kittens/atom,chfritz/atom,medovob/atom,pombredanne/atom,alexandergmann/atom,Shekharrajak/atom,h0dgep0dge/atom,jacekkopecky/atom,AdrianVovk/substance-ide,kjav/atom,gisenberg/atom,Ingramz/atom,ReddTea/atom,einarmagnus/atom,kaicataldo/atom,davideg/atom,hharchani/atom,helber/atom,jordanbtucker/atom,AlbertoBarrago/atom,palita01/atom,avdg/atom,jlord/atom,nucked/atom,harshdattani/atom,n-riesco/atom,abcP9110/atom,targeter21/atom,Arcanemagus/atom,qiujuer/atom,tjkr/atom,basarat/atom,scv119/atom,stuartquin/atom,Austen-G/BlockBuilder,Shekharrajak/atom,rxkit/atom,ali/atom,0x73/atom,kjav/atom,kdheepak89/atom,Sangaroonaom/atom,Klozz/atom,qiujuer/atom,deepfox/atom,seedtigo/atom,me6iaton/atom,yamhon/atom,NunoEdgarGub1/atom,dannyflax/atom,yomybaby/atom,SlimeQ/atom,tanin47/atom,deoxilix/atom,tmunro/atom,sebmck/atom,alfredxing/atom,kdheepak89/atom,devmario/atom,Galactix/atom,qskycolor/atom,ppamorim/atom,lovesnow/atom,ali/atom,anuwat121/atom,champagnez/atom,jjz/atom,Jonekee/atom,rmartin/atom,liuderchi/atom,vhutheesing/atom,chengky/atom,seedtigo/atom,ali/atom,kc8wxm/atom,hharchani/atom,gisenberg/atom,fredericksilva/atom,mertkahyaoglu/atom,CraZySacX/atom,florianb/atom,hellendag/atom,AdrianVovk/substance-ide,ilovezy/atom,jeremyramin/atom,gabrielPeart/atom,chengky/atom,kdheepak89/atom,NunoEdgarGub1/atom,sebmck/atom,KENJU/atom,Jandersoft/atom,sillvan/atom,pkdevbox/atom,dkfiresky/atom,liuderchi/atom,originye/atom,FoldingText/atom,FoldingText/atom,FIT-CSE2410-A-Bombs/atom,vcarrera/atom,mostafaeweda/atom,synaptek/atom,hpham04/atom,sxgao3001/atom,bj7/atom,001szymon/atom,batjko/atom,jtrose2/atom,ykeisuke/atom,tony612/atom,johnhaley81/atom,batjko/atom,
NunoEdgarGub1/atom,splodingsocks/atom,kevinrenaers/atom,liuxiong332/atom,Locke23rus/atom,MjAbuz/atom,BogusCurry/atom,boomwaiza/atom,Rychard/atom,AlbertoBarrago/atom,crazyquark/atom,qiujuer/atom,johnhaley81/atom,dannyflax/atom,matthewclendening/atom,crazyquark/atom,chengky/atom,splodingsocks/atom,rsvip/aTom,targeter21/atom,harshdattani/atom,Shekharrajak/atom,Jandersolutions/atom,Ju2ender/atom,brettle/atom,phord/atom,russlescai/atom,yalexx/atom,vinodpanicker/atom,gabrielPeart/atom,ironbox360/atom,charleswhchan/atom,einarmagnus/atom,vhutheesing/atom,elkingtonmcb/atom,Klozz/atom,RobinTec/atom,execjosh/atom,vjeux/atom,ReddTea/atom,YunchengLiao/atom,tisu2tisu/atom,devmario/atom,AlexxNica/atom,chengky/atom,cyzn/atom,gzzhanghao/atom,hellendag/atom,gzzhanghao/atom,PKRoma/atom,tony612/atom,amine7536/atom,russlescai/atom,burodepeper/atom,nrodriguez13/atom,john-kelly/atom,tmunro/atom,matthewclendening/atom,tisu2tisu/atom,yomybaby/atom,sekcheong/atom,dkfiresky/atom,001szymon/atom,pombredanne/atom,toqz/atom,dsandstrom/atom,johnhaley81/atom,andrewleverette/atom,rmartin/atom,matthewclendening/atom,pombredanne/atom,nrodriguez13/atom,0x73/atom,einarmagnus/atom,jtrose2/atom,hagb4rd/atom,florianb/atom,mnquintana/atom,synaptek/atom,RuiDGoncalves/atom,Austen-G/BlockBuilder,mrodalgaard/atom,bcoe/atom,stinsonga/atom,atom/atom,daxlab/atom,githubteacher/atom,tony612/atom,qskycolor/atom,liuderchi/atom,jeremyramin/atom,rlugojr/atom,G-Baby/atom,hagb4rd/atom,MjAbuz/atom,chengky/atom,paulcbetts/atom,fang-yufeng/atom,folpindo/atom,nrodriguez13/atom,davideg/atom,ReddTea/atom,xream/atom,Jandersoft/atom,lovesnow/atom,acontreras89/atom,Galactix/atom,pkdevbox/atom,yamhon/atom,jacekkopecky/atom,hakatashi/atom,Austen-G/BlockBuilder,n-riesco/atom,kevinrenaers/atom,jordanbtucker/atom,vcarrera/atom,Rodjana/atom,yangchenghu/atom,Rodjana/atom,andrewleverette/atom,liuxiong332/atom,AlexxNica/atom,nucked/atom,rmartin/atom,RuiDGoncalves/atom,Jdesk/atom,davideg/atom,florianb/atom,yangchenghu/atom,basarat/atom,matt
hewclendening/atom,gisenberg/atom,oggy/atom,erikhakansson/atom,deepfox/atom,Abdillah/atom,sillvan/atom,jacekkopecky/atom,john-kelly/atom,oggy/atom,mdumrauf/atom,charleswhchan/atom,bryonwinger/atom,john-kelly/atom,Dennis1978/atom,SlimeQ/atom,mostafaeweda/atom,Abdillah/atom,0x73/atom,Neron-X5/atom,omarhuanca/atom,kevinrenaers/atom,phord/atom,sebmck/atom,fscherwi/atom,GHackAnonymous/atom,batjko/atom,davideg/atom,mnquintana/atom,dsandstrom/atom,prembasumatary/atom,kc8wxm/atom,gabrielPeart/atom,Neron-X5/atom,liuderchi/atom,decaffeinate-examples/atom,davideg/atom,NunoEdgarGub1/atom,Dennis1978/atom,fredericksilva/atom,RuiDGoncalves/atom,Jandersoft/atom,devmario/atom,DiogoXRP/atom,acontreras89/atom,fedorov/atom,kandros/atom,xream/atom,Locke23rus/atom,acontreras89/atom,alfredxing/atom,constanzaurzua/atom,sebmck/atom,devmario/atom,prembasumatary/atom,synaptek/atom,harshdattani/atom,sekcheong/atom,Jandersolutions/atom,brettle/atom,medovob/atom,abe33/atom,omarhuanca/atom,Jandersolutions/atom,Austen-G/BlockBuilder,burodepeper/atom,alfredxing/atom,paulcbetts/atom,MjAbuz/atom,scippio/atom,sekcheong/atom,dijs/atom,kittens/atom,ilovezy/atom,efatsi/atom,crazyquark/atom,dkfiresky/atom,RobinTec/atom,Ingramz/atom,h0dgep0dge/atom,batjko/atom,gzzhanghao/atom,mnquintana/atom,CraZySacX/atom,decaffeinate-examples/atom,vinodpanicker/atom,alexandergmann/atom,Abdillah/atom,qskycolor/atom,amine7536/atom,Rychard/atom,BogusCurry/atom,ivoadf/atom,pengshp/atom,dsandstrom/atom,hpham04/atom,jtrose2/atom,dannyflax/atom,dannyflax/atom,isghe/atom,beni55/atom,targeter21/atom,niklabh/atom,qiujuer/atom,originye/atom,toqz/atom,nvoron23/atom,hagb4rd/atom,rsvip/aTom,rjattrill/atom,dkfiresky/atom,me-benni/atom,bsmr-x-script/atom,wiggzz/atom,ashneo76/atom,jlord/atom,nvoron23/atom,sxgao3001/atom,kdheepak89/atom,rjattrill/atom,CraZySacX/atom,SlimeQ/atom,Hasimir/atom,qskycolor/atom,Galactix/atom,lpommers/atom,0x73/atom,palita01/atom,sxgao3001/atom,nvoron23/atom,tanin47/atom,mnquintana/atom,KENJU/atom,pengshp/atom,j
trose2/atom,RobinTec/atom,tanin47/atom,ivoadf/atom,vhutheesing/atom,h0dgep0dge/atom,svanharmelen/atom,execjosh/atom,john-kelly/atom,sillvan/atom,kaicataldo/atom,basarat/atom,pombredanne/atom,bradgearon/atom,vinodpanicker/atom,kc8wxm/atom,bcoe/atom,vcarrera/atom,mrodalgaard/atom,johnrizzo1/atom,lisonma/atom,charleswhchan/atom,gisenberg/atom,elkingtonmcb/atom,AdrianVovk/substance-ide,champagnez/atom,atom/atom,ironbox360/atom,jjz/atom,hakatashi/atom,russlescai/atom,jacekkopecky/atom,bj7/atom,Hasimir/atom,deepfox/atom,vjeux/atom,rxkit/atom,yomybaby/atom,FoldingText/atom,florianb/atom,vcarrera/atom,charleswhchan/atom,jlord/atom,dijs/atom,g2p/atom,pombredanne/atom,decaffeinate-examples/atom,pengshp/atom,svanharmelen/atom,seedtigo/atom,panuchart/atom,ppamorim/atom,hakatashi/atom,Andrey-Pavlov/atom,vinodpanicker/atom,Jandersolutions/atom,transcranial/atom,stuartquin/atom,Austen-G/BlockBuilder,ralphtheninja/atom,scv119/atom,sekcheong/atom,PKRoma/atom,brettle/atom,ilovezy/atom,Arcanemagus/atom,einarmagnus/atom,transcranial/atom,mertkahyaoglu/atom,helber/atom,daxlab/atom,Shekharrajak/atom,jjz/atom,originye/atom,boomwaiza/atom,daxlab/atom,abe33/atom,Andrey-Pavlov/atom,fedorov/atom,bencolon/atom,ObviouslyGreen/atom,yalexx/atom,champagnez/atom,dkfiresky/atom,erikhakansson/atom,synaptek/atom,FIT-CSE2410-A-Bombs/atom,DiogoXRP/atom,rlugojr/atom,Jdesk/atom,kc8wxm/atom,hpham04/atom,sekcheong/atom,qiujuer/atom,n-riesco/atom,kittens/atom,deoxilix/atom,rlugojr/atom,kjav/atom,vcarrera/atom,ObviouslyGreen/atom,omarhuanca/atom,yalexx/atom,me6iaton/atom,dannyflax/atom,ppamorim/atom,fredericksilva/atom,sotayamashita/atom,githubteacher/atom,hharchani/atom,Jonekee/atom,devoncarew/atom,sxgao3001/atom,paulcbetts/atom,gisenberg/atom,ralphtheninja/atom,omarhuanca/atom,ReddTea/atom,chfritz/atom,sotayamashita/atom,bryonwinger/atom,Dennis1978/atom,lpommers/atom,lovesnow/atom,kjav/atom,YunchengLiao/atom,sillvan/atom,synaptek/atom,sotayamashita/atom,vjeux/atom,Locke23rus/atom,lovesnow/atom,prembasumatar
y/atom,nucked/atom,kandros/atom,abcP9110/atom,deoxilix/atom,qskycolor/atom,scv119/atom,fang-yufeng/atom,FoldingText/atom,hagb4rd/atom,mertkahyaoglu/atom,paulcbetts/atom,amine7536/atom,wiggzz/atom,rsvip/aTom,bolinfest/atom,dannyflax/atom,kc8wxm/atom,Sangaroonaom/atom,bsmr-x-script/atom,Jandersoft/atom,GHackAnonymous/atom,toqz/atom,atom/atom,stinsonga/atom,lovesnow/atom,rookie125/atom,gontadu/atom,abcP9110/atom,splodingsocks/atom,ivoadf/atom,t9md/atom,fedorov/atom | coffeescript | ## Code Before:
ConfigPanel = require 'config-panel'
PackageConfigView = require 'package-config-view'
### Internal ###
module.exports =
class InstalledPackagesConfigPanel extends ConfigPanel
@content: ->
@div class: 'installed-packages'
initialize: ->
for pack in atom.getLoadedPackages()
@append(new PackageConfigView(pack.metadata))
## Instruction:
Sort installed packages by name
## Code After:
_ = require 'underscore'
ConfigPanel = require 'config-panel'
PackageConfigView = require 'package-config-view'
### Internal ###
module.exports =
class InstalledPackagesConfigPanel extends ConfigPanel
@content: ->
@div class: 'installed-packages'
initialize: ->
for pack in _.sortBy(atom.getLoadedPackages(), 'name')
@append(new PackageConfigView(pack.metadata))
| + _ = require 'underscore'
ConfigPanel = require 'config-panel'
PackageConfigView = require 'package-config-view'
### Internal ###
module.exports =
class InstalledPackagesConfigPanel extends ConfigPanel
@content: ->
@div class: 'installed-packages'
initialize: ->
- for pack in atom.getLoadedPackages()
+ for pack in _.sortBy(atom.getLoadedPackages(), 'name')
? +++++++++ +++++++++
@append(new PackageConfigView(pack.metadata)) | 3 | 0.230769 | 2 | 1 |
18966a7766838ba0244218dac22aefcbf25f3fc0 | project/plugins.sbt | project/plugins.sbt | // Comment to get more information during initialization
logLevel := Level.Warn
// The Typesafe repository
resolvers += "Typesafe repository" at "http://repo.typesafe.com/typesafe/releases/"
// Use the Play sbt plugin for Play projects
addSbtPlugin("com.typesafe.play" % "sbt-plugin" % "2.3.0")
addSbtPlugin("com.typesafe.sbt" % "sbt-native-packager" % "0.7.0-M3")
addSbtPlugin("de.johoop" % "jacoco4sbt" % "2.1.2") | // Comment to get more information during initialization
logLevel := Level.Warn
// The Typesafe repository
resolvers += "Typesafe repository" at "http://repo.typesafe.com/typesafe/releases/"
//temporary resolver until jacoco4sbt is compatible to Java 8
resolvers += "Schleichardts GitHub" at "http://schleichardt.github.io/jvmrepo/"
// Use the Play sbt plugin for Play projects
addSbtPlugin("com.typesafe.play" % "sbt-plugin" % "2.3.0")
addSbtPlugin("com.typesafe.sbt" % "sbt-native-packager" % "0.7.0-M3")
addSbtPlugin("io.sphere.de.johoop" % "jacoco4sbt" % "2.1.5-fork-1.0.0") | Use jacoco4sbt fork to be compatible with Java 8. | Use jacoco4sbt fork to be compatible with Java 8.
| Scala | apache-2.0 | sphereio/sphere-sunrise,sphereio/commercetools-sunrise-java,sphereio/commercetools-sunrise-java,rfuertesp/pruebas2,rfuertesp/pruebas2,commercetools/commercetools-sunrise-java,commercetools/commercetools-sunrise-java,sphereio/commercetools-sunrise-java,commercetools/commercetools-sunrise-java,commercetools/commercetools-sunrise-java,sphereio/sphere-sunrise,rfuertesp/pruebas2,sphereio/commercetools-sunrise-java,rfuertesp/pruebas2,sphereio/sphere-sunrise,sphereio/sphere-sunrise | scala | ## Code Before:
// Comment to get more information during initialization
logLevel := Level.Warn
// The Typesafe repository
resolvers += "Typesafe repository" at "http://repo.typesafe.com/typesafe/releases/"
// Use the Play sbt plugin for Play projects
addSbtPlugin("com.typesafe.play" % "sbt-plugin" % "2.3.0")
addSbtPlugin("com.typesafe.sbt" % "sbt-native-packager" % "0.7.0-M3")
addSbtPlugin("de.johoop" % "jacoco4sbt" % "2.1.2")
## Instruction:
Use jacoco4sbt fork to be compatible with Java 8.
## Code After:
// Comment to get more information during initialization
logLevel := Level.Warn
// The Typesafe repository
resolvers += "Typesafe repository" at "http://repo.typesafe.com/typesafe/releases/"
//temporary resolver until jacoco4sbt is compatible to Java 8
resolvers += "Schleichardts GitHub" at "http://schleichardt.github.io/jvmrepo/"
// Use the Play sbt plugin for Play projects
addSbtPlugin("com.typesafe.play" % "sbt-plugin" % "2.3.0")
addSbtPlugin("com.typesafe.sbt" % "sbt-native-packager" % "0.7.0-M3")
addSbtPlugin("io.sphere.de.johoop" % "jacoco4sbt" % "2.1.5-fork-1.0.0") | // Comment to get more information during initialization
logLevel := Level.Warn
// The Typesafe repository
resolvers += "Typesafe repository" at "http://repo.typesafe.com/typesafe/releases/"
+ //temporary resolver until jacoco4sbt is compatible to Java 8
+ resolvers += "Schleichardts GitHub" at "http://schleichardt.github.io/jvmrepo/"
+
// Use the Play sbt plugin for Play projects
addSbtPlugin("com.typesafe.play" % "sbt-plugin" % "2.3.0")
addSbtPlugin("com.typesafe.sbt" % "sbt-native-packager" % "0.7.0-M3")
- addSbtPlugin("de.johoop" % "jacoco4sbt" % "2.1.2")
? ^
+ addSbtPlugin("io.sphere.de.johoop" % "jacoco4sbt" % "2.1.5-fork-1.0.0")
? ++++++++++ ^^^^^^^^^^^^
| 5 | 0.416667 | 4 | 1 |
0a3c2916847f04e8c768c2273d58eec1d0d19797 | hello-world/start.sh | hello-world/start.sh | . ./sub-scripts/check-params.sh
IMAGE_NAME=ecomp/hello-world
#Check params
test_params
# Pull the image
echo "Logging docker engine $DOCKER_ADDRESS in to $REGISTRY_ADDRESS"
echo $DOCKER_PASSWORD| sudo -S docker login -u=$REGISTRY_LOGIN -p=$REGISTRY_PASSWORD https://$REGISTRY_ADDRESS
echo "Pulling the image named $REGISTRY_ADDRESS/$IMAGE_NAME:$IMAGE_VERSION from docker engine $DOCKER_ADDRESS "
docker -H $DOCKER_ADDRESS pull $REGISTRY_ADDRESS/$IMAGE_NAME:$IMAGE_VERSION
echo "Starting image $REGISTRY_ADDRESS/$IMAGE_NAME on Docker engine $DOCKER_ADDRESS"
docker -H $DOCKER_ADDRESS run --rm --name hello-world-demo --hostname hello-world-demo -p 18080:8080 -v "`pwd`/conf/images/:/hello-world/conf" $REGISTRY_ADDRESS/$IMAGE_NAME:$IMAGE_VERSION | . ./sub-scripts/check-params.sh
IMAGE_NAME=ecomp/hello-world
#Check params
test_params
# Pull the image
echo "Logging docker engine $DOCKER_ADDRESS in to $REGISTRY_ADDRESS"
echo $DOCKER_PASSWORD| sudo -S docker login -u=$REGISTRY_LOGIN -p=$REGISTRY_PASSWORD https://$REGISTRY_ADDRESS
echo "Pulling the image named $REGISTRY_ADDRESS/$IMAGE_NAME:$IMAGE_VERSION from docker engine $DOCKER_ADDRESS "
echo $DOCKER_PASSWORD| sudo -S docker -H $DOCKER_ADDRESS pull $REGISTRY_ADDRESS/$IMAGE_NAME:$IMAGE_VERSION
echo "Starting image $REGISTRY_ADDRESS/$IMAGE_NAME on Docker engine $DOCKER_ADDRESS"
echo $DOCKER_PASSWORD| sudo -S docker -H $DOCKER_ADDRESS run --rm --name hello-world-demo --hostname hello-world-demo -p 18080:8080 -v "`pwd`/conf/images/:/hello-world/conf" $REGISTRY_ADDRESS/$IMAGE_NAME:$IMAGE_VERSION | Add sudo for all commands | Add sudo for all commands | Shell | apache-2.0 | SBCorp/docker-configs | shell | ## Code Before:
. ./sub-scripts/check-params.sh
IMAGE_NAME=ecomp/hello-world
#Check params
test_params
# Pull the image
echo "Logging docker engine $DOCKER_ADDRESS in to $REGISTRY_ADDRESS"
echo $DOCKER_PASSWORD| sudo -S docker login -u=$REGISTRY_LOGIN -p=$REGISTRY_PASSWORD https://$REGISTRY_ADDRESS
echo "Pulling the image named $REGISTRY_ADDRESS/$IMAGE_NAME:$IMAGE_VERSION from docker engine $DOCKER_ADDRESS "
docker -H $DOCKER_ADDRESS pull $REGISTRY_ADDRESS/$IMAGE_NAME:$IMAGE_VERSION
echo "Starting image $REGISTRY_ADDRESS/$IMAGE_NAME on Docker engine $DOCKER_ADDRESS"
docker -H $DOCKER_ADDRESS run --rm --name hello-world-demo --hostname hello-world-demo -p 18080:8080 -v "`pwd`/conf/images/:/hello-world/conf" $REGISTRY_ADDRESS/$IMAGE_NAME:$IMAGE_VERSION
## Instruction:
Add sudo for all commands
## Code After:
. ./sub-scripts/check-params.sh
IMAGE_NAME=ecomp/hello-world
#Check params
test_params
# Pull the image
echo "Logging docker engine $DOCKER_ADDRESS in to $REGISTRY_ADDRESS"
echo $DOCKER_PASSWORD| sudo -S docker login -u=$REGISTRY_LOGIN -p=$REGISTRY_PASSWORD https://$REGISTRY_ADDRESS
echo "Pulling the image named $REGISTRY_ADDRESS/$IMAGE_NAME:$IMAGE_VERSION from docker engine $DOCKER_ADDRESS "
echo $DOCKER_PASSWORD| sudo -S docker -H $DOCKER_ADDRESS pull $REGISTRY_ADDRESS/$IMAGE_NAME:$IMAGE_VERSION
echo "Starting image $REGISTRY_ADDRESS/$IMAGE_NAME on Docker engine $DOCKER_ADDRESS"
echo $DOCKER_PASSWORD| sudo -S docker -H $DOCKER_ADDRESS run --rm --name hello-world-demo --hostname hello-world-demo -p 18080:8080 -v "`pwd`/conf/images/:/hello-world/conf" $REGISTRY_ADDRESS/$IMAGE_NAME:$IMAGE_VERSION | . ./sub-scripts/check-params.sh
IMAGE_NAME=ecomp/hello-world
#Check params
test_params
# Pull the image
echo "Logging docker engine $DOCKER_ADDRESS in to $REGISTRY_ADDRESS"
echo $DOCKER_PASSWORD| sudo -S docker login -u=$REGISTRY_LOGIN -p=$REGISTRY_PASSWORD https://$REGISTRY_ADDRESS
echo "Pulling the image named $REGISTRY_ADDRESS/$IMAGE_NAME:$IMAGE_VERSION from docker engine $DOCKER_ADDRESS "
- docker -H $DOCKER_ADDRESS pull $REGISTRY_ADDRESS/$IMAGE_NAME:$IMAGE_VERSION
+ echo $DOCKER_PASSWORD| sudo -S docker -H $DOCKER_ADDRESS pull $REGISTRY_ADDRESS/$IMAGE_NAME:$IMAGE_VERSION
? +++++++++++++++++++++++++++++++
echo "Starting image $REGISTRY_ADDRESS/$IMAGE_NAME on Docker engine $DOCKER_ADDRESS"
- docker -H $DOCKER_ADDRESS run --rm --name hello-world-demo --hostname hello-world-demo -p 18080:8080 -v "`pwd`/conf/images/:/hello-world/conf" $REGISTRY_ADDRESS/$IMAGE_NAME:$IMAGE_VERSION
+ echo $DOCKER_PASSWORD| sudo -S docker -H $DOCKER_ADDRESS run --rm --name hello-world-demo --hostname hello-world-demo -p 18080:8080 -v "`pwd`/conf/images/:/hello-world/conf" $REGISTRY_ADDRESS/$IMAGE_NAME:$IMAGE_VERSION
? +++++++++++++++++++++++++++++++
| 4 | 0.285714 | 2 | 2 |
9c3fe9c7c69715c7c50d9dcac1ff2155126c233b | README.md | README.md | Doc-As-Code
===========
![Appveyor]()
Easily build and publish your API documentation. We currently support C# and VB projects.
Getting Started
---------------
Please refer to [Getting Started](http://aspnet.github.io/docfx/#/tutorial/docfx_getting_started.md) to play with `docfx`!
How to Contribute
---------------
Check out the [contributing](CONTRIBUTING.md) page to see the best places to log issues and start discussions.
| Doc-As-Code
===========
![Appveyor]()
Easily build and publish your API documentation. We currently support C# and VB projects.
Getting Started
---------------
Please refer to [Getting Started](http://aspnet.github.io/docfx/#/tutorial/docfx_getting_started.md) to play with `docfx`!
What's included
---------------
File/Folder | Description
------------| ----------
LICENSE | Project license information
README.md | Introduction to the project
CONTRIBUTING.md | Contribution guidelines to how to contribute to the repo
Documentation | Documentation project using `docfx` to produce the documentation site
src | Source code for `docfx`
test | Test cases for `docfx` using *xunit* test framework
tools | Source code for tools used in code build and deployment
How to Contribute
---------------
Check out the [contributing](CONTRIBUTING.md) page to see the best places to log issues and start discussions.
| Update readme.md to include what's included | Update readme.md to include what's included
| Markdown | mit | sergey-vershinin/docfx,pascalberger/docfx,LordZoltan/docfx,sergey-vershinin/docfx,DuncanmaMSFT/docfx,LordZoltan/docfx,pascalberger/docfx,928PJY/docfx,LordZoltan/docfx,LordZoltan/docfx,dotnet/docfx,hellosnow/docfx,superyyrrzz/docfx,hellosnow/docfx,superyyrrzz/docfx,hellosnow/docfx,DuncanmaMSFT/docfx,pascalberger/docfx,superyyrrzz/docfx,928PJY/docfx,sergey-vershinin/docfx,dotnet/docfx,928PJY/docfx,dotnet/docfx | markdown | ## Code Before:
Doc-As-Code
===========
![Appveyor]()
Easily build and publish your API documentation. We currently support C# and VB projects.
Getting Started
---------------
Please refer to [Getting Started](http://aspnet.github.io/docfx/#/tutorial/docfx_getting_started.md) to play with `docfx`!
How to Contribute
---------------
Check out the [contributing](CONTRIBUTING.md) page to see the best places to log issues and start discussions.
## Instruction:
Update readme.md to include what's included
## Code After:
Doc-As-Code
===========
![Appveyor]()
Easily build and publish your API documentation. We currently support C# and VB projects.
Getting Started
---------------
Please refer to [Getting Started](http://aspnet.github.io/docfx/#/tutorial/docfx_getting_started.md) to play with `docfx`!
What's included
---------------
File/Folder | Description
------------| ----------
LICENSE | Project license information
README.md | Introduction to the project
CONTRIBUTING.md | Contribution guidelines to how to contribute to the repo
Documentation | Documentation project using `docfx` to produce the documentation site
src | Source code for `docfx`
test | Test cases for `docfx` using *xunit* test framework
tools | Source code for tools used in code build and deployment
How to Contribute
---------------
Check out the [contributing](CONTRIBUTING.md) page to see the best places to log issues and start discussions.
| Doc-As-Code
===========
![Appveyor]()
Easily build and publish your API documentation. We currently support C# and VB projects.
Getting Started
---------------
Please refer to [Getting Started](http://aspnet.github.io/docfx/#/tutorial/docfx_getting_started.md) to play with `docfx`!
+ What's included
+ ---------------
+
+ File/Folder | Description
+ ------------| ----------
+ LICENSE | Project license information
+ README.md | Introduction to the project
+ CONTRIBUTING.md | Contribution guidelines to how to contribute to the repo
+ Documentation | Documentation project using `docfx` to produce the documentation site
+ src | Source code for `docfx`
+ test | Test cases for `docfx` using *xunit* test framework
+ tools | Source code for tools used in code build and deployment
+
How to Contribute
---------------
Check out the [contributing](CONTRIBUTING.md) page to see the best places to log issues and start discussions. | 13 | 1.083333 | 13 | 0 |
9adb2f964716f4766765a566428ea101773fb878 | helper/DiskDataStore.js | helper/DiskDataStore.js | var fs = require('fs');
var Settings = require('./Settings');
var DiskDataStore = {};
DiskDataStore.init = function (){
this._CACHE_PATH = Settings.get('cache_path');
fs.mkdirSync(this._CACHE_PATH);
};
DiskDataStore.deleteEntry = function (key){
var fileName = this._CACHE_PATH + '/' + key;
fs.unlink(fileName, function (err){
if (err) throw err;
console.log('successfully deleted ' + fileName);
});
};
DiskDataStore.readEntry =
function (key, targetBuffer, targetOffset, sourceOffset, length){
var fileName = this._CACHE_PATH + '/' + key;
var fd = fs.openSync(fileName, 'rs');
fs.readSync(fd, targetBuffer, targetOffset, length, sourceOffset);
fs.closeSync(fd);
};
DiskDataStore.writeEntry = function (key, data, cb){
var fileName = this._CACHE_PATH + '/' + key;
fs.writeFile(fileName, data, cb);
};
module.exports = DiskDataStore;
| var fs = require('fs');
var Settings = require('./Settings');
var DiskDataStore = {};
var fs = require('fs');
var deleteFolderRecursive = function(path) {
if( fs.existsSync(path) ) {
fs.readdirSync(path).forEach(function(file,index){
var curPath = path + '/' + file;
if(fs.lstatSync(curPath).isDirectory()) { // recurse
deleteFolderRecursive(curPath);
} else { // delete file
fs.unlinkSync(curPath);
}
});
fs.rmdirSync(path);
}
};
DiskDataStore.init = function (){
this._CACHE_PATH = Settings.get('cache_path');
deleteFolderRecursive(this._CACHE_PATH);
fs.mkdirSync(this._CACHE_PATH);
};
DiskDataStore.deleteEntry = function (key){
var fileName = this._CACHE_PATH + '/' + key;
fs.unlink(fileName, function (err){
if (err) throw err;
console.log('successfully deleted ' + fileName);
});
};
DiskDataStore.readEntry =
function (key, targetBuffer, targetOffset, sourceOffset, length){
var fileName = this._CACHE_PATH + '/' + key;
var fd = fs.openSync(fileName, 'rs');
fs.readSync(fd, targetBuffer, targetOffset, length, sourceOffset);
fs.closeSync(fd);
};
DiskDataStore.writeEntry = function (key, data, cb){
var fileName = this._CACHE_PATH + '/' + key;
fs.writeFile(fileName, data, cb);
};
module.exports = DiskDataStore;
| Remove the cache folder before initializing data cache. | Remove the cache folder before initializing data cache.
| JavaScript | apache-2.0 | weilonge/unidisk,weilonge/unidisk,weilonge/unidisk | javascript | ## Code Before:
var fs = require('fs');
var Settings = require('./Settings');
var DiskDataStore = {};
DiskDataStore.init = function (){
this._CACHE_PATH = Settings.get('cache_path');
fs.mkdirSync(this._CACHE_PATH);
};
DiskDataStore.deleteEntry = function (key){
var fileName = this._CACHE_PATH + '/' + key;
fs.unlink(fileName, function (err){
if (err) throw err;
console.log('successfully deleted ' + fileName);
});
};
DiskDataStore.readEntry =
function (key, targetBuffer, targetOffset, sourceOffset, length){
var fileName = this._CACHE_PATH + '/' + key;
var fd = fs.openSync(fileName, 'rs');
fs.readSync(fd, targetBuffer, targetOffset, length, sourceOffset);
fs.closeSync(fd);
};
DiskDataStore.writeEntry = function (key, data, cb){
var fileName = this._CACHE_PATH + '/' + key;
fs.writeFile(fileName, data, cb);
};
module.exports = DiskDataStore;
## Instruction:
Remove the cache folder before initializing data cache.
## Code After:
var fs = require('fs');
var Settings = require('./Settings');
var DiskDataStore = {};
var fs = require('fs');
var deleteFolderRecursive = function(path) {
if( fs.existsSync(path) ) {
fs.readdirSync(path).forEach(function(file,index){
var curPath = path + '/' + file;
if(fs.lstatSync(curPath).isDirectory()) { // recurse
deleteFolderRecursive(curPath);
} else { // delete file
fs.unlinkSync(curPath);
}
});
fs.rmdirSync(path);
}
};
DiskDataStore.init = function (){
this._CACHE_PATH = Settings.get('cache_path');
deleteFolderRecursive(this._CACHE_PATH);
fs.mkdirSync(this._CACHE_PATH);
};
DiskDataStore.deleteEntry = function (key){
var fileName = this._CACHE_PATH + '/' + key;
fs.unlink(fileName, function (err){
if (err) throw err;
console.log('successfully deleted ' + fileName);
});
};
DiskDataStore.readEntry =
function (key, targetBuffer, targetOffset, sourceOffset, length){
var fileName = this._CACHE_PATH + '/' + key;
var fd = fs.openSync(fileName, 'rs');
fs.readSync(fd, targetBuffer, targetOffset, length, sourceOffset);
fs.closeSync(fd);
};
DiskDataStore.writeEntry = function (key, data, cb){
var fileName = this._CACHE_PATH + '/' + key;
fs.writeFile(fileName, data, cb);
};
module.exports = DiskDataStore;
| var fs = require('fs');
var Settings = require('./Settings');
var DiskDataStore = {};
+ var fs = require('fs');
+ var deleteFolderRecursive = function(path) {
+ if( fs.existsSync(path) ) {
+ fs.readdirSync(path).forEach(function(file,index){
+ var curPath = path + '/' + file;
+ if(fs.lstatSync(curPath).isDirectory()) { // recurse
+ deleteFolderRecursive(curPath);
+ } else { // delete file
+ fs.unlinkSync(curPath);
+ }
+ });
+ fs.rmdirSync(path);
+ }
+ };
+
DiskDataStore.init = function (){
this._CACHE_PATH = Settings.get('cache_path');
+ deleteFolderRecursive(this._CACHE_PATH);
fs.mkdirSync(this._CACHE_PATH);
};
DiskDataStore.deleteEntry = function (key){
var fileName = this._CACHE_PATH + '/' + key;
fs.unlink(fileName, function (err){
if (err) throw err;
console.log('successfully deleted ' + fileName);
});
};
DiskDataStore.readEntry =
function (key, targetBuffer, targetOffset, sourceOffset, length){
var fileName = this._CACHE_PATH + '/' + key;
var fd = fs.openSync(fileName, 'rs');
fs.readSync(fd, targetBuffer, targetOffset, length, sourceOffset);
fs.closeSync(fd);
};
DiskDataStore.writeEntry = function (key, data, cb){
var fileName = this._CACHE_PATH + '/' + key;
fs.writeFile(fileName, data, cb);
};
module.exports = DiskDataStore; | 16 | 0.5 | 16 | 0 |
3ca56fef49e34e239b66c45c1abdbcadd5b05109 | .travis.yml | .travis.yml | sudo: false
language: node_js
node_js:
- "6"
- "8"
- "10"
os:
- linux
- osx
env:
global:
ELM_VERSION=0.19.0
install:
- node --version
- npm --version
- npm install
script:
- npm test
| sudo: false
language: node_js
node_js:
- "8"
- "10"
os:
- linux
- osx
env:
global:
ELM_VERSION=0.19.0
install:
- node --version
- npm --version
- npm install
script:
- npm test
| Remove node 6 from Travis-CI builds | Remove node 6 from Travis-CI builds | YAML | bsd-3-clause | rtfeldman/node-elm-test,rtfeldman/node-elm-test,rtfeldman/node-test-runner | yaml | ## Code Before:
sudo: false
language: node_js
node_js:
- "6"
- "8"
- "10"
os:
- linux
- osx
env:
global:
ELM_VERSION=0.19.0
install:
- node --version
- npm --version
- npm install
script:
- npm test
## Instruction:
Remove node 6 from Travis-CI builds
## Code After:
sudo: false
language: node_js
node_js:
- "8"
- "10"
os:
- linux
- osx
env:
global:
ELM_VERSION=0.19.0
install:
- node --version
- npm --version
- npm install
script:
- npm test
| sudo: false
language: node_js
node_js:
- - "6"
- "8"
- "10"
os:
- linux
- osx
env:
global:
ELM_VERSION=0.19.0
install:
- node --version
- npm --version
- npm install
script:
- npm test | 1 | 0.043478 | 0 | 1 |
be28da9a6dc871dd35d196055da79175c0b7f882 | README.md | README.md |
This is deployed at [maxwellholder.com](http://maxwellholder.com).
Run `bin/server` to serve site locally for preview. It will live-reload any
changes made in `source/`.
Run `bin/build` to build static pages from `source/` files.
## Deploying
To setup push to deploy, on the server you're deploying to run:
```
git clone https://github.com/mxhold/maxwellholder.com.git
cd maxwellholder.com
git config receive.denyCurrentBranch updateInstead
ln -s ./git_hooks/push-to-checkout ./.git/hooks/push-to-checkout
```
The assumes you have Ruby and Bundler already installed.
|
This is deployed at [maxwellholder.com](http://maxwellholder.com).
Run `bin/server` to serve site locally for preview. It will live-reload any
changes made in `source/`.
Run `bin/build` to build static pages from `source/` files.
## Deploying
To setup push to deploy, on the server you're deploying to run:
```
git clone https://github.com/mxhold/maxwellholder.com.git
cd maxwellholder.com
git config receive.denyCurrentBranch updateInstead
ln -s ./git_hooks/push-to-checkout ./.git/hooks/push-to-checkout
```
The assumes you have Ruby and Bundler already installed on the server.
Then (on your local machine) add the server as a remote and push:
```
git remote add deploy git@server.com:/path/to/repo
git push deploy
```
| Expand on instructions for how to deploy | Expand on instructions for how to deploy
| Markdown | mit | mxhold/maxwellholder.com,mxhold/maxwellholder.com,mxhold/maxwellholder.com,mxhold/maxwellholder.com | markdown | ## Code Before:
This is deployed at [maxwellholder.com](http://maxwellholder.com).
Run `bin/server` to serve site locally for preview. It will live-reload any
changes made in `source/`.
Run `bin/build` to build static pages from `source/` files.
## Deploying
To setup push to deploy, on the server you're deploying to run:
```
git clone https://github.com/mxhold/maxwellholder.com.git
cd maxwellholder.com
git config receive.denyCurrentBranch updateInstead
ln -s ./git_hooks/push-to-checkout ./.git/hooks/push-to-checkout
```
The assumes you have Ruby and Bundler already installed.
## Instruction:
Expand on instructions for how to deploy
## Code After:
This is deployed at [maxwellholder.com](http://maxwellholder.com).
Run `bin/server` to serve site locally for preview. It will live-reload any
changes made in `source/`.
Run `bin/build` to build static pages from `source/` files.
## Deploying
To setup push to deploy, on the server you're deploying to run:
```
git clone https://github.com/mxhold/maxwellholder.com.git
cd maxwellholder.com
git config receive.denyCurrentBranch updateInstead
ln -s ./git_hooks/push-to-checkout ./.git/hooks/push-to-checkout
```
The assumes you have Ruby and Bundler already installed on the server.
Then (on your local machine) add the server as a remote and push:
```
git remote add deploy git@server.com:/path/to/repo
git push deploy
```
|
This is deployed at [maxwellholder.com](http://maxwellholder.com).
Run `bin/server` to serve site locally for preview. It will live-reload any
changes made in `source/`.
Run `bin/build` to build static pages from `source/` files.
## Deploying
To setup push to deploy, on the server you're deploying to run:
```
git clone https://github.com/mxhold/maxwellholder.com.git
cd maxwellholder.com
git config receive.denyCurrentBranch updateInstead
ln -s ./git_hooks/push-to-checkout ./.git/hooks/push-to-checkout
```
- The assumes you have Ruby and Bundler already installed.
+ The assumes you have Ruby and Bundler already installed on the server.
? ++++++++++++++
+ Then (on your local machine) add the server as a remote and push:
+
+ ```
+ git remote add deploy git@server.com:/path/to/repo
+ git push deploy
+ ```
+ | 9 | 0.409091 | 8 | 1 |
9ebdd832f285e845f53380b522041ce74d21fea2 | autobuild.sh | autobuild.sh | set -e
# give -v to open in VirtualBox
# give -u to run vagrant up
vboxflag=0
vagrantflag=0
while getopts 'abf:vu' flag; do
case "${flag}" in
u) vagrantflag=1 ;;
v) vboxflag=1 ;;
*) error "Unexpected option ${flag}" ;;
esac
done
if [ $vagrantflag -eq 1 ]
then
vagrant up
fi
vagrant ssh -c "cd /vagrant/ && ./build.sh"
if [ $vboxflag -eq 1 ]
then
VBoxManage startvm "RustOS"
else
qemu-system-x86_64 -d int -no-reboot build/disk.img
fi
| set -e
# give -v to open in VirtualBox
# give -u to run vagrant up
vboxflag=0
vagrantflag=0
while getopts 'abf:vu' flag; do
case "${flag}" in
u) vagrantflag=1 ;;
v) vboxflag=1 ;;
*) error "Unexpected option ${flag}" ;;
esac
done
if [ $vagrantflag -eq 1 ]
then
vagrant up
fi
vagrant ssh -c "cd /vagrant/ && ./build.sh"
if [ $vboxflag -eq 1 ]
then
VBoxManage startvm "RustOS"
else
qemu-system-x86_64 -d int -no-reboot build/disk.img -monitor stdio
fi
| Add debugger to QEMU flags (VBox has some annoying bugs) | Add debugger to QEMU flags (VBox has some annoying bugs)
| Shell | mit | Dentosal/rust_os,Dentosal/rust_os,Dentosal/rust_os | shell | ## Code Before:
set -e
# give -v to open in VirtualBox
# give -u to run vagrant up
vboxflag=0
vagrantflag=0
while getopts 'abf:vu' flag; do
case "${flag}" in
u) vagrantflag=1 ;;
v) vboxflag=1 ;;
*) error "Unexpected option ${flag}" ;;
esac
done
if [ $vagrantflag -eq 1 ]
then
vagrant up
fi
vagrant ssh -c "cd /vagrant/ && ./build.sh"
if [ $vboxflag -eq 1 ]
then
VBoxManage startvm "RustOS"
else
qemu-system-x86_64 -d int -no-reboot build/disk.img
fi
## Instruction:
Add debugger to QEMU flags (VBox has some annoying bugs)
## Code After:
set -e
# give -v to open in VirtualBox
# give -u to run vagrant up
vboxflag=0
vagrantflag=0
while getopts 'abf:vu' flag; do
case "${flag}" in
u) vagrantflag=1 ;;
v) vboxflag=1 ;;
*) error "Unexpected option ${flag}" ;;
esac
done
if [ $vagrantflag -eq 1 ]
then
vagrant up
fi
vagrant ssh -c "cd /vagrant/ && ./build.sh"
if [ $vboxflag -eq 1 ]
then
VBoxManage startvm "RustOS"
else
qemu-system-x86_64 -d int -no-reboot build/disk.img -monitor stdio
fi
| set -e
# give -v to open in VirtualBox
# give -u to run vagrant up
vboxflag=0
vagrantflag=0
while getopts 'abf:vu' flag; do
case "${flag}" in
u) vagrantflag=1 ;;
v) vboxflag=1 ;;
*) error "Unexpected option ${flag}" ;;
esac
done
if [ $vagrantflag -eq 1 ]
then
vagrant up
fi
vagrant ssh -c "cd /vagrant/ && ./build.sh"
if [ $vboxflag -eq 1 ]
then
VBoxManage startvm "RustOS"
else
- qemu-system-x86_64 -d int -no-reboot build/disk.img
+ qemu-system-x86_64 -d int -no-reboot build/disk.img -monitor stdio
? +++++++++++++++
fi | 2 | 0.068966 | 1 | 1 |
66416ee8e0eec2e3c8069085c4d6a4027a2a4486 | install.sh | install.sh |
set -e
install_pipelines(){
echo "Checking dependencies..."
if [ ! -n "$(which python)" ]; then
echo "Need python installed !";
exit 1;
fi
if [ ! -n "$(which pip)" ]; then
echo "Need pip installed !";
exit 1;
fi
if [ ! -n "$(which git)" ]; then
echo "Need git installed !"
exit 1;
fi
echo "Installing pipelines..."
sudo pip install git+git://github.com/Wiredcraft/pipelines@dev > /dev/null
echo "Creating user..."
id pipelines > /dev/null 2>&1
if [ ! $? -eq 0 ]; then
sudo useradd -m -d /var/lib/pipelines -s /bin/false pipelines
fi
echo "Creating workspace..."
sudo mkdir -p /var/lib/pipelines/workspace
sudo chown -R pipelines:pipelines /var/lib/pipelines
pipelines --version
echo "Successfully installed"
}
install_pipelines "$@" |
set -e
install_pipelines(){
echo "Checking dependencies..."
if [ ! -n "$(which python)" ]; then
echo "Need python installed !";
exit 1;
fi
if [ ! -n "$(which pip)" ]; then
echo "Need pip installed !";
exit 1;
fi
if [ ! -n "$(which git)" ]; then
echo "Need git installed !"
exit 1;
fi
echo "Installing pipelines..."
sudo pip install git+git://github.com/Wiredcraft/pipelines@dev > /dev/null
echo "Creating user..."
id pipelines > /dev/null 2>&1
if [ ! $? -eq 0 ]; then
sudo useradd -m -d /var/lib/pipelines -s /bin/false pipelines
fi
echo "Creating workspace..."
sudo mkdir -p /var/lib/pipelines/workspace
sudo chown -R pipelines:pipelines /var/lib/pipelines
echo ""
pipelines --version
echo ""
echo "Successfully installed"
}
install_pipelines "$@" | Add line around version output | Add line around version output
| Shell | mit | Wiredcraft/pipelines,Wiredcraft/pipelines,Wiredcraft/pipelines,Wiredcraft/pipelines | shell | ## Code Before:
set -e
install_pipelines(){
echo "Checking dependencies..."
if [ ! -n "$(which python)" ]; then
echo "Need python installed !";
exit 1;
fi
if [ ! -n "$(which pip)" ]; then
echo "Need pip installed !";
exit 1;
fi
if [ ! -n "$(which git)" ]; then
echo "Need git installed !"
exit 1;
fi
echo "Installing pipelines..."
sudo pip install git+git://github.com/Wiredcraft/pipelines@dev > /dev/null
echo "Creating user..."
id pipelines > /dev/null 2>&1
if [ ! $? -eq 0 ]; then
sudo useradd -m -d /var/lib/pipelines -s /bin/false pipelines
fi
echo "Creating workspace..."
sudo mkdir -p /var/lib/pipelines/workspace
sudo chown -R pipelines:pipelines /var/lib/pipelines
pipelines --version
echo "Successfully installed"
}
install_pipelines "$@"
## Instruction:
Add line around version output
## Code After:
set -e
install_pipelines(){
echo "Checking dependencies..."
if [ ! -n "$(which python)" ]; then
echo "Need python installed !";
exit 1;
fi
if [ ! -n "$(which pip)" ]; then
echo "Need pip installed !";
exit 1;
fi
if [ ! -n "$(which git)" ]; then
echo "Need git installed !"
exit 1;
fi
echo "Installing pipelines..."
sudo pip install git+git://github.com/Wiredcraft/pipelines@dev > /dev/null
echo "Creating user..."
id pipelines > /dev/null 2>&1
if [ ! $? -eq 0 ]; then
sudo useradd -m -d /var/lib/pipelines -s /bin/false pipelines
fi
echo "Creating workspace..."
sudo mkdir -p /var/lib/pipelines/workspace
sudo chown -R pipelines:pipelines /var/lib/pipelines
echo ""
pipelines --version
echo ""
echo "Successfully installed"
}
install_pipelines "$@" |
set -e
install_pipelines(){
echo "Checking dependencies..."
if [ ! -n "$(which python)" ]; then
echo "Need python installed !";
exit 1;
fi
if [ ! -n "$(which pip)" ]; then
echo "Need pip installed !";
exit 1;
fi
if [ ! -n "$(which git)" ]; then
echo "Need git installed !"
exit 1;
fi
echo "Installing pipelines..."
sudo pip install git+git://github.com/Wiredcraft/pipelines@dev > /dev/null
echo "Creating user..."
id pipelines > /dev/null 2>&1
if [ ! $? -eq 0 ]; then
sudo useradd -m -d /var/lib/pipelines -s /bin/false pipelines
fi
echo "Creating workspace..."
sudo mkdir -p /var/lib/pipelines/workspace
sudo chown -R pipelines:pipelines /var/lib/pipelines
+ echo ""
pipelines --version
+ echo ""
echo "Successfully installed"
}
install_pipelines "$@" | 2 | 0.052632 | 2 | 0 |
91f071787c967be5c35532da6015b2e3cd9aad98 | recipes/default.rb | recipes/default.rb |
include_recipe 'timezone-ii'
include_recipe 'apt'
include_recipe 'zsh'
include_recipe 'git'
include_recipe 'chef-solo-search'
include_recipe 'users::sysadmins'
include_recipe 'sudo'
include_recipe 'oh-my-zsh'
if node['swapsize'] > 0
servermemory = `memsize=$(free -b | grep "Mem:" | awk '{print $2}');echo "$(($memsize/1024/1024))";`
swapfilesize = servermemory.to_i * node['swapsize'].to_i
swap_file "Create #{swapfilesize}MB swap file at /mnt/swap" do
path '/mnt/swap'
size swapfilesize
end
end
include_recipe 'appserver::userconfig'
include_recipe 'appserver::dbserver'
include_recipe 'appserver::webserver'
|
include_recipe 'timezone-ii'
include_recipe 'apt'
include_recipe 'zsh'
include_recipe 'git'
include_recipe 'chef-solo-search'
include_recipe 'users::sysadmins'
include_recipe 'sudo'
include_recipe 'oh-my-zsh'
if node['swapsize'] > 0
servermemory = `memsize=$(free -b | grep "Mem:" | awk '{print $2}');echo "$(($memsize/1024/1024))";`
swapfilesize = servermemory.to_i * node['swapsize'].to_i
swap_file "Create #{swapfilesize}MB swap file at /mnt/swap" do
path '/mnt/swap'
size swapfilesize
persist true
end
end
include_recipe 'appserver::userconfig'
include_recipe 'appserver::dbserver'
include_recipe 'appserver::webserver'
| Make swap persist after restart | Make swap persist after restart
| Ruby | mit | arvidbjorkstrom/chef-appserver,arvidbjorkstrom/chef-appserver,arvidbjorkstrom/chef-appserver | ruby | ## Code Before:
include_recipe 'timezone-ii'
include_recipe 'apt'
include_recipe 'zsh'
include_recipe 'git'
include_recipe 'chef-solo-search'
include_recipe 'users::sysadmins'
include_recipe 'sudo'
include_recipe 'oh-my-zsh'
if node['swapsize'] > 0
servermemory = `memsize=$(free -b | grep "Mem:" | awk '{print $2}');echo "$(($memsize/1024/1024))";`
swapfilesize = servermemory.to_i * node['swapsize'].to_i
swap_file "Create #{swapfilesize}MB swap file at /mnt/swap" do
path '/mnt/swap'
size swapfilesize
end
end
include_recipe 'appserver::userconfig'
include_recipe 'appserver::dbserver'
include_recipe 'appserver::webserver'
## Instruction:
Make swap persist after restart
## Code After:
include_recipe 'timezone-ii'
include_recipe 'apt'
include_recipe 'zsh'
include_recipe 'git'
include_recipe 'chef-solo-search'
include_recipe 'users::sysadmins'
include_recipe 'sudo'
include_recipe 'oh-my-zsh'
if node['swapsize'] > 0
servermemory = `memsize=$(free -b | grep "Mem:" | awk '{print $2}');echo "$(($memsize/1024/1024))";`
swapfilesize = servermemory.to_i * node['swapsize'].to_i
swap_file "Create #{swapfilesize}MB swap file at /mnt/swap" do
path '/mnt/swap'
size swapfilesize
persist true
end
end
include_recipe 'appserver::userconfig'
include_recipe 'appserver::dbserver'
include_recipe 'appserver::webserver'
|
include_recipe 'timezone-ii'
include_recipe 'apt'
include_recipe 'zsh'
include_recipe 'git'
include_recipe 'chef-solo-search'
include_recipe 'users::sysadmins'
include_recipe 'sudo'
include_recipe 'oh-my-zsh'
if node['swapsize'] > 0
servermemory = `memsize=$(free -b | grep "Mem:" | awk '{print $2}');echo "$(($memsize/1024/1024))";`
swapfilesize = servermemory.to_i * node['swapsize'].to_i
swap_file "Create #{swapfilesize}MB swap file at /mnt/swap" do
path '/mnt/swap'
size swapfilesize
+ persist true
end
end
include_recipe 'appserver::userconfig'
include_recipe 'appserver::dbserver'
include_recipe 'appserver::webserver' | 1 | 0.045455 | 1 | 0 |
ddd31507da9b59d8050451e22573e92275c6d283 | beerded_happiness/spec/controllers/event_spec.rb | beerded_happiness/spec/controllers/event_spec.rb | require 'rails_helper'
require 'spec_helper'
describe EventsController do
describe "#index" do
it "renders the index template" do
expect(get :index).to render_template (:index)
end
end
describe "#new" do
it "renders the new event template" do
expect(get :new).to render_template (:new)
end
it "renders form with new event" do
get :new
expect(assigns(:event)).to be_a_new Event
end
end
describe "#create" do
let!(:create_post) { post :create, :event => {title: "DBC BP", status: "Active", creator_id: user.id} }
it "adds a new event to the database" do
expect(Event.where(title: "DBC BP")).to exist
end
it "redirects events index page" do
expect(create_post).to redirect_to events_path
end
end
describe "#show" do
it "renders show template" do
expect(get :show, id: new_event.id).to render_template(:show)
end
end
end | require 'rails_helper'
require 'spec_helper'
describe EventsController do
let(:user) { User.create(name: "numichuu", password: "test", password_confirmation: "test", phone_number: "123-123-1234", email:"numichuu@gmail.com")}
let(:new_event) { Event.create(title: "BeerFest", status: "Active", creator_id: user.id) }
describe "#index" do
it "renders the index template" do
expect(get :index).to render_template (:index)
end
end
describe "#new" do
it "renders the new event template" do
expect(get :new).to render_template (:new)
end
it "renders form with new event" do
get :new
expect(assigns(:event)).to be_a_new Event
end
end
describe "#create" do
let!(:create_post) { post :create, :event => {title: "DBC BP", status: "Active", creator_id: user.id} }
it "adds a new event to the database" do
expect(Event.where(title: "DBC BP")).to exist
end
it "redirects events index page" do
expect(create_post).to redirect_to events_path
end
end
describe "#show" do
it "renders show template" do
expect(get :show, id: new_event.id).to render_template(:show)
end
end
end | Add one test for event show | Add one test for event show
| Ruby | mit | mud-turtles-2014/beerded-happiness,mud-turtles-2014/beerded-happiness | ruby | ## Code Before:
require 'rails_helper'
require 'spec_helper'
describe EventsController do
describe "#index" do
it "renders the index template" do
expect(get :index).to render_template (:index)
end
end
describe "#new" do
it "renders the new event template" do
expect(get :new).to render_template (:new)
end
it "renders form with new event" do
get :new
expect(assigns(:event)).to be_a_new Event
end
end
describe "#create" do
let!(:create_post) { post :create, :event => {title: "DBC BP", status: "Active", creator_id: user.id} }
it "adds a new event to the database" do
expect(Event.where(title: "DBC BP")).to exist
end
it "redirects events index page" do
expect(create_post).to redirect_to events_path
end
end
describe "#show" do
it "renders show template" do
expect(get :show, id: new_event.id).to render_template(:show)
end
end
end
## Instruction:
Add one test for event show
## Code After:
require 'rails_helper'
require 'spec_helper'
describe EventsController do
let(:user) { User.create(name: "numichuu", password: "test", password_confirmation: "test", phone_number: "123-123-1234", email:"numichuu@gmail.com")}
let(:new_event) { Event.create(title: "BeerFest", status: "Active", creator_id: user.id) }
describe "#index" do
it "renders the index template" do
expect(get :index).to render_template (:index)
end
end
describe "#new" do
it "renders the new event template" do
expect(get :new).to render_template (:new)
end
it "renders form with new event" do
get :new
expect(assigns(:event)).to be_a_new Event
end
end
describe "#create" do
let!(:create_post) { post :create, :event => {title: "DBC BP", status: "Active", creator_id: user.id} }
it "adds a new event to the database" do
expect(Event.where(title: "DBC BP")).to exist
end
it "redirects events index page" do
expect(create_post).to redirect_to events_path
end
end
describe "#show" do
it "renders show template" do
expect(get :show, id: new_event.id).to render_template(:show)
end
end
end | require 'rails_helper'
require 'spec_helper'
describe EventsController do
+ let(:user) { User.create(name: "numichuu", password: "test", password_confirmation: "test", phone_number: "123-123-1234", email:"numichuu@gmail.com")}
+ let(:new_event) { Event.create(title: "BeerFest", status: "Active", creator_id: user.id) }
describe "#index" do
it "renders the index template" do
expect(get :index).to render_template (:index)
end
end
describe "#new" do
it "renders the new event template" do
expect(get :new).to render_template (:new)
end
it "renders form with new event" do
get :new
expect(assigns(:event)).to be_a_new Event
end
end
describe "#create" do
let!(:create_post) { post :create, :event => {title: "DBC BP", status: "Active", creator_id: user.id} }
it "adds a new event to the database" do
expect(Event.where(title: "DBC BP")).to exist
end
it "redirects events index page" do
expect(create_post).to redirect_to events_path
end
end
describe "#show" do
it "renders show template" do
expect(get :show, id: new_event.id).to render_template(:show)
end
end
end | 2 | 0.04878 | 2 | 0 |
3437c68db3006c7e9cff6196ccfd0fc25403a7b3 | app/views/ransack_ui/_search.html.haml | app/views/ransack_ui/_search.html.haml | = search_form_for @ransack_search, :url => url_for(:action => :index), :html => {:method => :get, :class => "ransack_search"}, :remote => !!options[:remote] do |f|
:javascript
if (window.Ransack == null) { window.Ransack = {}; }
Ransack.alt_predicates_i18n = #{I18n.translate(:"ransack.predicates.alt", :default => {}).to_json}
.row
.span12.well
= f.grouping_fields do |g|
= render 'ransack_ui/grouping_fields', :f => g
%p
= link_to_add_fields t(:add_group), f, :grouping
.row
.span12.well
.pull-left
= f.sort_fields do |s|
= render 'ransack_ui/sort_fields', :f => s
.pull-right
= hidden_field_tag :distinct, '1'
= hidden_field_tag :page, '1'
= f.submit t(:submit), :class => 'btn btn-primary btn-large'
| = search_form_for @ransack_search, :url => url_for(:action => :index), :html => {:method => :get, :class => "ransack_search"}, :remote => !!options[:remote] do |f|
:javascript
if (window.Ransack == null) { window.Ransack = {}; }
Ransack.alt_predicates_i18n = #{I18n.translate(:"ransack.predicates.alt", :default => {}).to_json}
.row
.span12.well
= f.grouping_fields do |g|
= render 'ransack_ui/grouping_fields', :f => g
%p
= link_to_add_fields t(:add_group), f, :grouping
%hr
.pull-left
= f.sort_fields do |s|
= render 'ransack_ui/sort_fields', :f => s
.pull-right
= hidden_field_tag :distinct, '1'
= hidden_field_tag :page, '1'
= f.submit t(:submit), :class => 'btn btn-primary btn-large'
| Use a single container for search form, split with hr | Use a single container for search form, split with hr
| Haml | mit | scottbarrow/ransack_ui,Guestfolio/ransack_ui,ndbroadbent/ransack_ui,leikir/ransack_ui,ndbroadbent/ransack_ui,ndbroadbent/ransack_ui,Guestfolio/ransack_ui,leikir/ransack_ui,leikir/ransack_ui,scottbarrow/ransack_ui,Guestfolio/ransack_ui,scottbarrow/ransack_ui | haml | ## Code Before:
= search_form_for @ransack_search, :url => url_for(:action => :index), :html => {:method => :get, :class => "ransack_search"}, :remote => !!options[:remote] do |f|
:javascript
if (window.Ransack == null) { window.Ransack = {}; }
Ransack.alt_predicates_i18n = #{I18n.translate(:"ransack.predicates.alt", :default => {}).to_json}
.row
.span12.well
= f.grouping_fields do |g|
= render 'ransack_ui/grouping_fields', :f => g
%p
= link_to_add_fields t(:add_group), f, :grouping
.row
.span12.well
.pull-left
= f.sort_fields do |s|
= render 'ransack_ui/sort_fields', :f => s
.pull-right
= hidden_field_tag :distinct, '1'
= hidden_field_tag :page, '1'
= f.submit t(:submit), :class => 'btn btn-primary btn-large'
## Instruction:
Use a single container for search form, split with hr
## Code After:
= search_form_for @ransack_search, :url => url_for(:action => :index), :html => {:method => :get, :class => "ransack_search"}, :remote => !!options[:remote] do |f|
:javascript
if (window.Ransack == null) { window.Ransack = {}; }
Ransack.alt_predicates_i18n = #{I18n.translate(:"ransack.predicates.alt", :default => {}).to_json}
.row
.span12.well
= f.grouping_fields do |g|
= render 'ransack_ui/grouping_fields', :f => g
%p
= link_to_add_fields t(:add_group), f, :grouping
%hr
.pull-left
= f.sort_fields do |s|
= render 'ransack_ui/sort_fields', :f => s
.pull-right
= hidden_field_tag :distinct, '1'
= hidden_field_tag :page, '1'
= f.submit t(:submit), :class => 'btn btn-primary btn-large'
| = search_form_for @ransack_search, :url => url_for(:action => :index), :html => {:method => :get, :class => "ransack_search"}, :remote => !!options[:remote] do |f|
:javascript
if (window.Ransack == null) { window.Ransack = {}; }
Ransack.alt_predicates_i18n = #{I18n.translate(:"ransack.predicates.alt", :default => {}).to_json}
.row
.span12.well
= f.grouping_fields do |g|
= render 'ransack_ui/grouping_fields', :f => g
%p
= link_to_add_fields t(:add_group), f, :grouping
- .row
- .span12.well
+ %hr
+
.pull-left
= f.sort_fields do |s|
= render 'ransack_ui/sort_fields', :f => s
+
.pull-right
= hidden_field_tag :distinct, '1'
= hidden_field_tag :page, '1'
= f.submit t(:submit), :class => 'btn btn-primary btn-large' | 5 | 0.217391 | 3 | 2 |
2c27b19cda505e2b8f0f07d5a6456af9a8a03356 | src/main/kotlin/com/github/shiraji/permissionsdispatcherplugin/models/GeneratePMCodeModel.kt | src/main/kotlin/com/github/shiraji/permissionsdispatcherplugin/models/GeneratePMCodeModel.kt | package com.github.shiraji.permissionsdispatcherplugin.models
import com.intellij.openapi.project.Project
import com.intellij.psi.JavaPsiFacade
import com.intellij.psi.PsiClass
import com.intellij.psi.search.GlobalSearchScope
class GeneratePMCodeModel {
fun createPsiClass(qualifiedName: String, project: Project): PsiClass? {
val psiFacade = JavaPsiFacade.getInstance(project);
val searchScope = GlobalSearchScope.allScope(project);
return psiFacade.findClass(qualifiedName, searchScope);
}
} | package com.github.shiraji.permissionsdispatcherplugin.models
import com.intellij.openapi.project.Project
import com.intellij.psi.JavaPsiFacade
import com.intellij.psi.PsiClass
import com.intellij.psi.search.GlobalSearchScope
class GeneratePMCodeModel {
fun isActivity(aClass: PsiClass, project: Project): Boolean {
val activity = createPsiClass("android.app.Activity", project) ?: return false
return aClass.isInheritor(activity, true)
}
fun isFragment(aClass: PsiClass, project: Project): Boolean {
val fragment = createPsiClass("android.app.Fragment", project) ?: return false
return aClass.isInheritor(fragment, true)
}
fun isSupportFragment(aClass: PsiClass, project: Project): Boolean {
val fragment = createPsiClass("android.support.v4.app.Fragment", project) ?: return false
return aClass.isInheritor(fragment, true)
}
fun createPsiClass(qualifiedName: String, project: Project): PsiClass? {
val psiFacade = JavaPsiFacade.getInstance(project);
val searchScope = GlobalSearchScope.allScope(project);
return psiFacade.findClass(qualifiedName, searchScope);
}
} | Add features that find if the class is Activity or Fragment | Add features that find if the class is Activity or Fragment
| Kotlin | apache-2.0 | shiraji/permissions-dispatcher-plugin,shiraji/permissions-dispatcher-plugin,shiraji/permissions-dispatcher-plugin | kotlin | ## Code Before:
package com.github.shiraji.permissionsdispatcherplugin.models
import com.intellij.openapi.project.Project
import com.intellij.psi.JavaPsiFacade
import com.intellij.psi.PsiClass
import com.intellij.psi.search.GlobalSearchScope
class GeneratePMCodeModel {
fun createPsiClass(qualifiedName: String, project: Project): PsiClass? {
val psiFacade = JavaPsiFacade.getInstance(project);
val searchScope = GlobalSearchScope.allScope(project);
return psiFacade.findClass(qualifiedName, searchScope);
}
}
## Instruction:
Add features that find if the class is Activity or Fragment
## Code After:
package com.github.shiraji.permissionsdispatcherplugin.models
import com.intellij.openapi.project.Project
import com.intellij.psi.JavaPsiFacade
import com.intellij.psi.PsiClass
import com.intellij.psi.search.GlobalSearchScope
class GeneratePMCodeModel {
fun isActivity(aClass: PsiClass, project: Project): Boolean {
val activity = createPsiClass("android.app.Activity", project) ?: return false
return aClass.isInheritor(activity, true)
}
fun isFragment(aClass: PsiClass, project: Project): Boolean {
val fragment = createPsiClass("android.app.Fragment", project) ?: return false
return aClass.isInheritor(fragment, true)
}
fun isSupportFragment(aClass: PsiClass, project: Project): Boolean {
val fragment = createPsiClass("android.support.v4.app.Fragment", project) ?: return false
return aClass.isInheritor(fragment, true)
}
fun createPsiClass(qualifiedName: String, project: Project): PsiClass? {
val psiFacade = JavaPsiFacade.getInstance(project);
val searchScope = GlobalSearchScope.allScope(project);
return psiFacade.findClass(qualifiedName, searchScope);
}
} | package com.github.shiraji.permissionsdispatcherplugin.models
import com.intellij.openapi.project.Project
import com.intellij.psi.JavaPsiFacade
import com.intellij.psi.PsiClass
import com.intellij.psi.search.GlobalSearchScope
class GeneratePMCodeModel {
+
+ fun isActivity(aClass: PsiClass, project: Project): Boolean {
+ val activity = createPsiClass("android.app.Activity", project) ?: return false
+ return aClass.isInheritor(activity, true)
+ }
+
+ fun isFragment(aClass: PsiClass, project: Project): Boolean {
+ val fragment = createPsiClass("android.app.Fragment", project) ?: return false
+ return aClass.isInheritor(fragment, true)
+ }
+
+ fun isSupportFragment(aClass: PsiClass, project: Project): Boolean {
+ val fragment = createPsiClass("android.support.v4.app.Fragment", project) ?: return false
+ return aClass.isInheritor(fragment, true)
+ }
+
fun createPsiClass(qualifiedName: String, project: Project): PsiClass? {
val psiFacade = JavaPsiFacade.getInstance(project);
val searchScope = GlobalSearchScope.allScope(project);
return psiFacade.findClass(qualifiedName, searchScope);
}
} | 16 | 1.142857 | 16 | 0 |
cb4a7135b91aace4a47c0ae47b809ac51a8fb862 | osx/config/fish/aliases.fish | osx/config/fish/aliases.fish | function ll
ls -al
end
function gll
git log --graph --date=short --pretty=format:'%Cgreen%h %Cblue%cd (%cr) %Cred%an%C(yellow)%d%Creset: %s'
end
function glll
git log --graph --stat --date=short --pretty=format:'%Cgreen%h %Cblue%cd (%cr) %Cred%an%C(yellow)%d%Creset: %s'
end
function goops
git add -A
git reset --hard HEAD
end
| function ll
ls -al
end
function gll
git log --graph --date=short --pretty=format:'%Cgreen%h %Cblue%cd (%cr) %Cred%an%C(yellow)%d%Creset: %s'
end
function glll
git log --graph --stat --date=short --pretty=format:'%Cgreen%h %Cblue%cd (%cr) %Cred%an%C(yellow)%d%Creset: %s'
end
function goops
git add -A
git reset --hard HEAD
end
# Converts ML code into Reason
alias mlre "pbpaste | refmt -use-stdin true -parse ml -print re -is-interface-pp false | pbcopy"
# Converts Reason code into ML
alias reml "pbpaste | refmt -use-stdin true -parse re -print ml -is-interface-pp false | pbcopy"
| Add conversion shortcuts for Reason | Add conversion shortcuts for Reason
| fish | mit | maxdeviant/dotfiles | fish | ## Code Before:
function ll
ls -al
end
function gll
git log --graph --date=short --pretty=format:'%Cgreen%h %Cblue%cd (%cr) %Cred%an%C(yellow)%d%Creset: %s'
end
function glll
git log --graph --stat --date=short --pretty=format:'%Cgreen%h %Cblue%cd (%cr) %Cred%an%C(yellow)%d%Creset: %s'
end
function goops
git add -A
git reset --hard HEAD
end
## Instruction:
Add conversion shortcuts for Reason
## Code After:
function ll
ls -al
end
function gll
git log --graph --date=short --pretty=format:'%Cgreen%h %Cblue%cd (%cr) %Cred%an%C(yellow)%d%Creset: %s'
end
function glll
git log --graph --stat --date=short --pretty=format:'%Cgreen%h %Cblue%cd (%cr) %Cred%an%C(yellow)%d%Creset: %s'
end
function goops
git add -A
git reset --hard HEAD
end
# Converts ML code into Reason
alias mlre "pbpaste | refmt -use-stdin true -parse ml -print re -is-interface-pp false | pbcopy"
# Converts Reason code into ML
alias reml "pbpaste | refmt -use-stdin true -parse re -print ml -is-interface-pp false | pbcopy"
| function ll
ls -al
end
function gll
git log --graph --date=short --pretty=format:'%Cgreen%h %Cblue%cd (%cr) %Cred%an%C(yellow)%d%Creset: %s'
end
function glll
git log --graph --stat --date=short --pretty=format:'%Cgreen%h %Cblue%cd (%cr) %Cred%an%C(yellow)%d%Creset: %s'
end
function goops
git add -A
git reset --hard HEAD
end
+
+ # Converts ML code into Reason
+ alias mlre "pbpaste | refmt -use-stdin true -parse ml -print re -is-interface-pp false | pbcopy"
+
+ # Converts Reason code into ML
+ alias reml "pbpaste | refmt -use-stdin true -parse re -print ml -is-interface-pp false | pbcopy" | 6 | 0.375 | 6 | 0 |
877b9c795f7e8d93db591e248def0f2a3d9ec048 | .travis.yml | .travis.yml | language: go
go:
- 1.3
script:
- export PATH="$PATH:$GOPATH/bin"
- make setup
- make deps
- make dist
| language: go
go:
- 1.3
env:
- PATH=/home/travis/gopath/bin:$PATH
before_install:
- make setup
- make deps
install:
- make install
script:
- make dist
| Modify path of GO bins | Modify path of GO bins
| YAML | mit | Tomohiro/gyazo-cli,syohex/gyazo-cli | yaml | ## Code Before:
language: go
go:
- 1.3
script:
- export PATH="$PATH:$GOPATH/bin"
- make setup
- make deps
- make dist
## Instruction:
Modify path of GO bins
## Code After:
language: go
go:
- 1.3
env:
- PATH=/home/travis/gopath/bin:$PATH
before_install:
- make setup
- make deps
install:
- make install
script:
- make dist
| language: go
go:
- 1.3
-
- script:
- - export PATH="$PATH:$GOPATH/bin"
+ env:
+ - PATH=/home/travis/gopath/bin:$PATH
+ before_install:
- make setup
- make deps
+ install:
+ - make install
+ script:
- make dist | 9 | 0.9 | 6 | 3 |
458804c51c1d335f7cdae916b46e109c9fda4bad | .github/config.yml | .github/config.yml | newIssueWelcomeComment: >
Hello! Thank you for opening your first issue in this repo. It’s people like you who make these host files better!
# Configuration for new-pr-welcome - https://github.com/behaviorbot/new-pr-welcome
# Comment to be posted to on PRs from first time contributors in your repository
newPRWelcomeComment: >
Thank you for submitting this pull request! We’ll get back to you as soon as we can!
# Configuration for first-pr-merge - https://github.com/behaviorbot/first-pr-merge
# Comment to be posted to on pull requests merged by a first time user
firstPRMergeComment: >
Congratulations on merging your first pull request! 🎉🎉🎉
# It is recommend to include as many gifs and emojis as possible! | newIssueWelcomeComment: >
Hello! Thank you for opening your first issue in this repo. It’s people like you who make these host files better!
# Configuration for new-pr-welcome - https://github.com/behaviorbot/new-pr-welcome
# Comment to be posted to on PRs from first time contributors in your repository
newPRWelcomeComment: >
Thank you for submitting this pull request! We’ll get back to you as soon as we can!
# Configuration for first-pr-merge - https://github.com/behaviorbot/first-pr-merge
# Comment to be posted to on pull requests merged by a first time user
firstPRMergeComment: >
Congratulations on merging your first pull request here! 🎉🎉🎉 You are now in our [list of contributors](https://github.com/StevenBlack/hosts/graphs/contributors). Welcome!
# It is recommend to include as many gifs and emojis as possible! | Upgrade the congratulations on merging message to first time contributors. | Upgrade the congratulations on merging message to first time contributors.
| YAML | mit | gfyoung/hosts,borisschapira/hosts,StevenBlack/hosts,StevenBlack/hosts,gfyoung/hosts | yaml | ## Code Before:
newIssueWelcomeComment: >
Hello! Thank you for opening your first issue in this repo. It’s people like you who make these host files better!
# Configuration for new-pr-welcome - https://github.com/behaviorbot/new-pr-welcome
# Comment to be posted to on PRs from first time contributors in your repository
newPRWelcomeComment: >
Thank you for submitting this pull request! We’ll get back to you as soon as we can!
# Configuration for first-pr-merge - https://github.com/behaviorbot/first-pr-merge
# Comment to be posted to on pull requests merged by a first time user
firstPRMergeComment: >
Congratulations on merging your first pull request! 🎉🎉🎉
# It is recommend to include as many gifs and emojis as possible!
## Instruction:
Upgrade the congratulations on merging message to first time contributors.
## Code After:
newIssueWelcomeComment: >
Hello! Thank you for opening your first issue in this repo. It’s people like you who make these host files better!
# Configuration for new-pr-welcome - https://github.com/behaviorbot/new-pr-welcome
# Comment to be posted to on PRs from first time contributors in your repository
newPRWelcomeComment: >
Thank you for submitting this pull request! We’ll get back to you as soon as we can!
# Configuration for first-pr-merge - https://github.com/behaviorbot/first-pr-merge
# Comment to be posted to on pull requests merged by a first time user
firstPRMergeComment: >
Congratulations on merging your first pull request here! 🎉🎉🎉 You are now in our [list of contributors](https://github.com/StevenBlack/hosts/graphs/contributors). Welcome!
# It is recommend to include as many gifs and emojis as possible! | newIssueWelcomeComment: >
Hello! Thank you for opening your first issue in this repo. It’s people like you who make these host files better!
# Configuration for new-pr-welcome - https://github.com/behaviorbot/new-pr-welcome
# Comment to be posted to on PRs from first time contributors in your repository
newPRWelcomeComment: >
Thank you for submitting this pull request! We’ll get back to you as soon as we can!
# Configuration for first-pr-merge - https://github.com/behaviorbot/first-pr-merge
# Comment to be posted to on pull requests merged by a first time user
firstPRMergeComment: >
- Congratulations on merging your first pull request! 🎉🎉🎉
+ Congratulations on merging your first pull request here! 🎉🎉🎉 You are now in our [list of contributors](https://github.com/StevenBlack/hosts/graphs/contributors). Welcome!
# It is recommend to include as many gifs and emojis as possible! | 2 | 0.125 | 1 | 1 |
43a515f25e7bebd90ad2ccce4f60f6b5e0fe71d0 | src/util/arrayEqual.js | src/util/arrayEqual.js | export default function arrayEqual (a, b) {
if (a.length === b.length) {
return a.every((value, index) => {
return value === b[index];
});
}
return false;
}
| export default function arrayEqual (a, b) {
if (a.length === b.length) {
return a.every((value, index) => value === b[index]);
}
return false;
}
| Tidy up array equal function. | Tidy up array equal function.
| JavaScript | mit | ben-eb/css-values | javascript | ## Code Before:
export default function arrayEqual (a, b) {
if (a.length === b.length) {
return a.every((value, index) => {
return value === b[index];
});
}
return false;
}
## Instruction:
Tidy up array equal function.
## Code After:
export default function arrayEqual (a, b) {
if (a.length === b.length) {
return a.every((value, index) => value === b[index]);
}
return false;
}
| export default function arrayEqual (a, b) {
if (a.length === b.length) {
- return a.every((value, index) => {
? ^
+ return a.every((value, index) => value === b[index]);
? ^^^^^^^^^^^^^^^^^^^^
- return value === b[index];
- });
}
return false;
} | 4 | 0.444444 | 1 | 3 |
d81972f565a4316bcc1ed67a17d02d991147ca16 | appveyor.yml | appveyor.yml | version: 1.0.{build}
environment:
ftp_user:
secure: f0a7Ydwv2347FyFroA9W2g==
ftp_password:
secure: Sei1iycTJNEoAKXYRT6R4QsjCmBYgl81iOd2Dp6Ch/s=
install:
- choco install pretzel -y
- choco install pretzel.scriptcs -y
- ps: $env:Path += ";C:\\Python35;C:\\Python35\\Scripts"
- pip install creep
cache:
- C:\tools\Pretzel -> appveyor.yml
- '%LOCALAPPDATA%\pip\Cache -> appveyor.yml'
build_script:
- ps: .\BakeAndDeploy.ps1
test: off
artifacts:
- path: src/_site
name: compiled_site
| version: 1.0.{build}
environment:
ftp_user:
secure: f0a7Ydwv2347FyFroA9W2g==
ftp_password:
secure: Sei1iycTJNEoAKXYRT6R4QsjCmBYgl81iOd2Dp6Ch/s=
install:
- choco install pretzel -y
- choco install pretzel.scriptcs -y
- ps: $env:Path += ";C:\\Python35;C:\\Python35\\Scripts"
- pip install creep
cache:
- C:\tools\Pretzel -> appveyor.yml
- C:\ProgramData\chocolatey\lib -> appveyor.yml
- '%LOCALAPPDATA%\pip\Cache -> appveyor.yml'
build_script:
- ps: .\BakeAndDeploy.ps1
test: off
artifacts:
- path: src/_site
name: compiled_site
| Add cache for chocolatey packages | Add cache for chocolatey packages
| YAML | apache-2.0 | laedit/laedit.net,laedit/laedit.net | yaml | ## Code Before:
version: 1.0.{build}
environment:
ftp_user:
secure: f0a7Ydwv2347FyFroA9W2g==
ftp_password:
secure: Sei1iycTJNEoAKXYRT6R4QsjCmBYgl81iOd2Dp6Ch/s=
install:
- choco install pretzel -y
- choco install pretzel.scriptcs -y
- ps: $env:Path += ";C:\\Python35;C:\\Python35\\Scripts"
- pip install creep
cache:
- C:\tools\Pretzel -> appveyor.yml
- '%LOCALAPPDATA%\pip\Cache -> appveyor.yml'
build_script:
- ps: .\BakeAndDeploy.ps1
test: off
artifacts:
- path: src/_site
name: compiled_site
## Instruction:
Add cache for chocolatey packages
## Code After:
version: 1.0.{build}
environment:
ftp_user:
secure: f0a7Ydwv2347FyFroA9W2g==
ftp_password:
secure: Sei1iycTJNEoAKXYRT6R4QsjCmBYgl81iOd2Dp6Ch/s=
install:
- choco install pretzel -y
- choco install pretzel.scriptcs -y
- ps: $env:Path += ";C:\\Python35;C:\\Python35\\Scripts"
- pip install creep
cache:
- C:\tools\Pretzel -> appveyor.yml
- C:\ProgramData\chocolatey\lib -> appveyor.yml
- '%LOCALAPPDATA%\pip\Cache -> appveyor.yml'
build_script:
- ps: .\BakeAndDeploy.ps1
test: off
artifacts:
- path: src/_site
name: compiled_site
| version: 1.0.{build}
environment:
ftp_user:
secure: f0a7Ydwv2347FyFroA9W2g==
ftp_password:
secure: Sei1iycTJNEoAKXYRT6R4QsjCmBYgl81iOd2Dp6Ch/s=
install:
- choco install pretzel -y
- choco install pretzel.scriptcs -y
- ps: $env:Path += ";C:\\Python35;C:\\Python35\\Scripts"
- pip install creep
cache:
- C:\tools\Pretzel -> appveyor.yml
+ - C:\ProgramData\chocolatey\lib -> appveyor.yml
- '%LOCALAPPDATA%\pip\Cache -> appveyor.yml'
build_script:
- ps: .\BakeAndDeploy.ps1
test: off
artifacts:
- path: src/_site
name: compiled_site | 1 | 0.038462 | 1 | 0 |
4a2aa375a6356b809378a615ea8105b8a2d4ef68 | frontend/partials/profile.html | frontend/partials/profile.html | <h1>{{ 'Profile' | translate }} {{ user.login }}</h1>
<div class="table-responsive">
<table class="table">
<tbody>
<tr>
<td>
<span>
{{ "Win count" | translate }}:
</span>
</td>
<td>
{{ user.win }}
</td>
</tr>
<tr>
<td>
<span>
{{ "Draw count" | translate }}:
</span>
</td>
<td>
{{ user.draw }}
</td>
</tr>
<tr>
<td>
<span>
{{ "Lose count" | translate }}:
</span>
</td>
<td>
{{ user.lose }}
</td>
</tr>
<tr ng-if="user.pgn_link">
<td>
<span>
{{ "Download player games" | translate }}:
</span>
</td>
<td>
<a href="{{ user.pgn_link }}">pgn</a>
</td>
</tr>
</tbody>
</table>
</div> | <h1>{{ 'Profile' | translate }} {{ user.login }}</h1>
<div class="table-responsive">
<table class="table">
<tbody>
<tr>
<td>
<span>
{{ "Win count" | translate }}:
</span>
</td>
<td>
{{ user.win }}
</td>
</tr>
<tr>
<td>
<span>
{{ "Draw count" | translate }}:
</span>
</td>
<td>
{{ user.draw }}
</td>
</tr>
<tr>
<td>
<span>
{{ "Lose count" | translate }}:
</span>
</td>
<td>
{{ user.lose }}
</td>
</tr>
<tr ng-if="user.pgn_link">
<td>
<span>
{{ "Download player games" | translate }}:
</span>
</td>
<td>
<a target="_blank" href="{{ user.pgn_link }}">pgn</a>
</td>
</tr>
</tbody>
</table>
</div> | Fix for download games. Open in blacnk page. | fix(frontend): Fix for download games. Open in blacnk page.
| HTML | mit | StasPiv/playzone,StasPiv/playzone,StasPiv/playzone | html | ## Code Before:
<h1>{{ 'Profile' | translate }} {{ user.login }}</h1>
<div class="table-responsive">
<table class="table">
<tbody>
<tr>
<td>
<span>
{{ "Win count" | translate }}:
</span>
</td>
<td>
{{ user.win }}
</td>
</tr>
<tr>
<td>
<span>
{{ "Draw count" | translate }}:
</span>
</td>
<td>
{{ user.draw }}
</td>
</tr>
<tr>
<td>
<span>
{{ "Lose count" | translate }}:
</span>
</td>
<td>
{{ user.lose }}
</td>
</tr>
<tr ng-if="user.pgn_link">
<td>
<span>
{{ "Download player games" | translate }}:
</span>
</td>
<td>
<a href="{{ user.pgn_link }}">pgn</a>
</td>
</tr>
</tbody>
</table>
</div>
## Instruction:
fix(frontend): Fix for download games. Open in blacnk page.
## Code After:
<h1>{{ 'Profile' | translate }} {{ user.login }}</h1>
<div class="table-responsive">
<table class="table">
<tbody>
<tr>
<td>
<span>
{{ "Win count" | translate }}:
</span>
</td>
<td>
{{ user.win }}
</td>
</tr>
<tr>
<td>
<span>
{{ "Draw count" | translate }}:
</span>
</td>
<td>
{{ user.draw }}
</td>
</tr>
<tr>
<td>
<span>
{{ "Lose count" | translate }}:
</span>
</td>
<td>
{{ user.lose }}
</td>
</tr>
<tr ng-if="user.pgn_link">
<td>
<span>
{{ "Download player games" | translate }}:
</span>
</td>
<td>
<a target="_blank" href="{{ user.pgn_link }}">pgn</a>
</td>
</tr>
</tbody>
</table>
</div> | <h1>{{ 'Profile' | translate }} {{ user.login }}</h1>
<div class="table-responsive">
<table class="table">
<tbody>
<tr>
<td>
<span>
{{ "Win count" | translate }}:
</span>
</td>
<td>
{{ user.win }}
</td>
</tr>
<tr>
<td>
<span>
{{ "Draw count" | translate }}:
</span>
</td>
<td>
{{ user.draw }}
</td>
</tr>
<tr>
<td>
<span>
{{ "Lose count" | translate }}:
</span>
</td>
<td>
{{ user.lose }}
</td>
</tr>
<tr ng-if="user.pgn_link">
<td>
<span>
{{ "Download player games" | translate }}:
</span>
</td>
<td>
- <a href="{{ user.pgn_link }}">pgn</a>
+ <a target="_blank" href="{{ user.pgn_link }}">pgn</a>
? ++++++++++++++++
</td>
</tr>
</tbody>
</table>
</div> | 2 | 0.042553 | 1 | 1 |
d500e290f8c1422f74b1d8c8d2bbb8ec9e5529cb | misc/singleton.py | misc/singleton.py |
class Singleton(object):
"""
This is a class that implements singleton for its subclasses.
The technique is based on a variant of other techniques found in:
http://stackoverflow.com/questions/6760685/creating-a-singleton-in-python
https://gist.github.com/werediver/4396488
The technique is simply to build a map of classes to their unique instances.
The first time called for some particular
class the class is mapped to the instance. On other class to the same class, the mapped instance is returned.
"""
_instances = {}
@classmethod
def instance(cls):
if cls not in cls._instances:
cls._instances[cls] = cls()
return cls._instances[cls]
|
class Singleton(object):
"""
This is a class that implements singleton for its subclasses.
The technique is based on a variant of other techniques found in:
http://stackoverflow.com/questions/6760685/creating-a-singleton-in-python
https://gist.github.com/werediver/4396488
The technique is simply to build a map of classes to their unique instances.
The first time called for some particular
class the class is mapped to the instance. On other class to the same class, the mapped instance is returned.
Classes that use this must:
1) Add Singleton as a superclass.
2) Have this signature for the constructor: __init__(self, *args, **kwargs)
"""
_instances = {}
@classmethod
def instance(cls, *args, **kwargs):
if cls not in cls._instances:
cls._instances[cls] = cls(*args, **kwargs)
return cls._instances[cls]
| Add comments to Singleton about usage. | Add comments to Singleton about usage.
| Python | mit | dpazel/music_rep | python | ## Code Before:
class Singleton(object):
"""
This is a class that implements singleton for its subclasses.
The technique is based on a variant of other techniques found in:
http://stackoverflow.com/questions/6760685/creating-a-singleton-in-python
https://gist.github.com/werediver/4396488
The technique is simply to build a map of classes to their unique instances.
The first time called for some particular
class the class is mapped to the instance. On other class to the same class, the mapped instance is returned.
"""
_instances = {}
@classmethod
def instance(cls):
if cls not in cls._instances:
cls._instances[cls] = cls()
return cls._instances[cls]
## Instruction:
Add comments to Singleton about usage.
## Code After:
class Singleton(object):
"""
This is a class that implements singleton for its subclasses.
The technique is based on a variant of other techniques found in:
http://stackoverflow.com/questions/6760685/creating-a-singleton-in-python
https://gist.github.com/werediver/4396488
The technique is simply to build a map of classes to their unique instances.
The first time called for some particular
class the class is mapped to the instance. On other class to the same class, the mapped instance is returned.
Classes that use this must:
1) Add Singleton as a superclass.
2) Have this signature for the constructor: __init__(self, *args, **kwargs)
"""
_instances = {}
@classmethod
def instance(cls, *args, **kwargs):
if cls not in cls._instances:
cls._instances[cls] = cls(*args, **kwargs)
return cls._instances[cls]
| -
class Singleton(object):
"""
This is a class that implements singleton for its subclasses.
The technique is based on a variant of other techniques found in:
http://stackoverflow.com/questions/6760685/creating-a-singleton-in-python
https://gist.github.com/werediver/4396488
The technique is simply to build a map of classes to their unique instances.
The first time called for some particular
class the class is mapped to the instance. On other class to the same class, the mapped instance is returned.
+ Classes that use this must:
+ 1) Add Singleton as a superclass.
+ 2) Have this signature for the constructor: __init__(self, *args, **kwargs)
"""
_instances = {}
@classmethod
- def instance(cls):
+ def instance(cls, *args, **kwargs):
if cls not in cls._instances:
- cls._instances[cls] = cls()
+ cls._instances[cls] = cls(*args, **kwargs)
? +++++++++++++++
return cls._instances[cls]
| 8 | 0.380952 | 5 | 3 |
4808d01c700ff19f5ff8ca87b2b344c09e973642 | package.json | package.json | {
"name": "mapline",
"version": "0.12.0",
"description": "Create a pdf of maps along a track in printable quality",
"main": "src/index.js",
"keywords": [],
"author": "sgelb",
"license": "GPL-3.0-or-later",
"scripts": {
"serve": "parcel serve index.html",
"build": "parcel build --no-source-maps --detailed-report index.html"
},
"browserslist": [
"defaults",
"not ie <999",
"not Android < 999"
],
"dependencies": {
"@mapbox/geojson-normalize": "^0.0.1",
"@mapbox/mapbox-gl-language": "^0.10.0",
"@mapbox/togeojson": "^0.16.0",
"cheap-ruler": "^2.5.1",
"jspdf": "^1.4.1",
"mapbox-gl": "^0.47.0",
"xmldom": "^0.1.27"
},
"devDependencies": {
"babel-core": "^6.26.3",
"babel-preset-env": "^1.7.0",
"cssnano": "^4.0.3",
"parcel-bundler": "^1.9.7"
}
}
| {
"name": "mapline",
"version": "0.12.0",
"description": "Create maps along a track in print quality.",
"main": "src/index.js",
"keywords": [],
"homepage": "https://github.com/sgelb/mapline",
"author": "sgelb",
"license": "GPL-3.0-or-later",
"scripts": {
"serve": "parcel serve index.html",
"build": "parcel build --no-cache --no-source-maps --detailed-report index.html",
"demo": "parcel build --no-cache --no-source-maps --public-url ./ --out-dir ./demo/mapline/ --detailed-report index.html "
},
"browserslist": [
"defaults",
"not Explorer < 999",
"not Android < 999"
],
"dependencies": {
"@mapbox/geojson-normalize": "^0.0.1",
"@mapbox/mapbox-gl-language": "^0.10.0",
"@mapbox/togeojson": "^0.16.0",
"cheap-ruler": "^2.5.1",
"jspdf": "^1.4.1",
"mapbox-gl": "^0.47.0",
"xmldom": "^0.1.27"
},
"devDependencies": {
"babel-core": "^6.26.3",
"babel-preset-env": "^1.7.0",
"cssnano": "^4.0.3",
"parcel-bundler": "^1.9.7"
}
}
| Add build script for demo page | Add build script for demo page
| JSON | agpl-3.0 | sgelb/mapline,sgelb/mapline,sgelb/mapline | json | ## Code Before:
{
"name": "mapline",
"version": "0.12.0",
"description": "Create a pdf of maps along a track in printable quality",
"main": "src/index.js",
"keywords": [],
"author": "sgelb",
"license": "GPL-3.0-or-later",
"scripts": {
"serve": "parcel serve index.html",
"build": "parcel build --no-source-maps --detailed-report index.html"
},
"browserslist": [
"defaults",
"not ie <999",
"not Android < 999"
],
"dependencies": {
"@mapbox/geojson-normalize": "^0.0.1",
"@mapbox/mapbox-gl-language": "^0.10.0",
"@mapbox/togeojson": "^0.16.0",
"cheap-ruler": "^2.5.1",
"jspdf": "^1.4.1",
"mapbox-gl": "^0.47.0",
"xmldom": "^0.1.27"
},
"devDependencies": {
"babel-core": "^6.26.3",
"babel-preset-env": "^1.7.0",
"cssnano": "^4.0.3",
"parcel-bundler": "^1.9.7"
}
}
## Instruction:
Add build script for demo page
## Code After:
{
"name": "mapline",
"version": "0.12.0",
"description": "Create maps along a track in print quality.",
"main": "src/index.js",
"keywords": [],
"homepage": "https://github.com/sgelb/mapline",
"author": "sgelb",
"license": "GPL-3.0-or-later",
"scripts": {
"serve": "parcel serve index.html",
"build": "parcel build --no-cache --no-source-maps --detailed-report index.html",
"demo": "parcel build --no-cache --no-source-maps --public-url ./ --out-dir ./demo/mapline/ --detailed-report index.html "
},
"browserslist": [
"defaults",
"not Explorer < 999",
"not Android < 999"
],
"dependencies": {
"@mapbox/geojson-normalize": "^0.0.1",
"@mapbox/mapbox-gl-language": "^0.10.0",
"@mapbox/togeojson": "^0.16.0",
"cheap-ruler": "^2.5.1",
"jspdf": "^1.4.1",
"mapbox-gl": "^0.47.0",
"xmldom": "^0.1.27"
},
"devDependencies": {
"babel-core": "^6.26.3",
"babel-preset-env": "^1.7.0",
"cssnano": "^4.0.3",
"parcel-bundler": "^1.9.7"
}
}
| {
"name": "mapline",
"version": "0.12.0",
- "description": "Create a pdf of maps along a track in printable quality",
? --------- ----
+ "description": "Create maps along a track in print quality.",
? +
"main": "src/index.js",
"keywords": [],
+ "homepage": "https://github.com/sgelb/mapline",
"author": "sgelb",
"license": "GPL-3.0-or-later",
"scripts": {
"serve": "parcel serve index.html",
- "build": "parcel build --no-source-maps --detailed-report index.html"
+ "build": "parcel build --no-cache --no-source-maps --detailed-report index.html",
? +++++++++++ +
+ "demo": "parcel build --no-cache --no-source-maps --public-url ./ --out-dir ./demo/mapline/ --detailed-report index.html "
},
"browserslist": [
"defaults",
- "not ie <999",
? ^
+ "not Explorer < 999",
? ^^^^^^ + +
"not Android < 999"
],
"dependencies": {
"@mapbox/geojson-normalize": "^0.0.1",
"@mapbox/mapbox-gl-language": "^0.10.0",
"@mapbox/togeojson": "^0.16.0",
"cheap-ruler": "^2.5.1",
"jspdf": "^1.4.1",
"mapbox-gl": "^0.47.0",
"xmldom": "^0.1.27"
},
"devDependencies": {
"babel-core": "^6.26.3",
"babel-preset-env": "^1.7.0",
"cssnano": "^4.0.3",
"parcel-bundler": "^1.9.7"
}
} | 8 | 0.242424 | 5 | 3 |
74b31ba7fec330ec167c2e001f60695272da71b8 | pages/views.py | pages/views.py | from django.views import generic
from django.contrib.auth.models import Group
from django_countries.fields import Country
from hosting.models import Profile, Place
from hosting.utils import sort_by_name
class AboutView(generic.TemplateView):
template_name = 'pages/about.html'
about = AboutView.as_view()
class TermsAndConditionsView(generic.TemplateView):
template_name = 'pages/terms_conditions.html'
terms_conditions = TermsAndConditionsView.as_view()
class SupervisorsView(generic.TemplateView):
template_name = 'pages/supervisors.html'
def countries(self):
places = Place.objects.filter(in_book=True)
groups = Group.objects.exclude(user=None)
countries = sort_by_name({p.country for p in places})
for country in countries:
try:
group = groups.get(name=str(country))
country.supervisors = sorted(user.profile for user in group.user_set.all())
except Group.DoesNotExist:
pass
country.place_count = places.filter(country=country).count()
return countries
supervisors = SupervisorsView.as_view()
class FaqView(generic.TemplateView):
template_name = 'pages/faq.html'
faq = FaqView.as_view()
| from django.views import generic
from django.contrib.auth.models import Group
from hosting.models import Place
from hosting.utils import sort_by_name
class AboutView(generic.TemplateView):
template_name = 'pages/about.html'
about = AboutView.as_view()
class TermsAndConditionsView(generic.TemplateView):
template_name = 'pages/terms_conditions.html'
terms_conditions = TermsAndConditionsView.as_view()
class SupervisorsView(generic.TemplateView):
template_name = 'pages/supervisors.html'
def countries(self):
places = Place.available_objects.filter(in_book=True)
groups = Group.objects.exclude(user=None)
countries = sort_by_name({p.country for p in places})
for country in countries:
try:
group = groups.get(name=str(country))
country.supervisors = sorted(user.profile for user in group.user_set.all())
except Group.DoesNotExist:
pass
country.place_count = places.filter(country=country).count()
return countries
supervisors = SupervisorsView.as_view()
class FaqView(generic.TemplateView):
template_name = 'pages/faq.html'
faq = FaqView.as_view()
| Fix numbers in LO list. | Fix numbers in LO list.
| Python | agpl-3.0 | batisteo/pasportaservo,tejo-esperanto/pasportaservo,tejo-esperanto/pasportaservo,tejo-esperanto/pasportaservo,tejo-esperanto/pasportaservo,batisteo/pasportaservo,batisteo/pasportaservo,batisteo/pasportaservo | python | ## Code Before:
from django.views import generic
from django.contrib.auth.models import Group
from django_countries.fields import Country
from hosting.models import Profile, Place
from hosting.utils import sort_by_name
class AboutView(generic.TemplateView):
template_name = 'pages/about.html'
about = AboutView.as_view()
class TermsAndConditionsView(generic.TemplateView):
template_name = 'pages/terms_conditions.html'
terms_conditions = TermsAndConditionsView.as_view()
class SupervisorsView(generic.TemplateView):
template_name = 'pages/supervisors.html'
def countries(self):
places = Place.objects.filter(in_book=True)
groups = Group.objects.exclude(user=None)
countries = sort_by_name({p.country for p in places})
for country in countries:
try:
group = groups.get(name=str(country))
country.supervisors = sorted(user.profile for user in group.user_set.all())
except Group.DoesNotExist:
pass
country.place_count = places.filter(country=country).count()
return countries
supervisors = SupervisorsView.as_view()
class FaqView(generic.TemplateView):
template_name = 'pages/faq.html'
faq = FaqView.as_view()
## Instruction:
Fix numbers in LO list.
## Code After:
from django.views import generic
from django.contrib.auth.models import Group
from hosting.models import Place
from hosting.utils import sort_by_name
class AboutView(generic.TemplateView):
template_name = 'pages/about.html'
about = AboutView.as_view()
class TermsAndConditionsView(generic.TemplateView):
template_name = 'pages/terms_conditions.html'
terms_conditions = TermsAndConditionsView.as_view()
class SupervisorsView(generic.TemplateView):
template_name = 'pages/supervisors.html'
def countries(self):
places = Place.available_objects.filter(in_book=True)
groups = Group.objects.exclude(user=None)
countries = sort_by_name({p.country for p in places})
for country in countries:
try:
group = groups.get(name=str(country))
country.supervisors = sorted(user.profile for user in group.user_set.all())
except Group.DoesNotExist:
pass
country.place_count = places.filter(country=country).count()
return countries
supervisors = SupervisorsView.as_view()
class FaqView(generic.TemplateView):
template_name = 'pages/faq.html'
faq = FaqView.as_view()
| from django.views import generic
from django.contrib.auth.models import Group
- from django_countries.fields import Country
- from hosting.models import Profile, Place
? ---------
+ from hosting.models import Place
from hosting.utils import sort_by_name
class AboutView(generic.TemplateView):
template_name = 'pages/about.html'
about = AboutView.as_view()
class TermsAndConditionsView(generic.TemplateView):
template_name = 'pages/terms_conditions.html'
terms_conditions = TermsAndConditionsView.as_view()
class SupervisorsView(generic.TemplateView):
template_name = 'pages/supervisors.html'
def countries(self):
- places = Place.objects.filter(in_book=True)
+ places = Place.available_objects.filter(in_book=True)
? ++++++++++
groups = Group.objects.exclude(user=None)
countries = sort_by_name({p.country for p in places})
for country in countries:
try:
group = groups.get(name=str(country))
country.supervisors = sorted(user.profile for user in group.user_set.all())
except Group.DoesNotExist:
pass
country.place_count = places.filter(country=country).count()
return countries
supervisors = SupervisorsView.as_view()
class FaqView(generic.TemplateView):
template_name = 'pages/faq.html'
faq = FaqView.as_view() | 5 | 0.119048 | 2 | 3 |
6da8b9a9f6f4fead05626062469a7b4200cf91bc | core/css/apps/official/_gallery.scss | core/css/apps/official/_gallery.scss | .app-gallery #app-content {
background-color: $background-normal !important;
}
#content-wrapper .app-gallery {
background-color: $background-dark;
}
#controls .button.sorting {
padding: 8px 5px;
}
#dropdown {
background: $background-normal;
box-shadow: none;
border: 1px solid $decoration-normal;
}
#link {
border-color: $decoration-normal;
}
#slideshow input[type="button"]:not(:disabled):not(.primary):not(#quota) {
&:hover, &:focus, &:active, &.active {
background-color: transparent;
border: none;
}
} | .app-gallery #app-content {
background-color: $background-normal !important;
}
#content-wrapper .app-gallery {
background-color: $background-dark;
}
#controls .actions > div > .button {
padding: 8px 5px;
}
#dropdown {
background: $background-normal;
box-shadow: none;
border: 1px solid $decoration-normal;
}
#link {
border-color: $decoration-normal;
}
#slideshow input[type="button"]:not(:disabled):not(.primary):not(#quota) {
&:hover, &:focus, &:active, &.active {
background-color: transparent;
border: none;
}
} | Fix alignment of sorting buttons in Gallery | Fix alignment of sorting buttons in Gallery
| SCSS | agpl-3.0 | mwalbeck/nextcloud-breeze-dark,mwalbeck/nextcloud-breeze-dark | scss | ## Code Before:
.app-gallery #app-content {
background-color: $background-normal !important;
}
#content-wrapper .app-gallery {
background-color: $background-dark;
}
#controls .button.sorting {
padding: 8px 5px;
}
#dropdown {
background: $background-normal;
box-shadow: none;
border: 1px solid $decoration-normal;
}
#link {
border-color: $decoration-normal;
}
#slideshow input[type="button"]:not(:disabled):not(.primary):not(#quota) {
&:hover, &:focus, &:active, &.active {
background-color: transparent;
border: none;
}
}
## Instruction:
Fix alignment of sorting buttons in Gallery
## Code After:
.app-gallery #app-content {
background-color: $background-normal !important;
}
#content-wrapper .app-gallery {
background-color: $background-dark;
}
#controls .actions > div > .button {
padding: 8px 5px;
}
#dropdown {
background: $background-normal;
box-shadow: none;
border: 1px solid $decoration-normal;
}
#link {
border-color: $decoration-normal;
}
#slideshow input[type="button"]:not(:disabled):not(.primary):not(#quota) {
&:hover, &:focus, &:active, &.active {
background-color: transparent;
border: none;
}
} | .app-gallery #app-content {
background-color: $background-normal !important;
}
#content-wrapper .app-gallery {
background-color: $background-dark;
}
- #controls .button.sorting {
+ #controls .actions > div > .button {
padding: 8px 5px;
}
#dropdown {
background: $background-normal;
box-shadow: none;
border: 1px solid $decoration-normal;
}
#link {
border-color: $decoration-normal;
}
#slideshow input[type="button"]:not(:disabled):not(.primary):not(#quota) {
&:hover, &:focus, &:active, &.active {
background-color: transparent;
border: none;
}
} | 2 | 0.068966 | 1 | 1 |
61a5c5870c7363034eb20b19b54b3df6e622da39 | services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/getsavedvers/SnapshotInfo.java | services/git-bridge/src/main/java/uk/ac/ic/wlgitbridge/snapshot/getsavedvers/SnapshotInfo.java | package uk.ac.ic.wlgitbridge.snapshot.getsavedvers;
import uk.ac.ic.wlgitbridge.util.Util;
/**
* Created by Winston on 06/11/14.
*/
public class SnapshotInfo implements Comparable<SnapshotInfo> {
private int versionId;
private String comment;
private WLUser user;
private String createdAt;
public SnapshotInfo(int versionID, String createdAt, String name, String email) {
this(versionID, "Update on " + Util.getServiceName() + ".", email, name, createdAt);
}
public SnapshotInfo(int versionID, String comment, String email, String name, String createdAt) {
versionId = versionID;
this.comment = comment;
user = new WLUser(name, email);
this.createdAt = createdAt;
}
public int getVersionId() {
return versionId;
}
public String getComment() {
return comment;
}
public WLUser getUser() {
return user;
}
public String getCreatedAt() {
return createdAt;
}
@Override
public boolean equals(Object obj) {
if (!(obj instanceof SnapshotInfo)) {
return false;
}
SnapshotInfo that = (SnapshotInfo) obj;
return versionId == that.versionId;
}
@Override
public int compareTo(SnapshotInfo o) {
return Integer.compare(versionId, o.versionId);
}
}
| package uk.ac.ic.wlgitbridge.snapshot.getsavedvers;
import uk.ac.ic.wlgitbridge.util.Util;
/**
* Created by Winston on 06/11/14.
*/
public class SnapshotInfo implements Comparable<SnapshotInfo> {
private int versionId;
private String comment;
private WLUser user;
private String createdAt;
public SnapshotInfo(int versionID, String createdAt, String name, String email) {
this(versionID, "Update on " + Util.getServiceName() + ".", email, name, createdAt);
}
public SnapshotInfo(int versionID, String comment, String email, String name, String createdAt) {
versionId = versionID;
this.comment = comment;
user = new WLUser(name, email);
this.createdAt = createdAt;
}
public int getVersionId() {
return versionId;
}
public String getComment() {
return comment;
}
public WLUser getUser() {
return user != null ? user : new WLUser();
}
public String getCreatedAt() {
return createdAt;
}
@Override
public boolean equals(Object obj) {
if (!(obj instanceof SnapshotInfo)) {
return false;
}
SnapshotInfo that = (SnapshotInfo) obj;
return versionId == that.versionId;
}
@Override
public int compareTo(SnapshotInfo o) {
return Integer.compare(versionId, o.versionId);
}
}
| Support for anonymous saved versions | Support for anonymous saved versions
This fixes a case when de API sends a version without user. There was a
bug allowing to create anonymous versions in the application and we
have to support the old data.
The problem here is that SnapshotInfo classes are inflated from json
via Gson. This method does not call any constructor and, since the json
does not include the ‘user’ key, the bridge crashes because we’re not
expecting null users.
I’m not happy with this fix, but is the minimum solution that does not
affect anything else.
| Java | agpl-3.0 | sharelatex/sharelatex | java | ## Code Before:
package uk.ac.ic.wlgitbridge.snapshot.getsavedvers;
import uk.ac.ic.wlgitbridge.util.Util;
/**
* Created by Winston on 06/11/14.
*/
public class SnapshotInfo implements Comparable<SnapshotInfo> {
private int versionId;
private String comment;
private WLUser user;
private String createdAt;
public SnapshotInfo(int versionID, String createdAt, String name, String email) {
this(versionID, "Update on " + Util.getServiceName() + ".", email, name, createdAt);
}
public SnapshotInfo(int versionID, String comment, String email, String name, String createdAt) {
versionId = versionID;
this.comment = comment;
user = new WLUser(name, email);
this.createdAt = createdAt;
}
public int getVersionId() {
return versionId;
}
public String getComment() {
return comment;
}
public WLUser getUser() {
return user;
}
public String getCreatedAt() {
return createdAt;
}
@Override
public boolean equals(Object obj) {
if (!(obj instanceof SnapshotInfo)) {
return false;
}
SnapshotInfo that = (SnapshotInfo) obj;
return versionId == that.versionId;
}
@Override
public int compareTo(SnapshotInfo o) {
return Integer.compare(versionId, o.versionId);
}
}
## Instruction:
Support for anonymous saved versions
This fixes a case when de API sends a version without user. There was a
bug allowing to create anonymous versions in the application and we
have to support the old data.
The problem here is that SnapshotInfo classes are inflated from json
via Gson. This method does not call any constructor and, since the json
does not include the ‘user’ key, the bridge crashes because we’re not
expecting null users.
I’m not happy with this fix, but is the minimum solution that does not
affect anything else.
## Code After:
package uk.ac.ic.wlgitbridge.snapshot.getsavedvers;
import uk.ac.ic.wlgitbridge.util.Util;
/**
* Created by Winston on 06/11/14.
*/
public class SnapshotInfo implements Comparable<SnapshotInfo> {
private int versionId;
private String comment;
private WLUser user;
private String createdAt;
public SnapshotInfo(int versionID, String createdAt, String name, String email) {
this(versionID, "Update on " + Util.getServiceName() + ".", email, name, createdAt);
}
public SnapshotInfo(int versionID, String comment, String email, String name, String createdAt) {
versionId = versionID;
this.comment = comment;
user = new WLUser(name, email);
this.createdAt = createdAt;
}
public int getVersionId() {
return versionId;
}
public String getComment() {
return comment;
}
public WLUser getUser() {
return user != null ? user : new WLUser();
}
public String getCreatedAt() {
return createdAt;
}
@Override
public boolean equals(Object obj) {
if (!(obj instanceof SnapshotInfo)) {
return false;
}
SnapshotInfo that = (SnapshotInfo) obj;
return versionId == that.versionId;
}
@Override
public int compareTo(SnapshotInfo o) {
return Integer.compare(versionId, o.versionId);
}
}
| package uk.ac.ic.wlgitbridge.snapshot.getsavedvers;
import uk.ac.ic.wlgitbridge.util.Util;
/**
* Created by Winston on 06/11/14.
*/
public class SnapshotInfo implements Comparable<SnapshotInfo> {
private int versionId;
private String comment;
private WLUser user;
private String createdAt;
public SnapshotInfo(int versionID, String createdAt, String name, String email) {
this(versionID, "Update on " + Util.getServiceName() + ".", email, name, createdAt);
}
public SnapshotInfo(int versionID, String comment, String email, String name, String createdAt) {
versionId = versionID;
this.comment = comment;
user = new WLUser(name, email);
this.createdAt = createdAt;
}
public int getVersionId() {
return versionId;
}
public String getComment() {
return comment;
}
public WLUser getUser() {
- return user;
+ return user != null ? user : new WLUser();
}
public String getCreatedAt() {
return createdAt;
}
@Override
public boolean equals(Object obj) {
if (!(obj instanceof SnapshotInfo)) {
return false;
}
SnapshotInfo that = (SnapshotInfo) obj;
return versionId == that.versionId;
}
@Override
public int compareTo(SnapshotInfo o) {
return Integer.compare(versionId, o.versionId);
}
} | 2 | 0.036364 | 1 | 1 |
a803e50a49effa0f1dafe4fbbea720125026e473 | process.sh | process.sh |
folder=$1
camName=$2
basePath=$3
#http://stackoverflow.com/q/11448885
threshold=`date -d "5 minutes ago" +%Y%m%d%H%M%S`
#http://unix.stackexchange.com/a/84859/50868
shopt -s nullglob
for path in $basePath/raw/$folder/record/*.mkv ; do
file=$(basename $path)
#http://stackoverflow.com/a/5257398/316108
parts=(${file//_/ })
#http://www.catonmat.net/blog/bash-one-liners-explained-part-two/
date=${parts[1]:0:4}-${parts[1]:4:2}-${parts[1]:6:2}
time=${parts[2]:0:2}:${parts[2]:2:2}:${parts[2]:4:2}
vidTimestamp=${parts[1]:0:4}${parts[1]:4:2}${parts[1]:6:2}${parts[2]:0:2}${parts[2]:2:2}${parts[2]:4:2}
if test "$vidTimestamp" -lt "$threshold"
then
mkdir -p $basePath/processed/$date/$camName
mv $path $basePath/processed/$date/$camName/$time.mkv
fi
done |
folder=$1
camName=$2
basePath=$3
#http://stackoverflow.com/q/11448885
threshold=`date -d "5 minutes ago" +%Y%m%d%H%M%S`
#http://unix.stackexchange.com/a/84859/50868
shopt -s nullglob
for path in $basePath/raw/$folder/record/*.mkv ; do
file=$(basename $path)
#http://stackoverflow.com/a/5257398/316108
parts=(${file//_/ })
#http://www.catonmat.net/blog/bash-one-liners-explained-part-two/
date=${parts[1]:0:4}-${parts[1]:4:2}-${parts[1]:6:2}
time=${parts[2]:0:2}:${parts[2]:2:2}:${parts[2]:4:2}
vidTimestamp=${parts[1]:0:4}${parts[1]:4:2}${parts[1]:6:2}${parts[2]:0:2}${parts[2]:2:2}${parts[2]:4:2}
if test "$vidTimestamp" -lt "$threshold"
then
mkdir -p $basePath/processed/$date/$camName
# need to do a codec copy here since foscam cameras make some kind of corrupt mkv file
# ffmpeg fixes it so mkvmerge can operate on it later
ffmpeg -i $path -codec copy $basePath/processed/$date/$camName/$time.mkv
rm $path
fi
done | Use ffmpeg for codec copy of foscam mkvs | Use ffmpeg for codec copy of foscam mkvs
| Shell | mit | chadly/cams,chadly/vlc-rtsp,chadly/vlc-rtsp,chadly/vlc-rtsp | shell | ## Code Before:
folder=$1
camName=$2
basePath=$3
#http://stackoverflow.com/q/11448885
threshold=`date -d "5 minutes ago" +%Y%m%d%H%M%S`
#http://unix.stackexchange.com/a/84859/50868
shopt -s nullglob
for path in $basePath/raw/$folder/record/*.mkv ; do
file=$(basename $path)
#http://stackoverflow.com/a/5257398/316108
parts=(${file//_/ })
#http://www.catonmat.net/blog/bash-one-liners-explained-part-two/
date=${parts[1]:0:4}-${parts[1]:4:2}-${parts[1]:6:2}
time=${parts[2]:0:2}:${parts[2]:2:2}:${parts[2]:4:2}
vidTimestamp=${parts[1]:0:4}${parts[1]:4:2}${parts[1]:6:2}${parts[2]:0:2}${parts[2]:2:2}${parts[2]:4:2}
if test "$vidTimestamp" -lt "$threshold"
then
mkdir -p $basePath/processed/$date/$camName
mv $path $basePath/processed/$date/$camName/$time.mkv
fi
done
## Instruction:
Use ffmpeg for codec copy of foscam mkvs
## Code After:
folder=$1
camName=$2
basePath=$3
#http://stackoverflow.com/q/11448885
threshold=`date -d "5 minutes ago" +%Y%m%d%H%M%S`
#http://unix.stackexchange.com/a/84859/50868
shopt -s nullglob
for path in $basePath/raw/$folder/record/*.mkv ; do
file=$(basename $path)
#http://stackoverflow.com/a/5257398/316108
parts=(${file//_/ })
#http://www.catonmat.net/blog/bash-one-liners-explained-part-two/
date=${parts[1]:0:4}-${parts[1]:4:2}-${parts[1]:6:2}
time=${parts[2]:0:2}:${parts[2]:2:2}:${parts[2]:4:2}
vidTimestamp=${parts[1]:0:4}${parts[1]:4:2}${parts[1]:6:2}${parts[2]:0:2}${parts[2]:2:2}${parts[2]:4:2}
if test "$vidTimestamp" -lt "$threshold"
then
mkdir -p $basePath/processed/$date/$camName
# need to do a codec copy here since foscam cameras make some kind of corrupt mkv file
# ffmpeg fixes it so mkvmerge can operate on it later
ffmpeg -i $path -codec copy $basePath/processed/$date/$camName/$time.mkv
rm $path
fi
done |
folder=$1
camName=$2
basePath=$3
#http://stackoverflow.com/q/11448885
threshold=`date -d "5 minutes ago" +%Y%m%d%H%M%S`
#http://unix.stackexchange.com/a/84859/50868
shopt -s nullglob
for path in $basePath/raw/$folder/record/*.mkv ; do
file=$(basename $path)
#http://stackoverflow.com/a/5257398/316108
parts=(${file//_/ })
#http://www.catonmat.net/blog/bash-one-liners-explained-part-two/
date=${parts[1]:0:4}-${parts[1]:4:2}-${parts[1]:6:2}
time=${parts[2]:0:2}:${parts[2]:2:2}:${parts[2]:4:2}
vidTimestamp=${parts[1]:0:4}${parts[1]:4:2}${parts[1]:6:2}${parts[2]:0:2}${parts[2]:2:2}${parts[2]:4:2}
if test "$vidTimestamp" -lt "$threshold"
then
mkdir -p $basePath/processed/$date/$camName
+
+ # need to do a codec copy here since foscam cameras make some kind of corrupt mkv file
+ # ffmpeg fixes it so mkvmerge can operate on it later
- mv $path $basePath/processed/$date/$camName/$time.mkv
? ^
+ ffmpeg -i $path -codec copy $basePath/processed/$date/$camName/$time.mkv
? ++ ^^^^^^ ++++++++++++
+
+ rm $path
fi
done | 7 | 0.241379 | 6 | 1 |
9244b207764324cc2276fdd9919f811940465108 | .babelrc.json | .babelrc.json | {
"presets": [["@babel/preset-env", { "targets": { "node": "current" } }]],
"plugins": [
"./resources/load-statically-from-npm.js",
"@babel/plugin-transform-flow-strip-types"
]
}
| {
"presets": [["@babel/preset-env", { "targets": { "node": "10" } }]],
"plugins": [
"./resources/load-statically-from-npm.js",
"@babel/plugin-transform-flow-strip-types"
],
"overrides": [
{
"include": ["**/__tests__/**/*"],
"presets": [["@babel/preset-env", { "targets": { "node": "current" } }]]
}
]
}
| Fix NPM package on Node10 | Fix NPM package on Node10
| JSON | mit | graphql/express-graphql,graphql/express-graphql,graphql/express-graphql | json | ## Code Before:
{
"presets": [["@babel/preset-env", { "targets": { "node": "current" } }]],
"plugins": [
"./resources/load-statically-from-npm.js",
"@babel/plugin-transform-flow-strip-types"
]
}
## Instruction:
Fix NPM package on Node10
## Code After:
{
"presets": [["@babel/preset-env", { "targets": { "node": "10" } }]],
"plugins": [
"./resources/load-statically-from-npm.js",
"@babel/plugin-transform-flow-strip-types"
],
"overrides": [
{
"include": ["**/__tests__/**/*"],
"presets": [["@babel/preset-env", { "targets": { "node": "current" } }]]
}
]
}
| {
- "presets": [["@babel/preset-env", { "targets": { "node": "current" } }]],
? ^^^^^^^
+ "presets": [["@babel/preset-env", { "targets": { "node": "10" } }]],
? ^^
"plugins": [
"./resources/load-statically-from-npm.js",
"@babel/plugin-transform-flow-strip-types"
+ ],
+ "overrides": [
+ {
+ "include": ["**/__tests__/**/*"],
+ "presets": [["@babel/preset-env", { "targets": { "node": "current" } }]]
+ }
]
} | 8 | 1.142857 | 7 | 1 |
a1cd93f7aa6a2d60ea11123be321850a5ee7596d | src/app/core/pet-tag.reducer.ts | src/app/core/pet-tag.reducer.ts | import { ActionReducer, Action } from '@ngrx/store';
import { PetTag, initialTag } from './../core/pet-tag.model';
// Export action types
export const SELECT_SHAPE = 'SELECT_SHAPE';
export const SELECT_FONT = 'SELECT_FONT';
export const ADD_TEXT = 'ADD_TEXT';
export const INCLUDE_CLIP = 'INCLUDE_CLIP';
export const ADD_GEMS = 'ADD_GEMS';
export const COMPLETE = 'COMPLETE';
export const RESET = 'RESET';
// Create pet tag reducer
export const petTagReducer: ActionReducer<PetTag> =
(state: PetTag = initialTag, action: Action) => {
switch(action.type) {
case SELECT_SHAPE:
return Object.assign({}, state, {
shape: action.payload
});
case SELECT_FONT:
return Object.assign({}, state, {
font: action.payload
});
case ADD_TEXT:
return Object.assign({}, state, {
text: action.payload
});
case INCLUDE_CLIP:
return Object.assign({}, state, {
clip: action.payload
});
case ADD_GEMS:
return Object.assign({}, state, {
gems: action.payload
});
case COMPLETE:
return Object.assign({}, state, {
complete: action.payload
});
case RESET:
return Object.assign({}, state, action.payload);
default:
return state;
}
}
| import { Action } from '@ngrx/store';
import { PetTag, initialTag } from './../core/pet-tag.model';
// Export action types
export const SELECT_SHAPE = 'SELECT_SHAPE';
export const SELECT_FONT = 'SELECT_FONT';
export const ADD_TEXT = 'ADD_TEXT';
export const INCLUDE_CLIP = 'INCLUDE_CLIP';
export const ADD_GEMS = 'ADD_GEMS';
export const COMPLETE = 'COMPLETE';
export const RESET = 'RESET';
// Create pet tag reducer
export function petTagReducer(state: PetTag = initialTag, action: Action) {
switch(action.type) {
case SELECT_SHAPE:
return Object.assign({}, state, {
shape: action.payload
});
case SELECT_FONT:
return Object.assign({}, state, {
font: action.payload
});
case ADD_TEXT:
return Object.assign({}, state, {
text: action.payload
});
case INCLUDE_CLIP:
return Object.assign({}, state, {
clip: action.payload
});
case ADD_GEMS:
return Object.assign({}, state, {
gems: action.payload
});
case COMPLETE:
return Object.assign({}, state, {
complete: action.payload
});
case RESET:
return Object.assign({}, state, action.payload);
default:
return state;
}
}
| Fix ng serve compile error. | Fix ng serve compile error.
| TypeScript | mit | kmaida/pet-tags-ngrx,auth0-blog/pet-tags-ngrx,auth0-blog/pet-tags-ngrx,auth0-blog/pet-tags-ngrx,kmaida/pet-tags-ngrx,kmaida/pet-tags-ngrx | typescript | ## Code Before:
import { ActionReducer, Action } from '@ngrx/store';
import { PetTag, initialTag } from './../core/pet-tag.model';
// Export action types
export const SELECT_SHAPE = 'SELECT_SHAPE';
export const SELECT_FONT = 'SELECT_FONT';
export const ADD_TEXT = 'ADD_TEXT';
export const INCLUDE_CLIP = 'INCLUDE_CLIP';
export const ADD_GEMS = 'ADD_GEMS';
export const COMPLETE = 'COMPLETE';
export const RESET = 'RESET';
// Create pet tag reducer
export const petTagReducer: ActionReducer<PetTag> =
(state: PetTag = initialTag, action: Action) => {
switch(action.type) {
case SELECT_SHAPE:
return Object.assign({}, state, {
shape: action.payload
});
case SELECT_FONT:
return Object.assign({}, state, {
font: action.payload
});
case ADD_TEXT:
return Object.assign({}, state, {
text: action.payload
});
case INCLUDE_CLIP:
return Object.assign({}, state, {
clip: action.payload
});
case ADD_GEMS:
return Object.assign({}, state, {
gems: action.payload
});
case COMPLETE:
return Object.assign({}, state, {
complete: action.payload
});
case RESET:
return Object.assign({}, state, action.payload);
default:
return state;
}
}
## Instruction:
Fix ng serve compile error.
## Code After:
import { Action } from '@ngrx/store';
import { PetTag, initialTag } from './../core/pet-tag.model';
// Export action types
export const SELECT_SHAPE = 'SELECT_SHAPE';
export const SELECT_FONT = 'SELECT_FONT';
export const ADD_TEXT = 'ADD_TEXT';
export const INCLUDE_CLIP = 'INCLUDE_CLIP';
export const ADD_GEMS = 'ADD_GEMS';
export const COMPLETE = 'COMPLETE';
export const RESET = 'RESET';
// Create pet tag reducer
export function petTagReducer(state: PetTag = initialTag, action: Action) {
switch(action.type) {
case SELECT_SHAPE:
return Object.assign({}, state, {
shape: action.payload
});
case SELECT_FONT:
return Object.assign({}, state, {
font: action.payload
});
case ADD_TEXT:
return Object.assign({}, state, {
text: action.payload
});
case INCLUDE_CLIP:
return Object.assign({}, state, {
clip: action.payload
});
case ADD_GEMS:
return Object.assign({}, state, {
gems: action.payload
});
case COMPLETE:
return Object.assign({}, state, {
complete: action.payload
});
case RESET:
return Object.assign({}, state, action.payload);
default:
return state;
}
}
| - import { ActionReducer, Action } from '@ngrx/store';
? ---------------
+ import { Action } from '@ngrx/store';
import { PetTag, initialTag } from './../core/pet-tag.model';
// Export action types
export const SELECT_SHAPE = 'SELECT_SHAPE';
export const SELECT_FONT = 'SELECT_FONT';
export const ADD_TEXT = 'ADD_TEXT';
export const INCLUDE_CLIP = 'INCLUDE_CLIP';
export const ADD_GEMS = 'ADD_GEMS';
export const COMPLETE = 'COMPLETE';
export const RESET = 'RESET';
// Create pet tag reducer
- export const petTagReducer: ActionReducer<PetTag> =
- (state: PetTag = initialTag, action: Action) => {
? ---
+ export function petTagReducer(state: PetTag = initialTag, action: Action) {
? ++++++ ++++++++ +++++++++++++
- switch(action.type) {
? --
+ switch(action.type) {
- case SELECT_SHAPE:
? --
+ case SELECT_SHAPE:
- return Object.assign({}, state, {
? --
+ return Object.assign({}, state, {
- shape: action.payload
? --
+ shape: action.payload
- });
? --
+ });
- case SELECT_FONT:
? --
+ case SELECT_FONT:
- return Object.assign({}, state, {
? --
+ return Object.assign({}, state, {
- font: action.payload
? --
+ font: action.payload
- });
? --
+ });
- case ADD_TEXT:
? --
+ case ADD_TEXT:
- return Object.assign({}, state, {
? --
+ return Object.assign({}, state, {
- text: action.payload
? --
+ text: action.payload
- });
? --
+ });
- case INCLUDE_CLIP:
? --
+ case INCLUDE_CLIP:
- return Object.assign({}, state, {
? --
+ return Object.assign({}, state, {
- clip: action.payload
? --
+ clip: action.payload
- });
? --
+ });
- case ADD_GEMS:
? --
+ case ADD_GEMS:
- return Object.assign({}, state, {
? --
+ return Object.assign({}, state, {
- gems: action.payload
? --
+ gems: action.payload
- });
? --
+ });
- case COMPLETE:
? --
+ case COMPLETE:
- return Object.assign({}, state, {
? --
+ return Object.assign({}, state, {
- complete: action.payload
? --
+ complete: action.payload
- });
? --
+ });
- case RESET:
? --
+ case RESET:
- return Object.assign({}, state, action.payload);
? --
+ return Object.assign({}, state, action.payload);
- default:
? --
+ default:
- return state;
? --
+ return state;
}
} | 63 | 1.369565 | 31 | 32 |
592d524dc114964ff37aa5477b5f6a2f4c469bdb | index.js | index.js | var Filter = require('broccoli-filter')
var UglifyJS = require('uglify-js')
module.exports = UglifyJSFilter
UglifyJSFilter.prototype = Object.create(Filter.prototype)
UglifyJSFilter.prototype.constructor = UglifyJSFilter
function UglifyJSFilter (inputTree, options) {
if (!(this instanceof UglifyJSFilter)) return new UglifyJSFilter(inputTree, options)
this.inputTree = inputTree
this.options = options || {}
}
UglifyJSFilter.prototype.extensions = ['js']
UglifyJSFilter.prototype.targetExtension = 'js'
UglifyJSFilter.prototype.processString = function (string) {
var result = UglifyJS.minify(string, {
fromString: true,
mangle: this.options.mangle,
compress: this.options.compress,
output: this.options.output
})
return result.code
}
| var Filter = require('broccoli-filter')
var UglifyJS = require('uglify-js')
module.exports = UglifyJSFilter
UglifyJSFilter.prototype = Object.create(Filter.prototype)
UglifyJSFilter.prototype.constructor = UglifyJSFilter
function UglifyJSFilter (inputTree, options) {
if (!(this instanceof UglifyJSFilter)) return new UglifyJSFilter(inputTree, options)
Filter.call(this, inputTree, options)
this.options = options || {}
}
UglifyJSFilter.prototype.extensions = ['js']
UglifyJSFilter.prototype.targetExtension = 'js'
UglifyJSFilter.prototype.processString = function (string) {
var result = UglifyJS.minify(string, {
fromString: true,
mangle: this.options.mangle,
compress: this.options.compress,
output: this.options.output
})
return result.code
}
| Call Filter constructor in UglifyJSFilter | Call Filter constructor in UglifyJSFilter
This allows passing in options like targetExtension when using this
filter in a Brocfile.
| JavaScript | mit | joliss/broccoli-uglify-js | javascript | ## Code Before:
var Filter = require('broccoli-filter')
var UglifyJS = require('uglify-js')
module.exports = UglifyJSFilter
UglifyJSFilter.prototype = Object.create(Filter.prototype)
UglifyJSFilter.prototype.constructor = UglifyJSFilter
function UglifyJSFilter (inputTree, options) {
if (!(this instanceof UglifyJSFilter)) return new UglifyJSFilter(inputTree, options)
this.inputTree = inputTree
this.options = options || {}
}
UglifyJSFilter.prototype.extensions = ['js']
UglifyJSFilter.prototype.targetExtension = 'js'
UglifyJSFilter.prototype.processString = function (string) {
var result = UglifyJS.minify(string, {
fromString: true,
mangle: this.options.mangle,
compress: this.options.compress,
output: this.options.output
})
return result.code
}
## Instruction:
Call Filter constructor in UglifyJSFilter
This allows passing in options like targetExtension when using this
filter in a Brocfile.
## Code After:
var Filter = require('broccoli-filter')
var UglifyJS = require('uglify-js')
module.exports = UglifyJSFilter
UglifyJSFilter.prototype = Object.create(Filter.prototype)
UglifyJSFilter.prototype.constructor = UglifyJSFilter
function UglifyJSFilter (inputTree, options) {
if (!(this instanceof UglifyJSFilter)) return new UglifyJSFilter(inputTree, options)
Filter.call(this, inputTree, options)
this.options = options || {}
}
UglifyJSFilter.prototype.extensions = ['js']
UglifyJSFilter.prototype.targetExtension = 'js'
UglifyJSFilter.prototype.processString = function (string) {
var result = UglifyJS.minify(string, {
fromString: true,
mangle: this.options.mangle,
compress: this.options.compress,
output: this.options.output
})
return result.code
}
| var Filter = require('broccoli-filter')
var UglifyJS = require('uglify-js')
module.exports = UglifyJSFilter
UglifyJSFilter.prototype = Object.create(Filter.prototype)
UglifyJSFilter.prototype.constructor = UglifyJSFilter
function UglifyJSFilter (inputTree, options) {
if (!(this instanceof UglifyJSFilter)) return new UglifyJSFilter(inputTree, options)
- this.inputTree = inputTree
+ Filter.call(this, inputTree, options)
this.options = options || {}
}
UglifyJSFilter.prototype.extensions = ['js']
UglifyJSFilter.prototype.targetExtension = 'js'
UglifyJSFilter.prototype.processString = function (string) {
var result = UglifyJS.minify(string, {
fromString: true,
mangle: this.options.mangle,
compress: this.options.compress,
output: this.options.output
})
return result.code
} | 2 | 0.083333 | 1 | 1 |
80b293f1c11f656802bd9392988b67df37ba7463 | tests/Pack/IndexTest.php | tests/Pack/IndexTest.php | <?php
namespace adrianclay\git\Pack;
use adrianclay\git\Repository;
use adrianclay\git\SHA;
use PHPUnit\Framework\TestCase;
class IndexTest extends TestCase
{
private $repository;
public function setUp(): void
{
parent::setUp();
$path = __DIR__ . '/../../';
$this->repository = new Repository( $path );
}
public function testCanGetAOffsetOfZeroPrefixedSha() {
$sha = new SHA( "00" . "f5270dc9fcb8ec4c31e04334128a4a9f596470" );
$object = $this->repository->getObject( $sha );
$this->assertNotNull( $object );
}
} | <?php
namespace adrianclay\git\Pack;
use adrianclay\git\Repository;
use adrianclay\git\SHA;
use PHPUnit\Framework\TestCase;
class IndexTest extends TestCase
{
private $repository;
public function setUp(): void
{
parent::setUp();
$path = __DIR__ . '/../../';
$this->repository = new Repository( $path );
}
public function testCanGetAOffsetOfZeroPrefixedSha() {
$sha = new SHA( "00" . "f5270dc9fcb8ec4c31e04334128a4a9f596470" );
$object = $this->repository->getObject( $sha );
$this->assertNotNull( $object );
}
public function testGivenPhpFileThrowsInvalidException() {
$this->expectException(\InvalidArgumentException::class);
new Index(__FILE__);
}
} | Add test for magic value check | Add test for magic value check
| PHP | mit | adrianclay/php-git | php | ## Code Before:
<?php
namespace adrianclay\git\Pack;
use adrianclay\git\Repository;
use adrianclay\git\SHA;
use PHPUnit\Framework\TestCase;
class IndexTest extends TestCase
{
private $repository;
public function setUp(): void
{
parent::setUp();
$path = __DIR__ . '/../../';
$this->repository = new Repository( $path );
}
public function testCanGetAOffsetOfZeroPrefixedSha() {
$sha = new SHA( "00" . "f5270dc9fcb8ec4c31e04334128a4a9f596470" );
$object = $this->repository->getObject( $sha );
$this->assertNotNull( $object );
}
}
## Instruction:
Add test for magic value check
## Code After:
<?php
namespace adrianclay\git\Pack;
use adrianclay\git\Repository;
use adrianclay\git\SHA;
use PHPUnit\Framework\TestCase;
class IndexTest extends TestCase
{
private $repository;
public function setUp(): void
{
parent::setUp();
$path = __DIR__ . '/../../';
$this->repository = new Repository( $path );
}
public function testCanGetAOffsetOfZeroPrefixedSha() {
$sha = new SHA( "00" . "f5270dc9fcb8ec4c31e04334128a4a9f596470" );
$object = $this->repository->getObject( $sha );
$this->assertNotNull( $object );
}
public function testGivenPhpFileThrowsInvalidException() {
$this->expectException(\InvalidArgumentException::class);
new Index(__FILE__);
}
} | <?php
namespace adrianclay\git\Pack;
use adrianclay\git\Repository;
use adrianclay\git\SHA;
use PHPUnit\Framework\TestCase;
class IndexTest extends TestCase
{
private $repository;
public function setUp(): void
{
parent::setUp();
$path = __DIR__ . '/../../';
$this->repository = new Repository( $path );
}
public function testCanGetAOffsetOfZeroPrefixedSha() {
$sha = new SHA( "00" . "f5270dc9fcb8ec4c31e04334128a4a9f596470" );
$object = $this->repository->getObject( $sha );
$this->assertNotNull( $object );
}
+
+ public function testGivenPhpFileThrowsInvalidException() {
+ $this->expectException(\InvalidArgumentException::class);
+
+ new Index(__FILE__);
+ }
} | 6 | 0.24 | 6 | 0 |
da3194784401182ab8fca77a014f71f261e08cb0 | .atom/config.cson | .atom/config.cson | "*":
core:
themes: [
"one-light-ui"
"solarized-dark-syntax"
]
editor:
fontFamily: "mononoki"
fontSize: 18
invisibles: {}
showIndentGuide: true
showInvisibles: true
"exception-reporting":
userId: "76d6bd0a-16fd-42b8-0163-bce5c8c7e379"
welcome:
showOnStartup: false
whitespace:
ensureSingleTrailingNewline: false
| "*":
core:
telemetryConsent: "no"
themes: [
"one-light-ui"
"solarized-dark-syntax"
]
editor:
fontFamily: "mononoki"
fontSize: 18
invisibles: {}
showIndentGuide: true
showInvisibles: true
"exception-reporting":
userId: "76d6bd0a-16fd-42b8-0163-bce5c8c7e379"
welcome:
showOnStartup: false
whitespace:
ensureSingleTrailingNewline: false
| Set telementry consent to "no" for atom | Set telementry consent to "no" for atom
| CoffeeScript | mit | dreadwarrior/dotfiles,dreadwarrior/dotfiles | coffeescript | ## Code Before:
"*":
core:
themes: [
"one-light-ui"
"solarized-dark-syntax"
]
editor:
fontFamily: "mononoki"
fontSize: 18
invisibles: {}
showIndentGuide: true
showInvisibles: true
"exception-reporting":
userId: "76d6bd0a-16fd-42b8-0163-bce5c8c7e379"
welcome:
showOnStartup: false
whitespace:
ensureSingleTrailingNewline: false
## Instruction:
Set telementry consent to "no" for atom
## Code After:
"*":
core:
telemetryConsent: "no"
themes: [
"one-light-ui"
"solarized-dark-syntax"
]
editor:
fontFamily: "mononoki"
fontSize: 18
invisibles: {}
showIndentGuide: true
showInvisibles: true
"exception-reporting":
userId: "76d6bd0a-16fd-42b8-0163-bce5c8c7e379"
welcome:
showOnStartup: false
whitespace:
ensureSingleTrailingNewline: false
| "*":
core:
+ telemetryConsent: "no"
themes: [
"one-light-ui"
"solarized-dark-syntax"
]
editor:
fontFamily: "mononoki"
fontSize: 18
invisibles: {}
showIndentGuide: true
showInvisibles: true
"exception-reporting":
userId: "76d6bd0a-16fd-42b8-0163-bce5c8c7e379"
welcome:
showOnStartup: false
whitespace:
ensureSingleTrailingNewline: false | 1 | 0.055556 | 1 | 0 |
7af6f6f5a1e27fc00008a763046170ec5ab3beb4 | src/charly/visitors/DumpVisitor.cr | src/charly/visitors/DumpVisitor.cr | require "./TreeVisitor.cr"
module Charly::AST
# Dump a human-readable version of the AST
class DumpVisitor < TreeVisitor
# Catch all rule
visit ASTNode do
io.puts name node
rest children
end
visit StringLiteral do
io << name node
io << " | "
io.puts "\"#{node.value}\""
end
visit PrecalculatedValue, NumericLiteral, BooleanLiteral do
io << name node
io << " | "
io.puts node.value
end
visit IdentifierLiteral, FunctionLiteral, ClassLiteral, PrimitiveClassLiteral do
io << name node
io << " | "
io.puts "\"#{node.name}\""
rest children
end
visit UnaryExpression, BinaryExpression, ComparisonExpression do
io << name node
io << " | "
io.puts node.operator
rest children
end
macro rest(children)
{{children}}.each_with_index do |child, index|
unless child.is_a? ASTNode
next
end
str = String.build do |str|
child.accept self, str
end
str.lines.each_with_index do |line, line_index|
if line_index == 0
if {{children}}.size > 1 && index < {{children}}.size - 1
io << "├─"
else
io << "└─"
end
else
if {{children}}.size > 1 && index < {{children}}.size - 1
io << "│ "
else
io << " "
end
end
io << " "
io << line
io << "\n"
end
end
end
end
end
| require "./TreeVisitor.cr"
module Charly::AST
# Dump a human-readable version of the AST
class DumpVisitor < TreeVisitor
# Catch all rule
visit ASTNode do
io.puts name node
rest children
end
visit StringLiteral do
io << name node
io << " | "
io.puts "\"#{node.value}\""
end
visit PrecalculatedValue, NumericLiteral, BooleanLiteral do
io << name node
io << " | "
io.puts node.value
end
visit IdentifierLiteral, FunctionLiteral, ClassLiteral, PrimitiveClassLiteral do
io << name node
io << " | "
io.puts "\"#{node.name}\""
rest children
end
visit UnaryExpression, BinaryExpression, ComparisonExpression do
io << name node
io << " | "
io.puts node.operator
rest children
end
macro rest(children)
{{children}}.each_with_index do |child, index|
unless child.is_a? ASTNode
next
end
str = String.build do |str|
child.accept self, str
end
str.lines.each_with_index do |line, line_index|
if line_index == 0
if {{children}}.size > 1 && index < {{children}}.size - 1
io << "├─"
else
io << "└─"
end
else
if {{children}}.size > 1 && index < {{children}}.size - 1
io << "│ "
else
io << " "
end
end
io << " "
io << line
io << "\n"
end
end
io << "\n"
end
end
end
| Append newline after nodes with children | Append newline after nodes with children
Signed-off-by: Leonard Schuetz <0a869347170afd1ee7c6a6a90d899177d625e0cf@me.com>
| Crystal | mit | KCreate/charly-lang,charly-lang/charly | crystal | ## Code Before:
require "./TreeVisitor.cr"
module Charly::AST
# Dump a human-readable version of the AST
class DumpVisitor < TreeVisitor
# Catch all rule
visit ASTNode do
io.puts name node
rest children
end
visit StringLiteral do
io << name node
io << " | "
io.puts "\"#{node.value}\""
end
visit PrecalculatedValue, NumericLiteral, BooleanLiteral do
io << name node
io << " | "
io.puts node.value
end
visit IdentifierLiteral, FunctionLiteral, ClassLiteral, PrimitiveClassLiteral do
io << name node
io << " | "
io.puts "\"#{node.name}\""
rest children
end
visit UnaryExpression, BinaryExpression, ComparisonExpression do
io << name node
io << " | "
io.puts node.operator
rest children
end
macro rest(children)
{{children}}.each_with_index do |child, index|
unless child.is_a? ASTNode
next
end
str = String.build do |str|
child.accept self, str
end
str.lines.each_with_index do |line, line_index|
if line_index == 0
if {{children}}.size > 1 && index < {{children}}.size - 1
io << "├─"
else
io << "└─"
end
else
if {{children}}.size > 1 && index < {{children}}.size - 1
io << "│ "
else
io << " "
end
end
io << " "
io << line
io << "\n"
end
end
end
end
end
## Instruction:
Append newline after nodes with children
Signed-off-by: Leonard Schuetz <0a869347170afd1ee7c6a6a90d899177d625e0cf@me.com>
## Code After:
require "./TreeVisitor.cr"
module Charly::AST
# Dump a human-readable version of the AST
class DumpVisitor < TreeVisitor
# Catch all rule
visit ASTNode do
io.puts name node
rest children
end
visit StringLiteral do
io << name node
io << " | "
io.puts "\"#{node.value}\""
end
visit PrecalculatedValue, NumericLiteral, BooleanLiteral do
io << name node
io << " | "
io.puts node.value
end
visit IdentifierLiteral, FunctionLiteral, ClassLiteral, PrimitiveClassLiteral do
io << name node
io << " | "
io.puts "\"#{node.name}\""
rest children
end
visit UnaryExpression, BinaryExpression, ComparisonExpression do
io << name node
io << " | "
io.puts node.operator
rest children
end
macro rest(children)
{{children}}.each_with_index do |child, index|
unless child.is_a? ASTNode
next
end
str = String.build do |str|
child.accept self, str
end
str.lines.each_with_index do |line, line_index|
if line_index == 0
if {{children}}.size > 1 && index < {{children}}.size - 1
io << "├─"
else
io << "└─"
end
else
if {{children}}.size > 1 && index < {{children}}.size - 1
io << "│ "
else
io << " "
end
end
io << " "
io << line
io << "\n"
end
end
io << "\n"
end
end
end
| require "./TreeVisitor.cr"
module Charly::AST
# Dump a human-readable version of the AST
class DumpVisitor < TreeVisitor
# Catch all rule
visit ASTNode do
io.puts name node
rest children
end
visit StringLiteral do
io << name node
io << " | "
io.puts "\"#{node.value}\""
end
visit PrecalculatedValue, NumericLiteral, BooleanLiteral do
io << name node
io << " | "
io.puts node.value
end
visit IdentifierLiteral, FunctionLiteral, ClassLiteral, PrimitiveClassLiteral do
io << name node
io << " | "
io.puts "\"#{node.name}\""
rest children
end
visit UnaryExpression, BinaryExpression, ComparisonExpression do
io << name node
io << " | "
io.puts node.operator
rest children
end
macro rest(children)
{{children}}.each_with_index do |child, index|
unless child.is_a? ASTNode
next
end
str = String.build do |str|
child.accept self, str
end
str.lines.each_with_index do |line, line_index|
if line_index == 0
if {{children}}.size > 1 && index < {{children}}.size - 1
io << "├─"
else
io << "└─"
end
else
if {{children}}.size > 1 && index < {{children}}.size - 1
io << "│ "
else
io << " "
end
end
io << " "
io << line
io << "\n"
end
end
+
+ io << "\n"
end
end
end | 2 | 0.025974 | 2 | 0 |
1033d4444919a69afe5ebd207591dc2930ed4a77 | lib/benchmark/memory/measurement.rb | lib/benchmark/memory/measurement.rb | require "benchmark/memory/measurement/metric"
module Benchmark
module Memory
# Encapsulate the combined metrics of an action.
class Measurement
include Enumerable
extend Forwardable
# Create a Measurement from a MemoryProfiler::Results object.
#
# @param result [MemoryProfiler::Results] The results of a MemoryProfiler report.
def self.from_result(result)
memory = Metric.new(:memsize, result.total_allocated_memsize, result.total_retained_memsize)
objects = Metric.new(:objects, result.total_allocated, result.total_retained)
strings = Metric.new(:strings, result.strings_allocated.size, result.strings_retained.size)
new(:memory => memory, :objects => objects, :strings => strings)
end
# Instantiate a Measurement of memory usage.
#
# @param memory [Metric] The memory usage of an action.
# @param objects [Metric] The object allocations of an action.
# @param strings [Metric] The string allocations of an action.
def initialize(memory:, objects:, strings:)
@metrics = [memory, objects, strings]
end
# @return [Array<Metric>] The metrics for the measurement.
attr_reader :metrics
# Enumerate through the metrics when enumerating a measurement.
def_delegator :@metrics, :each
end
end
end
| require "forwardable"
require "benchmark/memory/measurement/metric"
module Benchmark
module Memory
# Encapsulate the combined metrics of an action.
class Measurement
include Enumerable
extend Forwardable
# Create a Measurement from a MemoryProfiler::Results object.
#
# @param result [MemoryProfiler::Results] The results of a MemoryProfiler report.
def self.from_result(result)
memory = Metric.new(:memsize, result.total_allocated_memsize, result.total_retained_memsize)
objects = Metric.new(:objects, result.total_allocated, result.total_retained)
strings = Metric.new(:strings, result.strings_allocated.size, result.strings_retained.size)
new(:memory => memory, :objects => objects, :strings => strings)
end
# Instantiate a Measurement of memory usage.
#
# @param memory [Metric] The memory usage of an action.
# @param objects [Metric] The object allocations of an action.
# @param strings [Metric] The string allocations of an action.
def initialize(memory:, objects:, strings:)
@metrics = [memory, objects, strings]
end
# @return [Array<Metric>] The metrics for the measurement.
attr_reader :metrics
# Enumerate through the metrics when enumerating a measurement.
def_delegator :@metrics, :each
end
end
end
| Add forgotten require of Forwardable | Add forgotten require of Forwardable
| Ruby | mit | michaelherold/benchmark-memory,michaelherold/benchmark-memory | ruby | ## Code Before:
require "benchmark/memory/measurement/metric"
module Benchmark
module Memory
# Encapsulate the combined metrics of an action.
class Measurement
include Enumerable
extend Forwardable
# Create a Measurement from a MemoryProfiler::Results object.
#
# @param result [MemoryProfiler::Results] The results of a MemoryProfiler report.
def self.from_result(result)
memory = Metric.new(:memsize, result.total_allocated_memsize, result.total_retained_memsize)
objects = Metric.new(:objects, result.total_allocated, result.total_retained)
strings = Metric.new(:strings, result.strings_allocated.size, result.strings_retained.size)
new(:memory => memory, :objects => objects, :strings => strings)
end
# Instantiate a Measurement of memory usage.
#
# @param memory [Metric] The memory usage of an action.
# @param objects [Metric] The object allocations of an action.
# @param strings [Metric] The string allocations of an action.
def initialize(memory:, objects:, strings:)
@metrics = [memory, objects, strings]
end
# @return [Array<Metric>] The metrics for the measurement.
attr_reader :metrics
# Enumerate through the metrics when enumerating a measurement.
def_delegator :@metrics, :each
end
end
end
## Instruction:
Add forgotten require of Forwardable
## Code After:
require "forwardable"
require "benchmark/memory/measurement/metric"
module Benchmark
module Memory
# Encapsulate the combined metrics of an action.
class Measurement
include Enumerable
extend Forwardable
# Create a Measurement from a MemoryProfiler::Results object.
#
# @param result [MemoryProfiler::Results] The results of a MemoryProfiler report.
def self.from_result(result)
memory = Metric.new(:memsize, result.total_allocated_memsize, result.total_retained_memsize)
objects = Metric.new(:objects, result.total_allocated, result.total_retained)
strings = Metric.new(:strings, result.strings_allocated.size, result.strings_retained.size)
new(:memory => memory, :objects => objects, :strings => strings)
end
# Instantiate a Measurement of memory usage.
#
# @param memory [Metric] The memory usage of an action.
# @param objects [Metric] The object allocations of an action.
# @param strings [Metric] The string allocations of an action.
def initialize(memory:, objects:, strings:)
@metrics = [memory, objects, strings]
end
# @return [Array<Metric>] The metrics for the measurement.
attr_reader :metrics
# Enumerate through the metrics when enumerating a measurement.
def_delegator :@metrics, :each
end
end
end
| + require "forwardable"
require "benchmark/memory/measurement/metric"
module Benchmark
module Memory
# Encapsulate the combined metrics of an action.
class Measurement
include Enumerable
extend Forwardable
# Create a Measurement from a MemoryProfiler::Results object.
#
# @param result [MemoryProfiler::Results] The results of a MemoryProfiler report.
def self.from_result(result)
memory = Metric.new(:memsize, result.total_allocated_memsize, result.total_retained_memsize)
objects = Metric.new(:objects, result.total_allocated, result.total_retained)
strings = Metric.new(:strings, result.strings_allocated.size, result.strings_retained.size)
new(:memory => memory, :objects => objects, :strings => strings)
end
# Instantiate a Measurement of memory usage.
#
# @param memory [Metric] The memory usage of an action.
# @param objects [Metric] The object allocations of an action.
# @param strings [Metric] The string allocations of an action.
def initialize(memory:, objects:, strings:)
@metrics = [memory, objects, strings]
end
# @return [Array<Metric>] The metrics for the measurement.
attr_reader :metrics
# Enumerate through the metrics when enumerating a measurement.
def_delegator :@metrics, :each
end
end
end | 1 | 0.027027 | 1 | 0 |
d29e87eeb062df4d52c0c744919be4cae770fc2c | testing/config/settings/__init__.py | testing/config/settings/__init__.py | from daiquiri.core.settings.django import *
from daiquiri.core.settings.celery import *
from daiquiri.core.settings.daiquiri import *
from daiquiri.core.settings.logging import *
from daiquiri.core.settings.vendor import *
from daiquiri.archive.settings import *
from daiquiri.auth.settings import *
from daiquiri.conesearch.settings import *
from daiquiri.cutout.settings import *
from daiquiri.files.settings import *
from daiquiri.meetings.settings import *
from daiquiri.metadata.settings import *
from daiquiri.oai.settings import *
from daiquiri.query.settings import *
from daiquiri.serve.settings import *
from daiquiri.stats.settings import *
from daiquiri.tap.settings import *
from daiquiri.wordpress.settings import *
# override settings from base.py (which is checked in to git)
try:
from .base import *
except ImportError:
pass
# override settings from local.py (which is not checked in to git)
try:
from .local import *
except ImportError:
pass
| from daiquiri.core.settings.django import *
from daiquiri.core.settings.celery import *
from daiquiri.core.settings.daiquiri import *
from daiquiri.core.settings.logging import *
from daiquiri.core.settings.vendor import *
from daiquiri.archive.settings import *
from daiquiri.auth.settings import *
from daiquiri.conesearch.settings import *
from daiquiri.cutout.settings import *
from daiquiri.files.settings import *
from daiquiri.meetings.settings import *
from daiquiri.metadata.settings import *
from daiquiri.oai.settings import *
from daiquiri.query.settings import *
from daiquiri.registry.settings import *
from daiquiri.serve.settings import *
from daiquiri.stats.settings import *
from daiquiri.tap.settings import *
from daiquiri.wordpress.settings import *
# override settings from base.py (which is checked in to git)
try:
from .base import *
except ImportError:
pass
# override settings from local.py (which is not checked in to git)
try:
from .local import *
except ImportError:
pass
| Add registry settings to testing | Add registry settings to testing
| Python | apache-2.0 | aipescience/django-daiquiri,aipescience/django-daiquiri,aipescience/django-daiquiri | python | ## Code Before:
from daiquiri.core.settings.django import *
from daiquiri.core.settings.celery import *
from daiquiri.core.settings.daiquiri import *
from daiquiri.core.settings.logging import *
from daiquiri.core.settings.vendor import *
from daiquiri.archive.settings import *
from daiquiri.auth.settings import *
from daiquiri.conesearch.settings import *
from daiquiri.cutout.settings import *
from daiquiri.files.settings import *
from daiquiri.meetings.settings import *
from daiquiri.metadata.settings import *
from daiquiri.oai.settings import *
from daiquiri.query.settings import *
from daiquiri.serve.settings import *
from daiquiri.stats.settings import *
from daiquiri.tap.settings import *
from daiquiri.wordpress.settings import *
# override settings from base.py (which is checked in to git)
try:
from .base import *
except ImportError:
pass
# override settings from local.py (which is not checked in to git)
try:
from .local import *
except ImportError:
pass
## Instruction:
Add registry settings to testing
## Code After:
from daiquiri.core.settings.django import *
from daiquiri.core.settings.celery import *
from daiquiri.core.settings.daiquiri import *
from daiquiri.core.settings.logging import *
from daiquiri.core.settings.vendor import *
from daiquiri.archive.settings import *
from daiquiri.auth.settings import *
from daiquiri.conesearch.settings import *
from daiquiri.cutout.settings import *
from daiquiri.files.settings import *
from daiquiri.meetings.settings import *
from daiquiri.metadata.settings import *
from daiquiri.oai.settings import *
from daiquiri.query.settings import *
from daiquiri.registry.settings import *
from daiquiri.serve.settings import *
from daiquiri.stats.settings import *
from daiquiri.tap.settings import *
from daiquiri.wordpress.settings import *
# override settings from base.py (which is checked in to git)
try:
from .base import *
except ImportError:
pass
# override settings from local.py (which is not checked in to git)
try:
from .local import *
except ImportError:
pass
| from daiquiri.core.settings.django import *
from daiquiri.core.settings.celery import *
from daiquiri.core.settings.daiquiri import *
from daiquiri.core.settings.logging import *
from daiquiri.core.settings.vendor import *
from daiquiri.archive.settings import *
from daiquiri.auth.settings import *
from daiquiri.conesearch.settings import *
from daiquiri.cutout.settings import *
from daiquiri.files.settings import *
from daiquiri.meetings.settings import *
from daiquiri.metadata.settings import *
from daiquiri.oai.settings import *
from daiquiri.query.settings import *
+ from daiquiri.registry.settings import *
from daiquiri.serve.settings import *
from daiquiri.stats.settings import *
from daiquiri.tap.settings import *
from daiquiri.wordpress.settings import *
# override settings from base.py (which is checked in to git)
try:
from .base import *
except ImportError:
pass
# override settings from local.py (which is not checked in to git)
try:
from .local import *
except ImportError:
pass | 1 | 0.032258 | 1 | 0 |
7b73a3519a460cb7ef8fd1c5368d1497a3bb2668 | composer.json | composer.json | {
"name": "signify-nz/silverstripe-mailblock",
"description": "CMS configurable email redirection.",
"type": "silverstripe-module",
"homepage": "http://github.com/signify-nz/silverstripe-mailblock",
"keywords": ["silverstripe", "mailblock", "email", "redirect"],
"license": "BSD-3-Clause",
"authors": [
{"name": "Signify","homepage": "http://signify.co.nz"}
],
"support": {
"issues": "http://github.com/signify-nz/silverstripe-mailblock/issues"
},
"require": {
"silverstripe/cms": "^4.0",
"silverstripe/framework": "^4.0"
}
}
| {
"name": "signify-nz/silverstripe-mailblock",
"description": "CMS configurable email redirection.",
"type": "silverstripe-vendormodule",
"homepage": "http://github.com/signify-nz/silverstripe-mailblock",
"keywords": ["silverstripe", "mailblock", "email", "redirect"],
"license": "BSD-3-Clause",
"authors": [
{"name": "Signify","homepage": "http://signify.co.nz"}
],
"support": {
"issues": "http://github.com/signify-nz/silverstripe-mailblock/issues"
},
"require": {
"silverstripe/cms": "^4",
"silverstripe/framework": "^4"
}
}
| Fix version constraints for SS4. | Fix version constraints for SS4.
| JSON | bsd-3-clause | signify-nz/silverstripe-mailblock,signify-nz/silverstripe-mailblock | json | ## Code Before:
{
"name": "signify-nz/silverstripe-mailblock",
"description": "CMS configurable email redirection.",
"type": "silverstripe-module",
"homepage": "http://github.com/signify-nz/silverstripe-mailblock",
"keywords": ["silverstripe", "mailblock", "email", "redirect"],
"license": "BSD-3-Clause",
"authors": [
{"name": "Signify","homepage": "http://signify.co.nz"}
],
"support": {
"issues": "http://github.com/signify-nz/silverstripe-mailblock/issues"
},
"require": {
"silverstripe/cms": "^4.0",
"silverstripe/framework": "^4.0"
}
}
## Instruction:
Fix version constraints for SS4.
## Code After:
{
"name": "signify-nz/silverstripe-mailblock",
"description": "CMS configurable email redirection.",
"type": "silverstripe-vendormodule",
"homepage": "http://github.com/signify-nz/silverstripe-mailblock",
"keywords": ["silverstripe", "mailblock", "email", "redirect"],
"license": "BSD-3-Clause",
"authors": [
{"name": "Signify","homepage": "http://signify.co.nz"}
],
"support": {
"issues": "http://github.com/signify-nz/silverstripe-mailblock/issues"
},
"require": {
"silverstripe/cms": "^4",
"silverstripe/framework": "^4"
}
}
| {
- "name": "signify-nz/silverstripe-mailblock",
? ^
+ "name": "signify-nz/silverstripe-mailblock",
? ^^^^
- "description": "CMS configurable email redirection.",
? ^
+ "description": "CMS configurable email redirection.",
? ^^^^
- "type": "silverstripe-module",
? ^
+ "type": "silverstripe-vendormodule",
? ^^^^ ++++++
- "homepage": "http://github.com/signify-nz/silverstripe-mailblock",
? ^
+ "homepage": "http://github.com/signify-nz/silverstripe-mailblock",
? ^^^^
- "keywords": ["silverstripe", "mailblock", "email", "redirect"],
? ^
+ "keywords": ["silverstripe", "mailblock", "email", "redirect"],
? ^^^^
- "license": "BSD-3-Clause",
? ^
+ "license": "BSD-3-Clause",
? ^^^^
- "authors": [
? ^
+ "authors": [
? ^^^^
- {"name": "Signify","homepage": "http://signify.co.nz"}
? ^^
+ {"name": "Signify","homepage": "http://signify.co.nz"}
? ^^^^^^^^
- ],
+ ],
- "support": {
? ^
+ "support": {
? ^^^^
- "issues": "http://github.com/signify-nz/silverstripe-mailblock/issues"
? ^^
+ "issues": "http://github.com/signify-nz/silverstripe-mailblock/issues"
? ^^^^^^^^
- },
+ },
- "require": {
? ^
+ "require": {
? ^^^^
- "silverstripe/cms": "^4.0",
? ^^ --
+ "silverstripe/cms": "^4",
? ^^^^^^^^
- "silverstripe/framework": "^4.0"
? ^^ --
+ "silverstripe/framework": "^4"
? ^^^^^^^^
- }
+ }
} | 32 | 1.777778 | 16 | 16 |
1f693d5beca0708583f2cfd99039f135fcbaae7d | src/main/webapp/index.html | src/main/webapp/index.html | <!DOCTYPE html>
<html>
<head>
<meta charset="UTF-8">
<title>Bills</title>
<script src="https://ajax.googleapis.com/ajax/libs/jquery/2.1.4/jquery.min.js"></script>
<script src="/scripts/bills.js"></script>
</head>
<body onload="main();">
<div id="content"></div>
<form id="valueInput">
<label for="basic">Dollar Value:</label>
<input type="number" name="dollarValue" id="basic" value="" step="0.01">
</form>
</body>
</html> | <!DOCTYPE html>
<html>
<head>
<meta charset="UTF-8">
<title>Bills</title>
<script
id="sap-ui-bootstrap"
src="tp/ui5/resources/sap-ui-core.js"
data-sap-ui-libs="sap.ui.commons"
data-sap-ui-theme="sap_bluecrystal" >
</script>
<!-- <script src="https://ajax.googleapis.com/ajax/libs/jquery/2.1.4/jquery.min.js"></script> -->
<script src="/scripts/bills.js"></script>
</head>
<body onload="main();">
<div id="content"></div>
<form id="valueInput">
<label for="basic">Dollar Value:</label>
<input type="number" name="dollarValue" id="basic" value="" step="0.01">
</form>
</body>
</html> | Load UI5 library instead of jQuery. | Load UI5 library instead of jQuery.
Load the UI5 library instead of plain jQuery
| HTML | mit | tcgfh/bills,tcgfh/bills,tcgfh/bills | html | ## Code Before:
<!DOCTYPE html>
<html>
<head>
<meta charset="UTF-8">
<title>Bills</title>
<script src="https://ajax.googleapis.com/ajax/libs/jquery/2.1.4/jquery.min.js"></script>
<script src="/scripts/bills.js"></script>
</head>
<body onload="main();">
<div id="content"></div>
<form id="valueInput">
<label for="basic">Dollar Value:</label>
<input type="number" name="dollarValue" id="basic" value="" step="0.01">
</form>
</body>
</html>
## Instruction:
Load UI5 library instead of jQuery.
Load the UI5 library instead of plain jQuery
## Code After:
<!DOCTYPE html>
<html>
<head>
<meta charset="UTF-8">
<title>Bills</title>
<script
id="sap-ui-bootstrap"
src="tp/ui5/resources/sap-ui-core.js"
data-sap-ui-libs="sap.ui.commons"
data-sap-ui-theme="sap_bluecrystal" >
</script>
<!-- <script src="https://ajax.googleapis.com/ajax/libs/jquery/2.1.4/jquery.min.js"></script> -->
<script src="/scripts/bills.js"></script>
</head>
<body onload="main();">
<div id="content"></div>
<form id="valueInput">
<label for="basic">Dollar Value:</label>
<input type="number" name="dollarValue" id="basic" value="" step="0.01">
</form>
</body>
</html> | <!DOCTYPE html>
<html>
<head>
<meta charset="UTF-8">
<title>Bills</title>
+ <script
+ id="sap-ui-bootstrap"
+ src="tp/ui5/resources/sap-ui-core.js"
+ data-sap-ui-libs="sap.ui.commons"
+ data-sap-ui-theme="sap_bluecrystal" >
+ </script>
- <script src="https://ajax.googleapis.com/ajax/libs/jquery/2.1.4/jquery.min.js"></script>
+ <!-- <script src="https://ajax.googleapis.com/ajax/libs/jquery/2.1.4/jquery.min.js"></script> -->
? +++++ ++++
<script src="/scripts/bills.js"></script>
</head>
<body onload="main();">
<div id="content"></div>
<form id="valueInput">
<label for="basic">Dollar Value:</label>
<input type="number" name="dollarValue" id="basic" value="" step="0.01">
</form>
</body>
</html> | 8 | 0.444444 | 7 | 1 |
db477b8ad766d711fcabf83acf6b4448676becd0 | frontend/index.html | frontend/index.html | <!DOCTYPE html>
<html lang="en">
<head>
<% if (process.env.NODE_ENV === 'production') { %>
<!-- Lodash templates are available by default for html-webpack-plugin -->
<!-- Example usages here https://learn.co/lessons/javascript-lodash-templates -->
<!-- Global site tag (gtag.js) - Google Analytics, but only in production -->
<script async src="https://www.googletagmanager.com/gtag/js?id=UA-116965240-1"></script>
<script>
if (process.env.NODE_ENV === 'production') {
window.dataLayer = window.dataLayer || []
function gtag () { dataLayer.push(arguments) }
gtag('js', new Date())
gtag('config', 'UA-116965240-1')
}
</script>
<% } %>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, shrink-to-fit=no">
<meta name="google-site-verification" content="iLduelR9qGqpKOdqZ8y3vDet-8w5mTocNhPKmF0m9NM" />
<title>Parcel</title>
<link href="https://fonts.googleapis.com/css?family=Merriweather|Source+Sans+Pro" rel="stylesheet">
<link rel="icon" type="image/png" href="/static/img/favicon.png">
</head>
<body>
<div id="app"></div>
</body>
</html>
| <!DOCTYPE html>
<html lang="en">
<head>
<% if (process.env.NODE_ENV === 'production') { %>
<!-- Lodash templates are available by default for html-webpack-plugin -->
<!-- Example usages here https://learn.co/lessons/javascript-lodash-templates -->
<!-- Global site tag (gtag.js) - Google Analytics, but only in production -->
<script async src="https://www.googletagmanager.com/gtag/js?id=UA-116965240-1"></script>
<script>
window.dataLayer = window.dataLayer || []
function gtag () { dataLayer.push(arguments) }
gtag('js', new Date())
gtag('config', 'UA-116965240-1')
</script>
<% } %>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, shrink-to-fit=no">
<meta name="google-site-verification" content="iLduelR9qGqpKOdqZ8y3vDet-8w5mTocNhPKmF0m9NM" />
<title>Parcel</title>
<link href="https://fonts.googleapis.com/css?family=Merriweather|Source+Sans+Pro" rel="stylesheet">
<link rel="icon" type="image/png" href="/static/img/favicon.png">
</head>
<body>
<div id="app"></div>
</body>
</html>
| Remove extra environment check to fix Google Analytics | Remove extra environment check to fix Google Analytics
| HTML | mit | code-for-nashville/parcel,code-for-nashville/parcel,code-for-nashville/parcel | html | ## Code Before:
<!DOCTYPE html>
<html lang="en">
<head>
<% if (process.env.NODE_ENV === 'production') { %>
<!-- Lodash templates are available by default for html-webpack-plugin -->
<!-- Example usages here https://learn.co/lessons/javascript-lodash-templates -->
<!-- Global site tag (gtag.js) - Google Analytics, but only in production -->
<script async src="https://www.googletagmanager.com/gtag/js?id=UA-116965240-1"></script>
<script>
if (process.env.NODE_ENV === 'production') {
window.dataLayer = window.dataLayer || []
function gtag () { dataLayer.push(arguments) }
gtag('js', new Date())
gtag('config', 'UA-116965240-1')
}
</script>
<% } %>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, shrink-to-fit=no">
<meta name="google-site-verification" content="iLduelR9qGqpKOdqZ8y3vDet-8w5mTocNhPKmF0m9NM" />
<title>Parcel</title>
<link href="https://fonts.googleapis.com/css?family=Merriweather|Source+Sans+Pro" rel="stylesheet">
<link rel="icon" type="image/png" href="/static/img/favicon.png">
</head>
<body>
<div id="app"></div>
</body>
</html>
## Instruction:
Remove extra environment check to fix Google Analytics
## Code After:
<!DOCTYPE html>
<html lang="en">
<head>
<% if (process.env.NODE_ENV === 'production') { %>
<!-- Lodash templates are available by default for html-webpack-plugin -->
<!-- Example usages here https://learn.co/lessons/javascript-lodash-templates -->
<!-- Global site tag (gtag.js) - Google Analytics, but only in production -->
<script async src="https://www.googletagmanager.com/gtag/js?id=UA-116965240-1"></script>
<script>
window.dataLayer = window.dataLayer || []
function gtag () { dataLayer.push(arguments) }
gtag('js', new Date())
gtag('config', 'UA-116965240-1')
</script>
<% } %>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, shrink-to-fit=no">
<meta name="google-site-verification" content="iLduelR9qGqpKOdqZ8y3vDet-8w5mTocNhPKmF0m9NM" />
<title>Parcel</title>
<link href="https://fonts.googleapis.com/css?family=Merriweather|Source+Sans+Pro" rel="stylesheet">
<link rel="icon" type="image/png" href="/static/img/favicon.png">
</head>
<body>
<div id="app"></div>
</body>
</html>
| <!DOCTYPE html>
<html lang="en">
<head>
<% if (process.env.NODE_ENV === 'production') { %>
<!-- Lodash templates are available by default for html-webpack-plugin -->
<!-- Example usages here https://learn.co/lessons/javascript-lodash-templates -->
<!-- Global site tag (gtag.js) - Google Analytics, but only in production -->
<script async src="https://www.googletagmanager.com/gtag/js?id=UA-116965240-1"></script>
<script>
- if (process.env.NODE_ENV === 'production') {
- window.dataLayer = window.dataLayer || []
? --
+ window.dataLayer = window.dataLayer || []
- function gtag () { dataLayer.push(arguments) }
? --
+ function gtag () { dataLayer.push(arguments) }
- gtag('js', new Date())
? --
+ gtag('js', new Date())
- gtag('config', 'UA-116965240-1')
? --
+ gtag('config', 'UA-116965240-1')
- }
</script>
<% } %>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, shrink-to-fit=no">
<meta name="google-site-verification" content="iLduelR9qGqpKOdqZ8y3vDet-8w5mTocNhPKmF0m9NM" />
<title>Parcel</title>
<link href="https://fonts.googleapis.com/css?family=Merriweather|Source+Sans+Pro" rel="stylesheet">
<link rel="icon" type="image/png" href="/static/img/favicon.png">
</head>
<body>
<div id="app"></div>
</body>
</html> | 10 | 0.344828 | 4 | 6 |
ee6f5b3312d72d5dc4861feaf32f0900f649a0f3 | packages/components/containers/api/DelinquentModal.tsx | packages/components/containers/api/DelinquentModal.tsx | import { c } from 'ttag';
import { getInvoicesPathname } from '@proton/shared/lib/apps/helper';
import { AlertModal, ButtonLike, ModalProps, SettingsLink } from '../../components';
import { useConfig } from '../../hooks';
const DelinquentModal = (props: ModalProps) => {
const { APP_NAME } = useConfig();
const title = c('Delinquent modal title').t`Overdue invoice`;
return (
<AlertModal
title={title}
buttons={[
<ButtonLike color="norm" as={SettingsLink} path={getInvoicesPathname(APP_NAME)}>
{c('Action').t`View invoice`}
</ButtonLike>,
]}
{...props}
>
<div>
{c('Info')
.t`Your Proton account is currently on hold. To continue using your account, please pay any overdue invoices.`}
</div>
</AlertModal>
);
};
export default DelinquentModal;
| import { c } from 'ttag';
import { getInvoicesPathname } from '@proton/shared/lib/apps/helper';
import { AlertModal, ButtonLike, ModalProps, SettingsLink } from '../../components';
import { useConfig } from '../../hooks';
const DelinquentModal = (props: ModalProps) => {
const { APP_NAME } = useConfig();
const title = c('Delinquent modal title').t`Overdue invoice`;
return (
<AlertModal
title={title}
buttons={[
<ButtonLike color="norm" as={SettingsLink} path={getInvoicesPathname(APP_NAME)} onClick={props.onClose}>
{c('Action').t`View invoice`}
</ButtonLike>,
]}
{...props}
>
<div>
{c('Info')
.t`Your Proton account is currently on hold. To continue using your account, please pay any overdue invoices.`}
</div>
</AlertModal>
);
};
export default DelinquentModal;
| Allow to close the delinquent modal | Allow to close the delinquent modal
It may be triggered from API handlers in account
| TypeScript | mit | ProtonMail/WebClient,ProtonMail/WebClient,ProtonMail/WebClient | typescript | ## Code Before:
import { c } from 'ttag';
import { getInvoicesPathname } from '@proton/shared/lib/apps/helper';
import { AlertModal, ButtonLike, ModalProps, SettingsLink } from '../../components';
import { useConfig } from '../../hooks';
const DelinquentModal = (props: ModalProps) => {
const { APP_NAME } = useConfig();
const title = c('Delinquent modal title').t`Overdue invoice`;
return (
<AlertModal
title={title}
buttons={[
<ButtonLike color="norm" as={SettingsLink} path={getInvoicesPathname(APP_NAME)}>
{c('Action').t`View invoice`}
</ButtonLike>,
]}
{...props}
>
<div>
{c('Info')
.t`Your Proton account is currently on hold. To continue using your account, please pay any overdue invoices.`}
</div>
</AlertModal>
);
};
export default DelinquentModal;
## Instruction:
Allow to close the delinquent modal
It may be triggered from API handlers in account
## Code After:
import { c } from 'ttag';
import { getInvoicesPathname } from '@proton/shared/lib/apps/helper';
import { AlertModal, ButtonLike, ModalProps, SettingsLink } from '../../components';
import { useConfig } from '../../hooks';
const DelinquentModal = (props: ModalProps) => {
const { APP_NAME } = useConfig();
const title = c('Delinquent modal title').t`Overdue invoice`;
return (
<AlertModal
title={title}
buttons={[
<ButtonLike color="norm" as={SettingsLink} path={getInvoicesPathname(APP_NAME)} onClick={props.onClose}>
{c('Action').t`View invoice`}
</ButtonLike>,
]}
{...props}
>
<div>
{c('Info')
.t`Your Proton account is currently on hold. To continue using your account, please pay any overdue invoices.`}
</div>
</AlertModal>
);
};
export default DelinquentModal;
| import { c } from 'ttag';
import { getInvoicesPathname } from '@proton/shared/lib/apps/helper';
import { AlertModal, ButtonLike, ModalProps, SettingsLink } from '../../components';
import { useConfig } from '../../hooks';
const DelinquentModal = (props: ModalProps) => {
const { APP_NAME } = useConfig();
const title = c('Delinquent modal title').t`Overdue invoice`;
return (
<AlertModal
title={title}
buttons={[
- <ButtonLike color="norm" as={SettingsLink} path={getInvoicesPathname(APP_NAME)}>
+ <ButtonLike color="norm" as={SettingsLink} path={getInvoicesPathname(APP_NAME)} onClick={props.onClose}>
? ++++++++++++++++++++++++
{c('Action').t`View invoice`}
</ButtonLike>,
]}
{...props}
>
<div>
{c('Info')
.t`Your Proton account is currently on hold. To continue using your account, please pay any overdue invoices.`}
</div>
</AlertModal>
);
};
export default DelinquentModal; | 2 | 0.068966 | 1 | 1 |
25e1cd086d785d4270b8bf269a19a28d5f372615 | src/themes/WordpressStandard/Resources/assets/js/cookies.js | src/themes/WordpressStandard/Resources/assets/js/cookies.js | /*
* This file is part of the WordPress Standard project.
*
* Copyright (c) 2015-2016 LIN3S <info@lin3s.com>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*
* @author Gorka Laucirica <gorka.lauzirika@gmail.com>
* @author Beñat Espiña <bespina@lin3s.com>
*/
'use strict';
(function ($) {
var $allLinks = $('a, button, .cookies__actions .button'),
$window = $(window),
scrollTop = 400;
if (!localStorage.getItem('cookies')) {
$('.cookies').addClass('cookies--visible');
}
function acceptCookies() {
localStorage.setItem('cookies', true);
$('.cookies').removeClass('cookies--visible');
}
$allLinks.click(function () {
acceptCookies();
});
$window.on('scroll', function () {
if (typeof window.requestAnimationFrame !== 'undefined') {
if ($(this).scrollTop() > scrollTop) {
window.requestAnimationFrame(acceptCookies);
}
} else {
if ($(this).scrollTop() > scrollTop) {
acceptCookies();
}
}
});
}(jQuery));
| /*
* This file is part of the WordPress Standard project.
*
* Copyright (c) 2015-2016 LIN3S <info@lin3s.com>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*
* @author Gorka Laucirica <gorka.lauzirika@gmail.com>
* @author Beñat Espiña <bespina@lin3s.com>
*/
'use strict';
(function ($) {
var $allLinks = $('a, button, .cookies__actions .button'),
$window = $(window),
scrollTop = 400;
if (!localStorage.getItem('cookies')) {
$('.cookies').addClass('cookies--visible');
}
function acceptCookies() {
localStorage.setItem('cookies', true);
$('.cookies').removeClass('cookies--visible');
}
$allLinks.click(function () {
acceptCookies();
});
$window.on('scroll', function () {
if (typeof window.requestAnimationFrame !== 'undefined') {
if ($(this).scrollTop() > scrollTop) {
window.requestAnimationFrame(acceptCookies);
}
} else if ($(this).scrollTop() > scrollTop) {
acceptCookies();
}
});
}(jQuery));
| Delete not needed nested block | Delete not needed nested block | JavaScript | mit | LIN3S/WordpressStandard,LIN3S/WordpressStandard | javascript | ## Code Before:
/*
* This file is part of the WordPress Standard project.
*
* Copyright (c) 2015-2016 LIN3S <info@lin3s.com>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*
* @author Gorka Laucirica <gorka.lauzirika@gmail.com>
* @author Beñat Espiña <bespina@lin3s.com>
*/
'use strict';
(function ($) {
var $allLinks = $('a, button, .cookies__actions .button'),
$window = $(window),
scrollTop = 400;
if (!localStorage.getItem('cookies')) {
$('.cookies').addClass('cookies--visible');
}
function acceptCookies() {
localStorage.setItem('cookies', true);
$('.cookies').removeClass('cookies--visible');
}
$allLinks.click(function () {
acceptCookies();
});
$window.on('scroll', function () {
if (typeof window.requestAnimationFrame !== 'undefined') {
if ($(this).scrollTop() > scrollTop) {
window.requestAnimationFrame(acceptCookies);
}
} else {
if ($(this).scrollTop() > scrollTop) {
acceptCookies();
}
}
});
}(jQuery));
## Instruction:
Delete not needed nested block
## Code After:
/*
* This file is part of the WordPress Standard project.
*
* Copyright (c) 2015-2016 LIN3S <info@lin3s.com>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*
* @author Gorka Laucirica <gorka.lauzirika@gmail.com>
* @author Beñat Espiña <bespina@lin3s.com>
*/
'use strict';
(function ($) {
var $allLinks = $('a, button, .cookies__actions .button'),
$window = $(window),
scrollTop = 400;
if (!localStorage.getItem('cookies')) {
$('.cookies').addClass('cookies--visible');
}
function acceptCookies() {
localStorage.setItem('cookies', true);
$('.cookies').removeClass('cookies--visible');
}
$allLinks.click(function () {
acceptCookies();
});
$window.on('scroll', function () {
if (typeof window.requestAnimationFrame !== 'undefined') {
if ($(this).scrollTop() > scrollTop) {
window.requestAnimationFrame(acceptCookies);
}
} else if ($(this).scrollTop() > scrollTop) {
acceptCookies();
}
});
}(jQuery));
| /*
* This file is part of the WordPress Standard project.
*
* Copyright (c) 2015-2016 LIN3S <info@lin3s.com>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*
* @author Gorka Laucirica <gorka.lauzirika@gmail.com>
* @author Beñat Espiña <bespina@lin3s.com>
*/
'use strict';
(function ($) {
var $allLinks = $('a, button, .cookies__actions .button'),
$window = $(window),
scrollTop = 400;
if (!localStorage.getItem('cookies')) {
$('.cookies').addClass('cookies--visible');
}
function acceptCookies() {
localStorage.setItem('cookies', true);
$('.cookies').removeClass('cookies--visible');
}
$allLinks.click(function () {
acceptCookies();
});
$window.on('scroll', function () {
if (typeof window.requestAnimationFrame !== 'undefined') {
if ($(this).scrollTop() > scrollTop) {
window.requestAnimationFrame(acceptCookies);
}
- } else {
- if ($(this).scrollTop() > scrollTop) {
+ } else if ($(this).scrollTop() > scrollTop) {
? + ++++
- acceptCookies();
? --
+ acceptCookies();
- }
}
});
}(jQuery)); | 6 | 0.130435 | 2 | 4 |
3be3e440f09a9c4bbbcf6d23a682783dcbf634e8 | lib/stairway/stairs.rb | lib/stairway/stairs.rb | require 'observer'
module Stairway
class Stairs
include Observable
attr_accessor :name
def initialize(name)
@name = name
end
def steps
@steps
end
def steps=(steps)
@steps = steps
@steps.each do |name, klass|
add_observer(klass)
end
end
def run(context={}, options={})
notify(context, options)
@steps.each do |name, klass|
begin
klass.run
notify(klass.context, klass.options)
rescue Stairway::Stop
exit
end
end
end
protected
def notify(context, options)
changed
notify_observers(context, options)
end
end
end
| require 'observer'
module Stairway
class Stairs
include Observable
attr_accessor :name
def initialize(name)
@name = name
end
def steps
@steps
end
def steps=(steps)
@steps = steps
@steps.each do |name, klass|
add_observer(klass)
end
end
def run(context={}, options={})
notify(context, options)
@steps.each do |name, klass|
begin
klass.run
notify(klass.context, klass.options)
rescue Stairway::Stop
exit
end
end
end
def run_step(name, context={}, options={})
notify(context, options)
@steps[name].run
end
protected
def notify(context, options)
changed
notify_observers(context, options)
end
end
end
| Add run_step to run a single step at once | Add run_step to run a single step at once
| Ruby | mit | garno/stairway | ruby | ## Code Before:
require 'observer'
module Stairway
class Stairs
include Observable
attr_accessor :name
def initialize(name)
@name = name
end
def steps
@steps
end
def steps=(steps)
@steps = steps
@steps.each do |name, klass|
add_observer(klass)
end
end
def run(context={}, options={})
notify(context, options)
@steps.each do |name, klass|
begin
klass.run
notify(klass.context, klass.options)
rescue Stairway::Stop
exit
end
end
end
protected
def notify(context, options)
changed
notify_observers(context, options)
end
end
end
## Instruction:
Add run_step to run a single step at once
## Code After:
require 'observer'
module Stairway
class Stairs
include Observable
attr_accessor :name
def initialize(name)
@name = name
end
def steps
@steps
end
def steps=(steps)
@steps = steps
@steps.each do |name, klass|
add_observer(klass)
end
end
def run(context={}, options={})
notify(context, options)
@steps.each do |name, klass|
begin
klass.run
notify(klass.context, klass.options)
rescue Stairway::Stop
exit
end
end
end
def run_step(name, context={}, options={})
notify(context, options)
@steps[name].run
end
protected
def notify(context, options)
changed
notify_observers(context, options)
end
end
end
| require 'observer'
module Stairway
class Stairs
include Observable
attr_accessor :name
def initialize(name)
@name = name
end
def steps
@steps
end
def steps=(steps)
@steps = steps
@steps.each do |name, klass|
add_observer(klass)
end
end
def run(context={}, options={})
notify(context, options)
@steps.each do |name, klass|
begin
klass.run
notify(klass.context, klass.options)
rescue Stairway::Stop
exit
end
end
end
+ def run_step(name, context={}, options={})
+ notify(context, options)
+
+ @steps[name].run
+ end
+
protected
def notify(context, options)
changed
notify_observers(context, options)
end
end
end | 6 | 0.130435 | 6 | 0 |
8535072e8843fe7cf955abb35f9b6aa7e2652835 | test/CodeGen/ARM/load-global.ll | test/CodeGen/ARM/load-global.ll | ; RUN: llvm-upgrade < %s | llvm-as | \
; RUN: llc -mtriple=arm-apple-darwin -relocation-model=dynamic-no-pic | \
; RUN: grep {L_G\$non_lazy_ptr} | wc -l | grep 2
; RUN: llvm-upgrade < %s | llvm-as | \
; RUN: llc -mtriple=arm-apple-darwin -relocation-model=pic | \
; RUN: grep {ldr.*pc} | wc -l | grep 1
; RUN: llvm-upgrade < %s | llvm-as | \
; RUN: llc -mtriple=arm-linux-gnueabi -relocation-model=pic | \
; RUN: grep {GOT} | wc -l | grep 1
%G = external global int
int %test1() {
%tmp = load int* %G
ret int %tmp
}
| ; RUN: llvm-as < %s | \
; RUN: llc -mtriple=arm-apple-darwin -relocation-model=static | \
; RUN: not grep {L_G\$non_lazy_ptr}
; RUN: llvm-as < %s | \
; RUN: llc -mtriple=arm-apple-darwin -relocation-model=dynamic-no-pic | \
; RUN: grep {L_G\$non_lazy_ptr} | wc -l | grep 2
; RUN: llvm-as < %s | \
; RUN: llc -mtriple=arm-apple-darwin -relocation-model=pic | \
; RUN: grep {ldr.*pc} | wc -l | grep 1
; RUN: llvm-as < %s | \
; RUN: llc -mtriple=arm-linux-gnueabi -relocation-model=pic | \
; RUN: grep {GOT} | wc -l | grep 1
@G = external global i32
define i32 @test1() {
%tmp = load i32* @G
ret i32 %tmp
}
| Test load global in static mode. | Test load global in static mode.
git-svn-id: 0ff597fd157e6f4fc38580e8d64ab130330d2411@36719 91177308-0d34-0410-b5e6-96231b3b80d8
| LLVM | apache-2.0 | llvm-mirror/llvm,chubbymaggie/asap,chubbymaggie/asap,chubbymaggie/asap,dslab-epfl/asap,chubbymaggie/asap,llvm-mirror/llvm,apple/swift-llvm,GPUOpen-Drivers/llvm,dslab-epfl/asap,apple/swift-llvm,llvm-mirror/llvm,GPUOpen-Drivers/llvm,dslab-epfl/asap,llvm-mirror/llvm,GPUOpen-Drivers/llvm,llvm-mirror/llvm,dslab-epfl/asap,chubbymaggie/asap,apple/swift-llvm,GPUOpen-Drivers/llvm,llvm-mirror/llvm,apple/swift-llvm,apple/swift-llvm,llvm-mirror/llvm,GPUOpen-Drivers/llvm,dslab-epfl/asap,chubbymaggie/asap,dslab-epfl/asap,GPUOpen-Drivers/llvm,llvm-mirror/llvm,apple/swift-llvm,GPUOpen-Drivers/llvm,llvm-mirror/llvm,apple/swift-llvm,apple/swift-llvm,dslab-epfl/asap,GPUOpen-Drivers/llvm | llvm | ## Code Before:
; RUN: llvm-upgrade < %s | llvm-as | \
; RUN: llc -mtriple=arm-apple-darwin -relocation-model=dynamic-no-pic | \
; RUN: grep {L_G\$non_lazy_ptr} | wc -l | grep 2
; RUN: llvm-upgrade < %s | llvm-as | \
; RUN: llc -mtriple=arm-apple-darwin -relocation-model=pic | \
; RUN: grep {ldr.*pc} | wc -l | grep 1
; RUN: llvm-upgrade < %s | llvm-as | \
; RUN: llc -mtriple=arm-linux-gnueabi -relocation-model=pic | \
; RUN: grep {GOT} | wc -l | grep 1
%G = external global int
int %test1() {
%tmp = load int* %G
ret int %tmp
}
## Instruction:
Test load global in static mode.
git-svn-id: 0ff597fd157e6f4fc38580e8d64ab130330d2411@36719 91177308-0d34-0410-b5e6-96231b3b80d8
## Code After:
; RUN: llvm-as < %s | \
; RUN: llc -mtriple=arm-apple-darwin -relocation-model=static | \
; RUN: not grep {L_G\$non_lazy_ptr}
; RUN: llvm-as < %s | \
; RUN: llc -mtriple=arm-apple-darwin -relocation-model=dynamic-no-pic | \
; RUN: grep {L_G\$non_lazy_ptr} | wc -l | grep 2
; RUN: llvm-as < %s | \
; RUN: llc -mtriple=arm-apple-darwin -relocation-model=pic | \
; RUN: grep {ldr.*pc} | wc -l | grep 1
; RUN: llvm-as < %s | \
; RUN: llc -mtriple=arm-linux-gnueabi -relocation-model=pic | \
; RUN: grep {GOT} | wc -l | grep 1
@G = external global i32
define i32 @test1() {
%tmp = load i32* @G
ret i32 %tmp
}
| - ; RUN: llvm-upgrade < %s | llvm-as | \
+ ; RUN: llvm-as < %s | \
+ ; RUN: llc -mtriple=arm-apple-darwin -relocation-model=static | \
+ ; RUN: not grep {L_G\$non_lazy_ptr}
+ ; RUN: llvm-as < %s | \
; RUN: llc -mtriple=arm-apple-darwin -relocation-model=dynamic-no-pic | \
; RUN: grep {L_G\$non_lazy_ptr} | wc -l | grep 2
- ; RUN: llvm-upgrade < %s | llvm-as | \
+ ; RUN: llvm-as < %s | \
; RUN: llc -mtriple=arm-apple-darwin -relocation-model=pic | \
; RUN: grep {ldr.*pc} | wc -l | grep 1
- ; RUN: llvm-upgrade < %s | llvm-as | \
+ ; RUN: llvm-as < %s | \
; RUN: llc -mtriple=arm-linux-gnueabi -relocation-model=pic | \
; RUN: grep {GOT} | wc -l | grep 1
- %G = external global int
? ^ ^^
+ @G = external global i32
? ^ ^^
- int %test1() {
+ define i32 @test1() {
- %tmp = load int* %G
? ^^ ^
+ %tmp = load i32* @G
? ^^ ^
- ret int %tmp
? ^^
+ ret i32 %tmp
? ^^
} | 17 | 1.0625 | 10 | 7 |
80399ba74e45b7d071f6e81868bc8af40c516647 | Cargo.toml | Cargo.toml | [package]
name = "riscan-pro"
version = "0.1.0"
authors = ["Pete Gadomski <pete.gadomski@gmail.com>"]
[dependencies]
docopt = "0.6"
las = "0.3"
nalgebra = "0.9"
pbr = "0.3"
rustc-serialize = "0.3"
xmltree = "0.3"
| [package]
name = "riscan-pro"
version = "0.1.0"
authors = ["Pete Gadomski <pete.gadomski@gmail.com>"]
[dependencies]
docopt = "0.6"
las = "0.3"
nalgebra = "0.9"
pbr = "0.3"
rustc-serialize = "0.3"
xmltree = "0.3"
[[bin]]
name = "riscan-pro"
doc = false
| Set doc to false for the binary | Set doc to false for the binary
| TOML | mit | gadomski/riscan-pro | toml | ## Code Before:
[package]
name = "riscan-pro"
version = "0.1.0"
authors = ["Pete Gadomski <pete.gadomski@gmail.com>"]
[dependencies]
docopt = "0.6"
las = "0.3"
nalgebra = "0.9"
pbr = "0.3"
rustc-serialize = "0.3"
xmltree = "0.3"
## Instruction:
Set doc to false for the binary
## Code After:
[package]
name = "riscan-pro"
version = "0.1.0"
authors = ["Pete Gadomski <pete.gadomski@gmail.com>"]
[dependencies]
docopt = "0.6"
las = "0.3"
nalgebra = "0.9"
pbr = "0.3"
rustc-serialize = "0.3"
xmltree = "0.3"
[[bin]]
name = "riscan-pro"
doc = false
| [package]
name = "riscan-pro"
version = "0.1.0"
authors = ["Pete Gadomski <pete.gadomski@gmail.com>"]
[dependencies]
docopt = "0.6"
las = "0.3"
nalgebra = "0.9"
pbr = "0.3"
rustc-serialize = "0.3"
xmltree = "0.3"
+
+ [[bin]]
+ name = "riscan-pro"
+ doc = false | 4 | 0.333333 | 4 | 0 |
496975c9986ef7fc4272a54e3060aa584ea4ee64 | README.md | README.md |
In January 2014 Github Team [announced Deployments API](http://developer.github.com/changes/2014-01-09-preview-the-new-deployments-api/) and you can use it with Capistrano 3.
## Installation
Add this line to your application's Gemfile:
gem 'capistrano-github'
gem 'octokit', github: 'octokit/octokit.rb', branch: 'deployments-preview'
And then execute:
$ bundle
Require github tasks and set `github_access_token`:
```ruby
# Capfile
require 'capistrano/github'
```
```ruby
# deploy.rb
set :github_access_token, '89c3be3d1f917b6ccf5e2c141dbc403f57bc140c'
```
You can get your personal GH token [here](https://github.com/settings/applications)
## Usage
New deployment record will be created automatically on each `cap deploy` run.
To see the list of deployments, execute
```bash
cap production github:deployments
```
## Contributing
1. Fork it ( http://github.com/<my-github-username>/capistrano-github/fork )
2. Create your feature branch (`git checkout -b my-new-feature`)
3. Commit your changes (`git commit -am 'Add some feature'`)
4. Push to the branch (`git push origin my-new-feature`)
5. Create new Pull Request
|
In January 2014 Github Team [announced Deployments API](http://developer.github.com/changes/2014-01-09-preview-the-new-deployments-api/) and you can use it with Capistrano 3.
## Installation
Add this line to your application's Gemfile:
gem 'capistrano-github', github: 'capistrano/github'
gem 'octokit', github: 'octokit/octokit.rb', branch: 'deployments-preview'
And then execute:
$ bundle
Require github tasks and set `github_access_token`:
```ruby
# Capfile
require 'capistrano/github'
```
```ruby
# deploy.rb
set :github_access_token, '89c3be3d1f917b6ccf5e2c141dbc403f57bc140c'
```
You can get your personal GH token [here](https://github.com/settings/applications)
## Usage
New deployment record will be created automatically on each `cap deploy` run.
To see the list of deployments, execute
```bash
cap production github:deployments
```
## Contributing
1. Fork it ( http://github.com/<my-github-username>/capistrano-github/fork )
2. Create your feature branch (`git checkout -b my-new-feature`)
3. Commit your changes (`git commit -am 'Add some feature'`)
4. Push to the branch (`git push origin my-new-feature`)
5. Create new Pull Request
| Update the Readme to use Github repo. | Update the Readme to use Github repo. | Markdown | mit | 3scale/capistrano-github,net2b/capistrano-github,tjwallace/github,capistrano/github | markdown | ## Code Before:
In January 2014 Github Team [announced Deployments API](http://developer.github.com/changes/2014-01-09-preview-the-new-deployments-api/) and you can use it with Capistrano 3.
## Installation
Add this line to your application's Gemfile:
gem 'capistrano-github'
gem 'octokit', github: 'octokit/octokit.rb', branch: 'deployments-preview'
And then execute:
$ bundle
Require github tasks and set `github_access_token`:
```ruby
# Capfile
require 'capistrano/github'
```
```ruby
# deploy.rb
set :github_access_token, '89c3be3d1f917b6ccf5e2c141dbc403f57bc140c'
```
You can get your personal GH token [here](https://github.com/settings/applications)
## Usage
New deployment record will be created automatically on each `cap deploy` run.
To see the list of deployments, execute
```bash
cap production github:deployments
```
## Contributing
1. Fork it ( http://github.com/<my-github-username>/capistrano-github/fork )
2. Create your feature branch (`git checkout -b my-new-feature`)
3. Commit your changes (`git commit -am 'Add some feature'`)
4. Push to the branch (`git push origin my-new-feature`)
5. Create new Pull Request
## Instruction:
Update the Readme to use Github repo.
## Code After:
In January 2014 Github Team [announced Deployments API](http://developer.github.com/changes/2014-01-09-preview-the-new-deployments-api/) and you can use it with Capistrano 3.
## Installation
Add this line to your application's Gemfile:
gem 'capistrano-github', github: 'capistrano/github'
gem 'octokit', github: 'octokit/octokit.rb', branch: 'deployments-preview'
And then execute:
$ bundle
Require github tasks and set `github_access_token`:
```ruby
# Capfile
require 'capistrano/github'
```
```ruby
# deploy.rb
set :github_access_token, '89c3be3d1f917b6ccf5e2c141dbc403f57bc140c'
```
You can get your personal GH token [here](https://github.com/settings/applications)
## Usage
New deployment record will be created automatically on each `cap deploy` run.
To see the list of deployments, execute
```bash
cap production github:deployments
```
## Contributing
1. Fork it ( http://github.com/<my-github-username>/capistrano-github/fork )
2. Create your feature branch (`git checkout -b my-new-feature`)
3. Commit your changes (`git commit -am 'Add some feature'`)
4. Push to the branch (`git push origin my-new-feature`)
5. Create new Pull Request
|
In January 2014 Github Team [announced Deployments API](http://developer.github.com/changes/2014-01-09-preview-the-new-deployments-api/) and you can use it with Capistrano 3.
## Installation
Add this line to your application's Gemfile:
- gem 'capistrano-github'
+ gem 'capistrano-github', github: 'capistrano/github'
gem 'octokit', github: 'octokit/octokit.rb', branch: 'deployments-preview'
And then execute:
$ bundle
Require github tasks and set `github_access_token`:
```ruby
# Capfile
require 'capistrano/github'
```
```ruby
# deploy.rb
set :github_access_token, '89c3be3d1f917b6ccf5e2c141dbc403f57bc140c'
```
You can get your personal GH token [here](https://github.com/settings/applications)
## Usage
New deployment record will be created automatically on each `cap deploy` run.
To see the list of deployments, execute
```bash
cap production github:deployments
```
## Contributing
1. Fork it ( http://github.com/<my-github-username>/capistrano-github/fork )
2. Create your feature branch (`git checkout -b my-new-feature`)
3. Commit your changes (`git commit -am 'Add some feature'`)
4. Push to the branch (`git push origin my-new-feature`)
5. Create new Pull Request | 2 | 0.043478 | 1 | 1 |
b84de1583bf0f922169578d40cecdf126688f8d8 | popup.css | popup.css | body {
width:450px;
text-align: left;
background-color: lightblue;
}
h1 {
text-align: center;
}
ol {
font-size: 14px;
}
ul {
font-size: 14px;
}
p {
font-size: 20px;
}
.button {
width: 150px;
text-align: center;
font-size: 16px;
}
| /*
Colors used in icon:
#1977BA
#6FCBD4
#FCC1B6
*/
body {
width:450px;
text-align: left;
background-color: lightblue;
}
h1 {
text-align: center;
}
ol {
font-size: 14px;
}
ul {
font-size: 14px;
}
p {
font-size: 20px;
}
.button {
width: 150px;
text-align: center;
font-size: 16px;
}
| Add comment with colors used in icon to CSS | Add comment with colors used in icon to CSS
| CSS | agpl-3.0 | realityfabric/Tumblr-Batch-Block,realityfabric/Tumblr-Batch-Block | css | ## Code Before:
body {
width:450px;
text-align: left;
background-color: lightblue;
}
h1 {
text-align: center;
}
ol {
font-size: 14px;
}
ul {
font-size: 14px;
}
p {
font-size: 20px;
}
.button {
width: 150px;
text-align: center;
font-size: 16px;
}
## Instruction:
Add comment with colors used in icon to CSS
## Code After:
/*
Colors used in icon:
#1977BA
#6FCBD4
#FCC1B6
*/
body {
width:450px;
text-align: left;
background-color: lightblue;
}
h1 {
text-align: center;
}
ol {
font-size: 14px;
}
ul {
font-size: 14px;
}
p {
font-size: 20px;
}
.button {
width: 150px;
text-align: center;
font-size: 16px;
}
| + /*
+ Colors used in icon:
+ #1977BA
+ #6FCBD4
+ #FCC1B6
+ */
+
body {
width:450px;
text-align: left;
background-color: lightblue;
}
h1 {
text-align: center;
}
ol {
font-size: 14px;
}
ul {
font-size: 14px;
}
p {
font-size: 20px;
}
.button {
width: 150px;
text-align: center;
font-size: 16px;
} | 7 | 0.259259 | 7 | 0 |
2481ecf42e612498453e5925b4d1d8d33d70e10d | lib/dirty_history.rb | lib/dirty_history.rb | require "dirty_history/version"
require "active_record"
$LOAD_PATH.unshift(File.dirname(__FILE__))
autoload :DirtyHistoryRecord, "dirty_history/dirty_history_record"
require "dirty_history/dirty_history_mixin"
$LOAD_PATH.shift
| require "dirty_history/version"
require "active_record"
$LOAD_PATH.unshift(File.dirname(__FILE__))
autoload :DirtyHistoryRecord, "dirty_history/dirty_history_record"
require "dirty_history/dirty_history_mixin"
ActiveRecord::Base.send :include, DirtyHistory::Mixin
$LOAD_PATH.shift
| Include into AR outside of mixin file so it can be tested | Include into AR outside of mixin file so it can be tested | Ruby | mit | GAV1N/dirty_history | ruby | ## Code Before:
require "dirty_history/version"
require "active_record"
$LOAD_PATH.unshift(File.dirname(__FILE__))
autoload :DirtyHistoryRecord, "dirty_history/dirty_history_record"
require "dirty_history/dirty_history_mixin"
$LOAD_PATH.shift
## Instruction:
Include into AR outside of mixin file so it can be tested
## Code After:
require "dirty_history/version"
require "active_record"
$LOAD_PATH.unshift(File.dirname(__FILE__))
autoload :DirtyHistoryRecord, "dirty_history/dirty_history_record"
require "dirty_history/dirty_history_mixin"
ActiveRecord::Base.send :include, DirtyHistory::Mixin
$LOAD_PATH.shift
| require "dirty_history/version"
require "active_record"
$LOAD_PATH.unshift(File.dirname(__FILE__))
autoload :DirtyHistoryRecord, "dirty_history/dirty_history_record"
require "dirty_history/dirty_history_mixin"
+ ActiveRecord::Base.send :include, DirtyHistory::Mixin
+
$LOAD_PATH.shift
| 2 | 0.2 | 2 | 0 |
0a73186344c871b6e30e1a892d7b497600ef4600 | bin/run-demos.sh | bin/run-demos.sh | set -e
# run demos with job_hazard.ini and job_risk.ini
for demo_dir in $(find "$1" -type d | sort); do
if [ -f $demo_dir/job_hazard.ini ]; then
python -m openquake.commands engine --run $demo_dir/job_hazard.ini
python -m openquake.commands engine --run $demo_dir/job_risk.ini --hc -1
fi
done
# run the other demos
if [ ! -d "$1" ]; then
echo "Please specify the location of the folder containing the demos. Aborting." >&2
exit 1
fi
for ini in $(find $1 -name job.ini | sort); do
python -m openquake.commands engine --run $ini
done
# do something with the generated data; -2 is LogicTreeCase3ClassicalPSHA
python -m openquake.commands export hcurves-rlzs -2 --exports hdf5 -d /tmp
python -m openquake.commands engine --lhc
MPLBACKEND=Agg python -m openquake.commands plot -2
MPLBACKEND=Agg python -m openquake.commands plot_uhs -2
# fake a wrong calculation still in executing status
python -m openquake.commands db set_status 1 executing
# repeat the failed/executing calculation, which is useful for QGIS
python -m openquake.commands engine --run $1/hazard/AreaSourceClassicalPSHA/job.ini
| set -e
# run demos with job_hazard.ini and job_risk.ini
for demo_dir in $(find "$1" -type d | sort); do
if [ -f $demo_dir/job_hazard.ini ]; then
python -m openquake.commands engine --run $demo_dir/job_hazard.ini
python -m openquake.commands engine --run $demo_dir/job_risk.ini --hc -1
fi
done
# run the other demos
if [ ! -d "$1" ]; then
echo "Please specify the location of the folder containing the demos. Aborting." >&2
exit 1
fi
for ini in $(find $1 -name job.ini | sort); do
python -m openquake.commands engine --run $ini
done
# do something with the generated data; -2 is LogicTreeCase3ClassicalPSHA
python -m openquake.commands export hcurves-rlzs -2 --exports hdf5 -d /tmp
python -m openquake.commands engine --lhc
MPLBACKEND=Agg python -m openquake.commands plot -2
MPLBACKEND=Agg python -m openquake.commands plot_uhs -2
# fake a wrong calculation still in executing status
python -m openquake.commands db set_status 1 executing
# repeat the failed/executing calculation, which is useful for QGIS
python -m openquake.commands engine --run $1/hazard/AreaSourceClassicalPSHA/job.ini
# display the calculations
python -m openquake.commands db find %
# build an HTML report
python -m openquake.engine --make-html-report today
| Build a report after running the demos | Build a report after running the demos
Former-commit-id: 4a9fc6c1fcb798a5f75f9a6549fbf65af0e728b8 | Shell | agpl-3.0 | gem/oq-engine,gem/oq-engine,gem/oq-engine,gem/oq-engine,gem/oq-engine | shell | ## Code Before:
set -e
# run demos with job_hazard.ini and job_risk.ini
for demo_dir in $(find "$1" -type d | sort); do
if [ -f $demo_dir/job_hazard.ini ]; then
python -m openquake.commands engine --run $demo_dir/job_hazard.ini
python -m openquake.commands engine --run $demo_dir/job_risk.ini --hc -1
fi
done
# run the other demos
if [ ! -d "$1" ]; then
echo "Please specify the location of the folder containing the demos. Aborting." >&2
exit 1
fi
for ini in $(find $1 -name job.ini | sort); do
python -m openquake.commands engine --run $ini
done
# do something with the generated data; -2 is LogicTreeCase3ClassicalPSHA
python -m openquake.commands export hcurves-rlzs -2 --exports hdf5 -d /tmp
python -m openquake.commands engine --lhc
MPLBACKEND=Agg python -m openquake.commands plot -2
MPLBACKEND=Agg python -m openquake.commands plot_uhs -2
# fake a wrong calculation still in executing status
python -m openquake.commands db set_status 1 executing
# repeat the failed/executing calculation, which is useful for QGIS
python -m openquake.commands engine --run $1/hazard/AreaSourceClassicalPSHA/job.ini
## Instruction:
Build a report after running the demos
Former-commit-id: 4a9fc6c1fcb798a5f75f9a6549fbf65af0e728b8
## Code After:
set -e
# run demos with job_hazard.ini and job_risk.ini
for demo_dir in $(find "$1" -type d | sort); do
if [ -f $demo_dir/job_hazard.ini ]; then
python -m openquake.commands engine --run $demo_dir/job_hazard.ini
python -m openquake.commands engine --run $demo_dir/job_risk.ini --hc -1
fi
done
# run the other demos
if [ ! -d "$1" ]; then
echo "Please specify the location of the folder containing the demos. Aborting." >&2
exit 1
fi
for ini in $(find $1 -name job.ini | sort); do
python -m openquake.commands engine --run $ini
done
# do something with the generated data; -2 is LogicTreeCase3ClassicalPSHA
python -m openquake.commands export hcurves-rlzs -2 --exports hdf5 -d /tmp
python -m openquake.commands engine --lhc
MPLBACKEND=Agg python -m openquake.commands plot -2
MPLBACKEND=Agg python -m openquake.commands plot_uhs -2
# fake a wrong calculation still in executing status
python -m openquake.commands db set_status 1 executing
# repeat the failed/executing calculation, which is useful for QGIS
python -m openquake.commands engine --run $1/hazard/AreaSourceClassicalPSHA/job.ini
# display the calculations
python -m openquake.commands db find %
# build an HTML report
python -m openquake.engine --make-html-report today
| set -e
# run demos with job_hazard.ini and job_risk.ini
for demo_dir in $(find "$1" -type d | sort); do
if [ -f $demo_dir/job_hazard.ini ]; then
python -m openquake.commands engine --run $demo_dir/job_hazard.ini
python -m openquake.commands engine --run $demo_dir/job_risk.ini --hc -1
fi
done
# run the other demos
if [ ! -d "$1" ]; then
echo "Please specify the location of the folder containing the demos. Aborting." >&2
exit 1
fi
for ini in $(find $1 -name job.ini | sort); do
python -m openquake.commands engine --run $ini
done
# do something with the generated data; -2 is LogicTreeCase3ClassicalPSHA
python -m openquake.commands export hcurves-rlzs -2 --exports hdf5 -d /tmp
python -m openquake.commands engine --lhc
MPLBACKEND=Agg python -m openquake.commands plot -2
MPLBACKEND=Agg python -m openquake.commands plot_uhs -2
# fake a wrong calculation still in executing status
python -m openquake.commands db set_status 1 executing
# repeat the failed/executing calculation, which is useful for QGIS
python -m openquake.commands engine --run $1/hazard/AreaSourceClassicalPSHA/job.ini
+
+
+ # display the calculations
+ python -m openquake.commands db find %
+
+ # build an HTML report
+ python -m openquake.engine --make-html-report today | 7 | 0.25 | 7 | 0 |
ec3582492bee170a3569555a02a1ae5506fd47bc | plugins/strings.js | plugins/strings.js | 'use strict'
exports.init = (bot, prefs) => {
bot.register.command('echo', {
format: true,
fn: message => {
var response = message.text.replace('/echo', "");
if (!response)
return "Supply some text to echo in markdown. (For example: <code>/echo *bold text*</code>.)";
bot.api.sendMessage(message.chat.id, response, {
parseMode: 'markdown',
reply: (message.reply_to_message || message).message_id
}).catch(e =>
e.description.includes('entity')
? message.tag(response)
: message.error(e.error_code, e.description)
);
}
});
}
'use strict'
// unformat (aka "tomd") converts a message's formatting entities back into
// Markdown source so the echoed text keeps its formatting.
const {unformat} = require('./unformat')

// Plugin entry point: registers the /echo command on the supplied bot.
// `prefs` is part of the plugin interface but is not used by this plugin.
exports.init = (bot, prefs) => {
bot.register.command('echo', {
format: true,
fn: message => {
// No argument text after the command: show usage help instead of echoing.
if (!message.args) {
return "Supply some text to echo in markdown. (For example: <code>/echo *bold text*</code>.)";
}
// Number of characters to drop from the front: the command entity plus one
// for the space after it. NOTE(review): assumes entities[0] is the leading
// /echo bot_command entity — confirm against the command dispatcher.
const cut = message.entities[0].length + 1;
// we run unformat with the command intact, to not mess up entity offsets,
// only after that we get rid of command
let response = unformat(message).slice(cut);
// Echo the reconstructed Markdown, replying to the quoted message when one
// exists, otherwise to the command message itself.
bot.api.sendMessage(message.chat.id, response, {
parseMode: 'markdown',
reply: (message.reply_to_message || message).message_id
}).catch(e =>
// API errors whose description mentions "entity" (presumably Markdown
// parse failures) fall back to message.tag; anything else is reported
// back to the chat via message.error.
e.description.includes('entity')
? message.tag(response)
: message.error(e.error_code, e.description)
);
}
});
}
| Add support for formated text to /echo | Add support for formated text to /echo
Runs unformat aka tomd before echoing to replace formatting
with markdown producing that formatting,
which allows the bot to reproduce that formatting.
| JavaScript | bsd-3-clause | lkd70/the-engine | javascript | ## Code Before:
'use strict'
exports.init = (bot, prefs) => {
bot.register.command('echo', {
format: true,
fn: message => {
var response = message.text.replace('/echo', "");
if (!response)
return "Supply some text to echo in markdown. (For example: <code>/echo *bold text*</code>.)";
bot.api.sendMessage(message.chat.id, response, {
parseMode: 'markdown',
reply: (message.reply_to_message || message).message_id
}).catch(e =>
e.description.includes('entity')
? message.tag(response)
: message.error(e.error_code, e.description)
);
}
});
}
## Instruction:
Add support for formated text to /echo
Runs unformat aka tomd before echoing to replace formatting
with markdown producing that formatting,
which allows the bot to reproduce that formatting.
## Code After:
'use strict'
const {unformat} = require('./unformat')
exports.init = (bot, prefs) => {
bot.register.command('echo', {
format: true,
fn: message => {
if (!message.args) {
return "Supply some text to echo in markdown. (For example: <code>/echo *bold text*</code>.)";
}
const cut = message.entities[0].length + 1;
// we run unformat with the command intact, to not mess up entity offsets,
// only after that we get rid of command
let response = unformat(message).slice(cut);
bot.api.sendMessage(message.chat.id, response, {
parseMode: 'markdown',
reply: (message.reply_to_message || message).message_id
}).catch(e =>
e.description.includes('entity')
? message.tag(response)
: message.error(e.error_code, e.description)
);
}
});
}
| 'use strict'
+
+ const {unformat} = require('./unformat')
exports.init = (bot, prefs) => {
bot.register.command('echo', {
format: true,
fn: message => {
- var response = message.text.replace('/echo', "");
+ if (!message.args) {
+ return "Supply some text to echo in markdown. (For example: <code>/echo *bold text*</code>.)";
+ }
- if (!response)
- return "Supply some text to echo in markdown. (For example: <code>/echo *bold text*</code>.)";
+ const cut = message.entities[0].length + 1;
+
+ // we run unformat with the command intact, to not mess up entity offsets,
+ // only after that we get rid of command
+ let response = unformat(message).slice(cut);
bot.api.sendMessage(message.chat.id, response, {
parseMode: 'markdown',
reply: (message.reply_to_message || message).message_id
}).catch(e =>
e.description.includes('entity')
? message.tag(response)
: message.error(e.error_code, e.description)
);
}
});
} | 13 | 0.464286 | 10 | 3 |
fcb8b6053f2d03966d264adc011773d0e1a04ebf | app/models/user.rb | app/models/user.rb | class User
include Mongoid::Document
# Include default devise modules. Others available are: :token_authenticatable, :encryptable,
# :confirmable, :lockable, :timeoutable, :database_authenticatable, :registerable, :recoverable,
# :rememberable, :trackable, :validatable and :omniauthable
devise :registerable, :trackable
field :contact, :type => Boolean
validates_presence_of :email, :contact
# validates_uniqueness_of :email, :case_sensitive => false
attr_accessible :email, :contact
end
| class User
include Mongoid::Document
# Include default devise modules. Others available are: :token_authenticatable, :encryptable,
# :confirmable, :lockable, :timeoutable, :database_authenticatable, :registerable, :recoverable,
# :rememberable, :trackable, :validatable and :omniauthable
devise :registerable, :trackable
field :name
field :contact, :type => Boolean
validates_presence_of :email, :contact, :name
validates_uniqueness_of :email, :case_sensitive => false
attr_accessible :name, :email, :contact
end
| Add name to User model. | Add name to User model.
| Ruby | epl-1.0 | prathamesh-sonpatki/frontend,prathamesh-sonpatki/frontend,RayRutjes/frontend,circleci/frontend,circleci/frontend,RayRutjes/frontend,circleci/frontend | ruby | ## Code Before:
class User
include Mongoid::Document
# Include default devise modules. Others available are: :token_authenticatable, :encryptable,
# :confirmable, :lockable, :timeoutable, :database_authenticatable, :registerable, :recoverable,
# :rememberable, :trackable, :validatable and :omniauthable
devise :registerable, :trackable
field :contact, :type => Boolean
validates_presence_of :email, :contact
# validates_uniqueness_of :email, :case_sensitive => false
attr_accessible :email, :contact
end
## Instruction:
Add name to User model.
## Code After:
class User
include Mongoid::Document
# Include default devise modules. Others available are: :token_authenticatable, :encryptable,
# :confirmable, :lockable, :timeoutable, :database_authenticatable, :registerable, :recoverable,
# :rememberable, :trackable, :validatable and :omniauthable
devise :registerable, :trackable
field :name
field :contact, :type => Boolean
validates_presence_of :email, :contact, :name
validates_uniqueness_of :email, :case_sensitive => false
attr_accessible :name, :email, :contact
end
| class User
include Mongoid::Document
# Include default devise modules. Others available are: :token_authenticatable, :encryptable,
# :confirmable, :lockable, :timeoutable, :database_authenticatable, :registerable, :recoverable,
# :rememberable, :trackable, :validatable and :omniauthable
devise :registerable, :trackable
+ field :name
field :contact, :type => Boolean
- validates_presence_of :email, :contact
+ validates_presence_of :email, :contact, :name
? +++++++
- # validates_uniqueness_of :email, :case_sensitive => false
? ---
+ validates_uniqueness_of :email, :case_sensitive => false
- attr_accessible :email, :contact
+ attr_accessible :name, :email, :contact
? +++++++
end | 7 | 0.538462 | 4 | 3 |
2c85bd944cd6f5166c9b2d9562e6015732c66a8e | job_runner/apps/job_runner/static/job_runner/less/job_runner.less | job_runner/apps/job_runner/static/job_runner/less/job_runner.less | h4 {
margin-bottom: 30px;
text-align: center;
}
h5 {
margin: 0px;
}
.job-run, .job {
padding: 10px;
border-radius: 15px;
margin-bottom: 15px;
ul {
list-style: none;
margin-left: 0px;
margin-bottom: 0px;
}
}
.job {
background: #d9edf7;
}
.job-scheduled, .job-in-queue, .job-started {
background: #d9edf7;
}
.job-completed {
background: #dff0d8;
}
.job-completed-with-error {
background: #f2dede;
}
}
.modal {
width: 900px;
margin-left: -450px;
}
| h4 {
margin-bottom: 30px;
text-align: center;
}
h5 {
margin: 0px;
}
.job-run, .job {
padding: 10px;
border-radius: 15px;
margin-bottom: 15px;
h5 {
height: 40px;
}
ul {
list-style: none;
margin-left: 0px;
margin-bottom: 0px;
}
}
.job {
background: #d9edf7;
}
.job-scheduled, .job-in-queue, .job-started {
background: #d9edf7;
}
.job-completed {
background: #dff0d8;
}
.job-completed-with-error {
background: #f2dede;
}
}
.modal {
width: 900px;
margin-left: -450px;
}
| Make space for two title lines. | Make space for two title lines.
| Less | bsd-3-clause | spilgames/job-runner,spilgames/job-runner | less | ## Code Before:
h4 {
margin-bottom: 30px;
text-align: center;
}
h5 {
margin: 0px;
}
.job-run, .job {
padding: 10px;
border-radius: 15px;
margin-bottom: 15px;
ul {
list-style: none;
margin-left: 0px;
margin-bottom: 0px;
}
}
.job {
background: #d9edf7;
}
.job-scheduled, .job-in-queue, .job-started {
background: #d9edf7;
}
.job-completed {
background: #dff0d8;
}
.job-completed-with-error {
background: #f2dede;
}
}
.modal {
width: 900px;
margin-left: -450px;
}
## Instruction:
Make space for two title lines.
## Code After:
h4 {
margin-bottom: 30px;
text-align: center;
}
h5 {
margin: 0px;
}
.job-run, .job {
padding: 10px;
border-radius: 15px;
margin-bottom: 15px;
h5 {
height: 40px;
}
ul {
list-style: none;
margin-left: 0px;
margin-bottom: 0px;
}
}
.job {
background: #d9edf7;
}
.job-scheduled, .job-in-queue, .job-started {
background: #d9edf7;
}
.job-completed {
background: #dff0d8;
}
.job-completed-with-error {
background: #f2dede;
}
}
.modal {
width: 900px;
margin-left: -450px;
}
| h4 {
margin-bottom: 30px;
text-align: center;
}
h5 {
margin: 0px;
}
.job-run, .job {
padding: 10px;
border-radius: 15px;
margin-bottom: 15px;
+
+ h5 {
+ height: 40px;
+ }
ul {
list-style: none;
margin-left: 0px;
margin-bottom: 0px;
}
}
.job {
background: #d9edf7;
}
.job-scheduled, .job-in-queue, .job-started {
background: #d9edf7;
}
.job-completed {
background: #dff0d8;
}
.job-completed-with-error {
background: #f2dede;
}
}
.modal {
width: 900px;
margin-left: -450px;
} | 4 | 0.093023 | 4 | 0 |
7369be9e24c15ac04a2e44a29c3ac275303c4f38 | README.md | README.md | CCLRequestReplay
################
[](https://travis-ci.org/cocodelabs/CCLRequestReplay)
CCLRequestReplay is greatly inspired by VCRURLConnection, however it supports
creating a recording purely from code instead of having to actually record the
requests manually and store them in a json file.
*Very alpha, work in progress. New features coming by the hour/day.*
|
[](https://travis-ci.org/cocodelabs/CCLRequestReplay)
CCLRequestReplay is greatly inspired by VCRURLConnection, however it supports
creating a recording purely from code instead of having to actually record the
requests manually and store them in a json file.
## Recording
```objective-c
CCLRequestReplayManager *manager = [[CCLRequestReplayManager alloc] init];
[manager record];
/* Make an NSURLConnection */
[manager stopRecording];
```
## Re-playing
```objective-c
[manager replay];
/* Make an NSURLConnection, it will be served from the manager */
[manager stopReplay];
```
| Update readme to include recording and replay | Update readme to include recording and replay
Closes #2
Closes #3
| Markdown | bsd-2-clause | alessandrostone/CCLRequestReplay,cocodelabs/CCLRequestReplay | markdown | ## Code Before:
CCLRequestReplay
################
[](https://travis-ci.org/cocodelabs/CCLRequestReplay)
CCLRequestReplay is greatly inspired by VCRURLConnection, however it supports
creating a recording purely from code instead of having to actually record the
requests manually and store them in a json file.
*Very alpha, work in progress. New features coming by the hour/day.*
## Instruction:
Update readme to include recording and replay
Closes #2
Closes #3
## Code After:
[](https://travis-ci.org/cocodelabs/CCLRequestReplay)
CCLRequestReplay is greatly inspired by VCRURLConnection, however it supports
creating a recording purely from code instead of having to actually record the
requests manually and store them in a json file.
## Recording
```objective-c
CCLRequestReplayManager *manager = [[CCLRequestReplayManager alloc] init];
[manager record];
/* Make an NSURLConnection */
[manager stopRecording];
```
## Re-playing
```objective-c
[manager replay];
/* Make an NSURLConnection, it will be served from the manager */
[manager stopReplay];
```
| - CCLRequestReplay
- ################
[](https://travis-ci.org/cocodelabs/CCLRequestReplay)
CCLRequestReplay is greatly inspired by VCRURLConnection, however it supports
creating a recording purely from code instead of having to actually record the
requests manually and store them in a json file.
- *Very alpha, work in progress. New features coming by the hour/day.*
+ ## Recording
+ ```objective-c
+ CCLRequestReplayManager *manager = [[CCLRequestReplayManager alloc] init];
+ [manager record];
+
+ /* Make an NSURLConnection */
+
+ [manager stopRecording];
+ ```
+
+ ## Re-playing
+
+ ```objective-c
+ [manager replay];
+
+ /* Make an NSURLConnection, it will be served from the manager */
+
+ [manager stopReplay];
+ ```
+ | 23 | 2.090909 | 20 | 3 |
46f7b28b088a43e44afbff7cb69e8f1614575920 | app/search_builders/deposit_search_builder.rb | app/search_builders/deposit_search_builder.rb | class DepositSearchBuilder < CurationConcerns::SearchBuilder
# includes the depositor_facet to get information on deposits.
# use caution when combining this with other searches as it sets the rows to zero to just get the facet information
# @param solr_parameters the current solr parameters
def include_depositor_facet(solr_parameters)
solr_parameters[:"facet.field"].concat([Solrizer.solr_name("depositor", :symbol)])
# defualt facet limit is 10, which will only show the top 10 users not all users deposits
solr_parameters[:"facet.limit"] = ::User.count
# only get file information
solr_parameters[:fq] = "has_model_ssim:GenericWork"
# we only want the facte counts not the actual data
solr_parameters[:rows] = 0
end
end
| class DepositSearchBuilder < ::SearchBuilder
# includes the depositor_facet to get information on deposits.
# use caution when combining this with other searches as it sets the rows to zero to just get the facet information
# @param solr_parameters the current solr parameters
def include_depositor_facet(solr_parameters)
solr_parameters[:"facet.field"].concat([Solrizer.solr_name("depositor", :symbol)])
# defualt facet limit is 10, which will only show the top 10 users not all users deposits
solr_parameters[:"facet.limit"] = ::User.count
# only get file information
solr_parameters[:fq] = "has_model_ssim:GenericWork"
# we only want the facte counts not the actual data
solr_parameters[:rows] = 0
end
end
| Stop using the deprecated CurationConcerns::SearchBuilder. | Stop using the deprecated CurationConcerns::SearchBuilder.
Fixes #1911
| Ruby | apache-2.0 | samvera/hyrax,samvera/hyrax,samvera/hyrax,samvera/hyrax | ruby | ## Code Before:
class DepositSearchBuilder < CurationConcerns::SearchBuilder
# includes the depositor_facet to get information on deposits.
# use caution when combining this with other searches as it sets the rows to zero to just get the facet information
# @param solr_parameters the current solr parameters
def include_depositor_facet(solr_parameters)
solr_parameters[:"facet.field"].concat([Solrizer.solr_name("depositor", :symbol)])
# defualt facet limit is 10, which will only show the top 10 users not all users deposits
solr_parameters[:"facet.limit"] = ::User.count
# only get file information
solr_parameters[:fq] = "has_model_ssim:GenericWork"
# we only want the facte counts not the actual data
solr_parameters[:rows] = 0
end
end
## Instruction:
Stop using the deprecated CurationConcerns::SearchBuilder.
Fixes #1911
## Code After:
class DepositSearchBuilder < ::SearchBuilder
# includes the depositor_facet to get information on deposits.
# use caution when combining this with other searches as it sets the rows to zero to just get the facet information
# @param solr_parameters the current solr parameters
def include_depositor_facet(solr_parameters)
solr_parameters[:"facet.field"].concat([Solrizer.solr_name("depositor", :symbol)])
# defualt facet limit is 10, which will only show the top 10 users not all users deposits
solr_parameters[:"facet.limit"] = ::User.count
# only get file information
solr_parameters[:fq] = "has_model_ssim:GenericWork"
# we only want the facte counts not the actual data
solr_parameters[:rows] = 0
end
end
| - class DepositSearchBuilder < CurationConcerns::SearchBuilder
? ----------------
+ class DepositSearchBuilder < ::SearchBuilder
# includes the depositor_facet to get information on deposits.
# use caution when combining this with other searches as it sets the rows to zero to just get the facet information
# @param solr_parameters the current solr parameters
def include_depositor_facet(solr_parameters)
solr_parameters[:"facet.field"].concat([Solrizer.solr_name("depositor", :symbol)])
# defualt facet limit is 10, which will only show the top 10 users not all users deposits
solr_parameters[:"facet.limit"] = ::User.count
# only get file information
solr_parameters[:fq] = "has_model_ssim:GenericWork"
# we only want the facte counts not the actual data
solr_parameters[:rows] = 0
end
end | 2 | 0.117647 | 1 | 1 |
d8b3c2fe61944b088fff4050da0a16185b10669c | lib/node_modules/@stdlib/math/base/special/gamma-lanczos-sum-expg-scaled/docs/repl.txt | lib/node_modules/@stdlib/math/base/special/gamma-lanczos-sum-expg-scaled/docs/repl.txt |
{{alias}}( x )
Calculates the Lanczos sum for the approximation of the gamma function
(scaled by `exp(-g)`, where `g` is {{alias:@stdlib/math/constants/float64-gamma-lanczos-g}}.
Parameters
----------
x: number
Input value.
Returns
-------
y: number
Scaled Lanczos sum.
Examples
--------
> var y = {{alias}}( 4.0 )
~0.018
> y = {{alias}}( -1.5 )
~25.337
> y = {{alias}}( -0.5 )
~-12.911
> y = {{alias}}( 0.5 )
~1.772
> y = {{alias}}( 0.0 )
infinity
> y = {{alias}}( NaN )
NaN
See Also
--------
|
{{alias}}( x )
Calculates the scaled Lanczos sum for the approximation of the gamma
function.
Parameters
----------
x: number
Input value.
Returns
-------
y: number
Scaled Lanczos sum.
Examples
--------
> var y = {{alias}}( 4.0 )
~0.018
> y = {{alias}}( -1.5 )
~25.337
> y = {{alias}}( -0.5 )
~-12.911
> y = {{alias}}( 0.5 )
~1.772
> y = {{alias}}( 0.0 )
infinity
> y = {{alias}}( NaN )
NaN
See Also
--------
| Remove implementation details from REPL text | Remove implementation details from REPL text
| Text | apache-2.0 | stdlib-js/stdlib,stdlib-js/stdlib,stdlib-js/stdlib,stdlib-js/stdlib,stdlib-js/stdlib,stdlib-js/stdlib,stdlib-js/stdlib,stdlib-js/stdlib | text | ## Code Before:
{{alias}}( x )
Calculates the Lanczos sum for the approximation of the gamma function
(scaled by `exp(-g)`, where `g` is {{alias:@stdlib/math/constants/float64-gamma-lanczos-g}}.
Parameters
----------
x: number
Input value.
Returns
-------
y: number
Scaled Lanczos sum.
Examples
--------
> var y = {{alias}}( 4.0 )
~0.018
> y = {{alias}}( -1.5 )
~25.337
> y = {{alias}}( -0.5 )
~-12.911
> y = {{alias}}( 0.5 )
~1.772
> y = {{alias}}( 0.0 )
infinity
> y = {{alias}}( NaN )
NaN
See Also
--------
## Instruction:
Remove implementation details from REPL text
## Code After:
{{alias}}( x )
Calculates the scaled Lanczos sum for the approximation of the gamma
function.
Parameters
----------
x: number
Input value.
Returns
-------
y: number
Scaled Lanczos sum.
Examples
--------
> var y = {{alias}}( 4.0 )
~0.018
> y = {{alias}}( -1.5 )
~25.337
> y = {{alias}}( -0.5 )
~-12.911
> y = {{alias}}( 0.5 )
~1.772
> y = {{alias}}( 0.0 )
infinity
> y = {{alias}}( NaN )
NaN
See Also
--------
|
{{alias}}( x )
- Calculates the Lanczos sum for the approximation of the gamma function
? ---------
+ Calculates the scaled Lanczos sum for the approximation of the gamma
? +++++++
- (scaled by `exp(-g)`, where `g` is {{alias:@stdlib/math/constants/float64-gamma-lanczos-g}}.
+ function.
Parameters
----------
x: number
Input value.
Returns
-------
y: number
Scaled Lanczos sum.
Examples
--------
> var y = {{alias}}( 4.0 )
~0.018
> y = {{alias}}( -1.5 )
~25.337
> y = {{alias}}( -0.5 )
~-12.911
> y = {{alias}}( 0.5 )
~1.772
> y = {{alias}}( 0.0 )
infinity
> y = {{alias}}( NaN )
NaN
See Also
--------
| 4 | 0.121212 | 2 | 2 |
ebd7a5a26e1238ca296486ec112a5e09852b7057 | metadata/com.tomer.dbz.widget.txt | metadata/com.tomer.dbz.widget.txt | Categories:Theming
License:GPL-3.0
Author Name:Tomer Rosenfeld
Author Email:tomer@tomerrosenfeld.com
Web Site:
Source Code:https://github.com/rosenpin/DBZ-Battery-Widget
Issue Tracker:https://github.com/rosenpin/DBZ-Battery-Widget/issues
Donate:https://www.paypal.me/rosenfeldtomer/2usd
Auto Name:Dragon Battery Widget
Summary:View your battery status with your favorite DBZ character
Description:
Choose your favorite character from Dragon Ball Z/Dragon Ball Super and view
your battery status, data and time in a meaningful way
.
Repo Type:git
Repo:https://github.com/rosenpin/DBZ-Battery-Widget.git
Build:1.5,4
commit=1.5
subdir=app
gradle=yes
Auto Update Mode:Version %v
Update Check Mode:Tags ^[0-9\.]+$
Current Version:1.5
Current Version Code:4
| Categories:Theming
License:GPL-3.0
Author Name:Tomer Rosenfeld
Author Email:tomer@tomerrosenfeld.com
Web Site:
Source Code:https://github.com/rosenpin/DBZ-Battery-Widget
Issue Tracker:https://github.com/rosenpin/DBZ-Battery-Widget/issues
Donate:https://www.paypal.me/rosenfeldtomer/2usd
Auto Name:DBZ Battery Widget
Summary:View your battery status with your favorite DBZ character
Description:
Choose your favorite character from Dragon Ball Z/Dragon Ball Super and view
your battery status, data and time in a meaningful way
.
Repo Type:git
Repo:https://github.com/rosenpin/DBZ-Battery-Widget.git
Build:1.5,4
commit=1.5
subdir=app
gradle=yes
Build:1.6,5
commit=1.6
subdir=app
gradle=yes
Auto Update Mode:Version %v
Update Check Mode:Tags ^[0-9\.]+$
Current Version:1.6
Current Version Code:5
| Update DBZ Battery Widget to 1.6 (5) | Update DBZ Battery Widget to 1.6 (5)
| Text | agpl-3.0 | f-droid/fdroiddata,f-droid/fdroiddata,f-droid/fdroid-data | text | ## Code Before:
Categories:Theming
License:GPL-3.0
Author Name:Tomer Rosenfeld
Author Email:tomer@tomerrosenfeld.com
Web Site:
Source Code:https://github.com/rosenpin/DBZ-Battery-Widget
Issue Tracker:https://github.com/rosenpin/DBZ-Battery-Widget/issues
Donate:https://www.paypal.me/rosenfeldtomer/2usd
Auto Name:Dragon Battery Widget
Summary:View your battery status with your favorite DBZ character
Description:
Choose your favorite character from Dragon Ball Z/Dragon Ball Super and view
your battery status, data and time in a meaningful way
.
Repo Type:git
Repo:https://github.com/rosenpin/DBZ-Battery-Widget.git
Build:1.5,4
commit=1.5
subdir=app
gradle=yes
Auto Update Mode:Version %v
Update Check Mode:Tags ^[0-9\.]+$
Current Version:1.5
Current Version Code:4
## Instruction:
Update DBZ Battery Widget to 1.6 (5)
## Code After:
Categories:Theming
License:GPL-3.0
Author Name:Tomer Rosenfeld
Author Email:tomer@tomerrosenfeld.com
Web Site:
Source Code:https://github.com/rosenpin/DBZ-Battery-Widget
Issue Tracker:https://github.com/rosenpin/DBZ-Battery-Widget/issues
Donate:https://www.paypal.me/rosenfeldtomer/2usd
Auto Name:DBZ Battery Widget
Summary:View your battery status with your favorite DBZ character
Description:
Choose your favorite character from Dragon Ball Z/Dragon Ball Super and view
your battery status, data and time in a meaningful way
.
Repo Type:git
Repo:https://github.com/rosenpin/DBZ-Battery-Widget.git
Build:1.5,4
commit=1.5
subdir=app
gradle=yes
Build:1.6,5
commit=1.6
subdir=app
gradle=yes
Auto Update Mode:Version %v
Update Check Mode:Tags ^[0-9\.]+$
Current Version:1.6
Current Version Code:5
| Categories:Theming
License:GPL-3.0
Author Name:Tomer Rosenfeld
Author Email:tomer@tomerrosenfeld.com
Web Site:
Source Code:https://github.com/rosenpin/DBZ-Battery-Widget
Issue Tracker:https://github.com/rosenpin/DBZ-Battery-Widget/issues
Donate:https://www.paypal.me/rosenfeldtomer/2usd
- Auto Name:Dragon Battery Widget
? ^^^^^
+ Auto Name:DBZ Battery Widget
? ^^
Summary:View your battery status with your favorite DBZ character
Description:
Choose your favorite character from Dragon Ball Z/Dragon Ball Super and view
your battery status, data and time in a meaningful way
.
Repo Type:git
Repo:https://github.com/rosenpin/DBZ-Battery-Widget.git
Build:1.5,4
commit=1.5
subdir=app
gradle=yes
+ Build:1.6,5
+ commit=1.6
+ subdir=app
+ gradle=yes
+
Auto Update Mode:Version %v
Update Check Mode:Tags ^[0-9\.]+$
- Current Version:1.5
? ^
+ Current Version:1.6
? ^
- Current Version Code:4
? ^
+ Current Version Code:5
? ^
| 11 | 0.392857 | 8 | 3 |
5bc06a719fc7b6e7abccb226a4a7c35943f1600a | Sources/Danger/CliArgsParser.swift | Sources/Danger/CliArgsParser.swift | //
// CliArgsParser.swift
// Danger
//
// Created by Franco Meloni on 12/11/2018.
//
import Foundation
public final class CliArgsParser {
public init() {}
public func parseCli(fromData data: Data) -> CliArgs? {
if let dictionary = try? JSONSerialization.jsonObject(with: data, options: .allowFragments) as? NSDictionary,
let cliArgsDictionary = dictionary?.value(forKeyPath: "danger.settings.cliArgs") as? [String:Any] {
return CliArgs(dictionary: cliArgsDictionary)
}
return nil
}
}
| //
// CliArgsParser.swift
// Danger
//
// Created by Franco Meloni on 12/11/2018.
//
import Foundation
public final class CliArgsParser {
public init() {}
public func parseCli(fromData data: Data) -> CliArgs? {
if let dictionary = try? JSONSerialization.jsonObject(with: data, options: .allowFragments) as? [String:Any],
let danger = dictionary?["danger"] as? [String:Any],
let settings = danger["settings"] as? [String:Any],
let cliArgsDictionary = settings["cliArgs"] as? [String:Any] {
return CliArgs(dictionary: cliArgsDictionary)
}
return nil
}
}
| Remove value(forKeyPath:) method call because is not supported by Linux | Remove value(forKeyPath:) method call because is not supported by Linux
| Swift | mit | danger/danger-swift | swift | ## Code Before:
//
// CliArgsParser.swift
// Danger
//
// Created by Franco Meloni on 12/11/2018.
//
import Foundation
public final class CliArgsParser {
public init() {}
public func parseCli(fromData data: Data) -> CliArgs? {
if let dictionary = try? JSONSerialization.jsonObject(with: data, options: .allowFragments) as? NSDictionary,
let cliArgsDictionary = dictionary?.value(forKeyPath: "danger.settings.cliArgs") as? [String:Any] {
return CliArgs(dictionary: cliArgsDictionary)
}
return nil
}
}
## Instruction:
Remove value(forKeyPath:) method call because is not supported by Linux
## Code After:
//
// CliArgsParser.swift
// Danger
//
// Created by Franco Meloni on 12/11/2018.
//
import Foundation
public final class CliArgsParser {
public init() {}
public func parseCli(fromData data: Data) -> CliArgs? {
if let dictionary = try? JSONSerialization.jsonObject(with: data, options: .allowFragments) as? [String:Any],
let danger = dictionary?["danger"] as? [String:Any],
let settings = danger["settings"] as? [String:Any],
let cliArgsDictionary = settings["cliArgs"] as? [String:Any] {
return CliArgs(dictionary: cliArgsDictionary)
}
return nil
}
}
| //
// CliArgsParser.swift
// Danger
//
// Created by Franco Meloni on 12/11/2018.
//
import Foundation
public final class CliArgsParser {
public init() {}
public func parseCli(fromData data: Data) -> CliArgs? {
- if let dictionary = try? JSONSerialization.jsonObject(with: data, options: .allowFragments) as? NSDictionary,
? ^ ^ ---- ^^
+ if let dictionary = try? JSONSerialization.jsonObject(with: data, options: .allowFragments) as? [String:Any],
? ^ ^^ ^^^^ +
+ let danger = dictionary?["danger"] as? [String:Any],
+ let settings = danger["settings"] as? [String:Any],
- let cliArgsDictionary = dictionary?.value(forKeyPath: "danger.settings.cliArgs") as? [String:Any] {
? -------------------------------------- ^ ^
+ let cliArgsDictionary = settings["cliArgs"] as? [String:Any] {
? ^^ ^
return CliArgs(dictionary: cliArgsDictionary)
}
return nil
}
} | 6 | 0.285714 | 4 | 2 |
dfd02ec10a904c5ce52162fa512e0850c789ce32 | language_explorer/staging_settings.py | language_explorer/staging_settings.py | LANGUAGE_EXPLORER_DB_URL = 'postgresql://esteele@/language_explorer'
JPHARVEST_DB_URL = 'postgresql://esteele@/jpharvest'
WALS_DB_URL = 'postgresql://esteele@/wals2013'
SIL_RCEM_TSV_SOURCE = '/home/esteele/lex_data_bundle/iso-639-3_Retirements.tab'
CENSUS_CSV_SOURCE = '/home/esteele/lex_data_bundle/census_2011_LANP_ENGLP.csv'
CACHE_ROOT = "/home/esteele/lex_data_bundle/cache"
TEST_CACHE_ROOT = CACHE_ROOT # For the moment
| LANGUAGE_EXPLORER_DB_URL = 'postgresql://esteele@/language_explorer'
JPHARVEST_DB_URL = 'postgresql://esteele@/jpharvest'
WALS_DB_URL = 'postgresql://esteele@/wals2013'
SIL_RCEM_TSV_SOURCE = '/home/esteele/lex_data_bundle/iso-639-3_Retirements.tab'
CENSUS_CSV_SOURCE = '/Users/esteele/Code/language_explorer/data/census_2011_LANP_ENGLP.csv'
CACHE_ROOT = "/home/esteele/lex_data_bundle/cache"
TEST_CACHE_ROOT = CACHE_ROOT # For the moment
| Use staging for creating a static copy, so refer to in-place assets, not deployed assets | Use staging for creating a static copy, so refer to in-place assets, not deployed assets
| Python | mit | edwinsteele/language_explorer,edwinsteele/language_explorer,edwinsteele/language_explorer | python | ## Code Before:
LANGUAGE_EXPLORER_DB_URL = 'postgresql://esteele@/language_explorer'
JPHARVEST_DB_URL = 'postgresql://esteele@/jpharvest'
WALS_DB_URL = 'postgresql://esteele@/wals2013'
SIL_RCEM_TSV_SOURCE = '/home/esteele/lex_data_bundle/iso-639-3_Retirements.tab'
CENSUS_CSV_SOURCE = '/home/esteele/lex_data_bundle/census_2011_LANP_ENGLP.csv'
CACHE_ROOT = "/home/esteele/lex_data_bundle/cache"
TEST_CACHE_ROOT = CACHE_ROOT # For the moment
## Instruction:
Use staging for creating a static copy, so refer to in-place assets, not deployed assets
## Code After:
LANGUAGE_EXPLORER_DB_URL = 'postgresql://esteele@/language_explorer'
JPHARVEST_DB_URL = 'postgresql://esteele@/jpharvest'
WALS_DB_URL = 'postgresql://esteele@/wals2013'
SIL_RCEM_TSV_SOURCE = '/home/esteele/lex_data_bundle/iso-639-3_Retirements.tab'
CENSUS_CSV_SOURCE = '/Users/esteele/Code/language_explorer/data/census_2011_LANP_ENGLP.csv'
CACHE_ROOT = "/home/esteele/lex_data_bundle/cache"
TEST_CACHE_ROOT = CACHE_ROOT # For the moment
| LANGUAGE_EXPLORER_DB_URL = 'postgresql://esteele@/language_explorer'
JPHARVEST_DB_URL = 'postgresql://esteele@/jpharvest'
WALS_DB_URL = 'postgresql://esteele@/wals2013'
SIL_RCEM_TSV_SOURCE = '/home/esteele/lex_data_bundle/iso-639-3_Retirements.tab'
- CENSUS_CSV_SOURCE = '/home/esteele/lex_data_bundle/census_2011_LANP_ENGLP.csv'
? - ^^^ ^ -------
+ CENSUS_CSV_SOURCE = '/Users/esteele/Code/language_explorer/data/census_2011_LANP_ENGLP.csv'
? ^^ ++ +++++ ++++++++ ^^^^^^^
CACHE_ROOT = "/home/esteele/lex_data_bundle/cache"
TEST_CACHE_ROOT = CACHE_ROOT # For the moment | 2 | 0.25 | 1 | 1 |
a5f44738f90d9e07723d9f9699eaf089ad4880da | test/deploy.bat | test/deploy.bat | mkdir mage
composer install
rem Actions after deploy (database creation, files copy, etc)
copy ..\src\app\etc\nmmlm mage\app\etc | mkdir mage
composer install
rem Actions after deploy (database creation, files copy, etc)
xcopy ..\src\app\etc\nmmlm mage\app\etc\nmmlm /E /Y /I | Deploy script changed from copy to xcopy | Deploy script changed from copy to xcopy | Batchfile | mit | praxigento/mage_ext_log4php | batchfile | ## Code Before:
mkdir mage
composer install
rem Actions after deploy (database creation, files copy, etc)
copy ..\src\app\etc\nmmlm mage\app\etc
## Instruction:
Deploy script changed from copy to xcopy
## Code After:
mkdir mage
composer install
rem Actions after deploy (database creation, files copy, etc)
xcopy ..\src\app\etc\nmmlm mage\app\etc\nmmlm /E /Y /I | mkdir mage
composer install
rem Actions after deploy (database creation, files copy, etc)
- copy ..\src\app\etc\nmmlm mage\app\etc
+ xcopy ..\src\app\etc\nmmlm mage\app\etc\nmmlm /E /Y /I
? + +++++++++++++++
| 2 | 0.5 | 1 | 1 |
84c6468034afdb7e9601dd5a843a1c80ca02c0c9 | shell/python.sh | shell/python.sh |
if dot::command_exists "pip"; then
eval "$(pip -qq --disable-pip-version-check completion --bash)"
fi
# (3) Python Versions --------------------------------------------------- {{{1
alias py="python"
alias py2="python2"
alias py3="python3"
# vim:foldmethod=marker:foldlevel=2
|
if dot::command_exists "pip3"; then
case "$SHELL_NAME" in
bash) eval "$(pip3 completion --bash)" ;;
zsh) eval "$(pip3 completion --zsh)" ;;
esac
fi
# (3) Python Versions --------------------------------------------------- {{{1
alias py="python3"
alias py2="python2"
alias py3="python3"
# vim:foldmethod=marker:foldlevel=2
| Load correct pip completion for various shells | fix: Load correct pip completion for various shells
| Shell | mit | dweidner/dotfiles,dweidner/dotfiles | shell | ## Code Before:
if dot::command_exists "pip"; then
eval "$(pip -qq --disable-pip-version-check completion --bash)"
fi
# (3) Python Versions --------------------------------------------------- {{{1
alias py="python"
alias py2="python2"
alias py3="python3"
# vim:foldmethod=marker:foldlevel=2
## Instruction:
fix: Load correct pip completion for various shells
## Code After:
if dot::command_exists "pip3"; then
case "$SHELL_NAME" in
bash) eval "$(pip3 completion --bash)" ;;
zsh) eval "$(pip3 completion --zsh)" ;;
esac
fi
# (3) Python Versions --------------------------------------------------- {{{1
alias py="python3"
alias py2="python2"
alias py3="python3"
# vim:foldmethod=marker:foldlevel=2
|
- if dot::command_exists "pip"; then
+ if dot::command_exists "pip3"; then
? +
- eval "$(pip -qq --disable-pip-version-check completion --bash)"
+ case "$SHELL_NAME" in
+ bash) eval "$(pip3 completion --bash)" ;;
+ zsh) eval "$(pip3 completion --zsh)" ;;
+ esac
fi
# (3) Python Versions --------------------------------------------------- {{{1
- alias py="python"
+ alias py="python3"
? +
alias py2="python2"
alias py3="python3"
# vim:foldmethod=marker:foldlevel=2 | 9 | 0.642857 | 6 | 3 |
06e2f3393f266ff5ff45c3231959245add4836e9 | client/hyalite/dom/collection.rb | client/hyalite/dom/collection.rb | require_relative 'element'
module Hyalite::DOM
class Collection
include Native
include Enumerable
def each(&block)
`self.native.length`.times do |i|
block.call Element.new(`self.native.item(i)`)
end
nil
end
end
end
| require_relative 'element'
module Hyalite::DOM
class Collection
include Native
include Enumerable
def each(&block)
`self.native.length`.times do |i|
block.call Element.new(`self.native.item(i)`)
end
nil
end
def [](index)
Element.new(`self.native.item(index)`)
end
def length
`self.native.length`
end
end
end
| Add index accessing for Collection | Add index accessing for Collection
| Ruby | mit | youchan/hyalite | ruby | ## Code Before:
require_relative 'element'
module Hyalite::DOM
class Collection
include Native
include Enumerable
def each(&block)
`self.native.length`.times do |i|
block.call Element.new(`self.native.item(i)`)
end
nil
end
end
end
## Instruction:
Add index accessing for Collection
## Code After:
require_relative 'element'
module Hyalite::DOM
class Collection
include Native
include Enumerable
def each(&block)
`self.native.length`.times do |i|
block.call Element.new(`self.native.item(i)`)
end
nil
end
def [](index)
Element.new(`self.native.item(index)`)
end
def length
`self.native.length`
end
end
end
| require_relative 'element'
module Hyalite::DOM
class Collection
include Native
include Enumerable
def each(&block)
`self.native.length`.times do |i|
block.call Element.new(`self.native.item(i)`)
end
nil
end
+
+ def [](index)
+ Element.new(`self.native.item(index)`)
+ end
+
+ def length
+ `self.native.length`
+ end
end
end | 8 | 0.5 | 8 | 0 |
c2044e2984ba52c19356049714e89dc1228bc687 | pubspec.yaml | pubspec.yaml | name: font_awesome_flutter
description: The Font Awesome Icon pack available as Flutter Icons. Provides 1500 additional icons to use in your apps.
author: Brian Egan <brian@brianegan.com>
homepage: https://github.com/brianegan/font_awesome_flutter
version: 8.6.0
environment:
sdk: ">=2.0.0-dev.28.0 <3.0.0"
dependencies:
flutter:
sdk: flutter
dev_dependencies:
recase: ^3.0.0
flutter:
fonts:
- family: FontAwesomeBrands
fonts:
- asset: lib/fonts/fa-brands-400.ttf
weight: 400
- family: FontAwesomeRegular
fonts:
- asset: lib/fonts/fa-regular-400.ttf
weight: 400
- family: FontAwesomeSolid
fonts:
- asset: lib/fonts/fa-solid-900.ttf
weight: 900
# To support pro icons, drop fa-light-300.ttf into lib/fonts,
# regenerate the icon collection and uncomment the following lines:
# - family: FontAwesomeLight
# fonts:
# - asset: lib/fonts/fa-light-300.ttf
# weight: 300
| name: font_awesome_flutter
description: The Font Awesome Icon pack available as Flutter Icons. Provides 1500 additional icons to use in your apps.
maintainer: Brian Egan <brian@brianegan.com>
authors:
- Flutter Community <community@flutter.zone>
- Brian Egan <brian@brianegan.com>
homepage: https://github.com/fluttercommunity/font_awesome_flutter
version: 8.6.0
environment:
sdk: ">=2.0.0-dev.28.0 <3.0.0"
dependencies:
flutter:
sdk: flutter
dev_dependencies:
recase: ^3.0.0
flutter:
fonts:
- family: FontAwesomeBrands
fonts:
- asset: lib/fonts/fa-brands-400.ttf
weight: 400
- family: FontAwesomeRegular
fonts:
- asset: lib/fonts/fa-regular-400.ttf
weight: 400
- family: FontAwesomeSolid
fonts:
- asset: lib/fonts/fa-solid-900.ttf
weight: 900
# To support pro icons, drop fa-light-300.ttf into lib/fonts,
# regenerate the icon collection and uncomment the following lines:
# - family: FontAwesomeLight
# fonts:
# - asset: lib/fonts/fa-light-300.ttf
# weight: 300
| Add Flutter Commmunity to authors, change repo home | Add Flutter Commmunity to authors, change repo home
| YAML | mit | brianegan/font_awesome_flutter,brianegan/font_awesome_flutter,brianegan/font_awesome_flutter | yaml | ## Code Before:
name: font_awesome_flutter
description: The Font Awesome Icon pack available as Flutter Icons. Provides 1500 additional icons to use in your apps.
author: Brian Egan <brian@brianegan.com>
homepage: https://github.com/brianegan/font_awesome_flutter
version: 8.6.0
environment:
sdk: ">=2.0.0-dev.28.0 <3.0.0"
dependencies:
flutter:
sdk: flutter
dev_dependencies:
recase: ^3.0.0
flutter:
fonts:
- family: FontAwesomeBrands
fonts:
- asset: lib/fonts/fa-brands-400.ttf
weight: 400
- family: FontAwesomeRegular
fonts:
- asset: lib/fonts/fa-regular-400.ttf
weight: 400
- family: FontAwesomeSolid
fonts:
- asset: lib/fonts/fa-solid-900.ttf
weight: 900
# To support pro icons, drop fa-light-300.ttf into lib/fonts,
# regenerate the icon collection and uncomment the following lines:
# - family: FontAwesomeLight
# fonts:
# - asset: lib/fonts/fa-light-300.ttf
# weight: 300
## Instruction:
Add Flutter Commmunity to authors, change repo home
## Code After:
name: font_awesome_flutter
description: The Font Awesome Icon pack available as Flutter Icons. Provides 1500 additional icons to use in your apps.
maintainer: Brian Egan <brian@brianegan.com>
authors:
- Flutter Community <community@flutter.zone>
- Brian Egan <brian@brianegan.com>
homepage: https://github.com/fluttercommunity/font_awesome_flutter
version: 8.6.0
environment:
sdk: ">=2.0.0-dev.28.0 <3.0.0"
dependencies:
flutter:
sdk: flutter
dev_dependencies:
recase: ^3.0.0
flutter:
fonts:
- family: FontAwesomeBrands
fonts:
- asset: lib/fonts/fa-brands-400.ttf
weight: 400
- family: FontAwesomeRegular
fonts:
- asset: lib/fonts/fa-regular-400.ttf
weight: 400
- family: FontAwesomeSolid
fonts:
- asset: lib/fonts/fa-solid-900.ttf
weight: 900
# To support pro icons, drop fa-light-300.ttf into lib/fonts,
# regenerate the icon collection and uncomment the following lines:
# - family: FontAwesomeLight
# fonts:
# - asset: lib/fonts/fa-light-300.ttf
# weight: 300
| name: font_awesome_flutter
description: The Font Awesome Icon pack available as Flutter Icons. Provides 1500 additional icons to use in your apps.
- author: Brian Egan <brian@brianegan.com>
? ^ ^^
+ maintainer: Brian Egan <brian@brianegan.com>
? + ^^ ^^^^
+ authors:
+ - Flutter Community <community@flutter.zone>
+ - Brian Egan <brian@brianegan.com>
- homepage: https://github.com/brianegan/font_awesome_flutter
? ^ ^^^^^^
+ homepage: https://github.com/fluttercommunity/font_awesome_flutter
? ^^^^^^ ++++++ ^^
version: 8.6.0
environment:
sdk: ">=2.0.0-dev.28.0 <3.0.0"
dependencies:
flutter:
sdk: flutter
dev_dependencies:
recase: ^3.0.0
flutter:
fonts:
- family: FontAwesomeBrands
fonts:
- asset: lib/fonts/fa-brands-400.ttf
weight: 400
- family: FontAwesomeRegular
fonts:
- asset: lib/fonts/fa-regular-400.ttf
weight: 400
- family: FontAwesomeSolid
fonts:
- asset: lib/fonts/fa-solid-900.ttf
weight: 900
# To support pro icons, drop fa-light-300.ttf into lib/fonts,
# regenerate the icon collection and uncomment the following lines:
# - family: FontAwesomeLight
# fonts:
# - asset: lib/fonts/fa-light-300.ttf
# weight: 300 | 7 | 0.194444 | 5 | 2 |
fb5a886f6e827efcd2f6c26436636975eda77192 | test/make-images-push-to-local-registry.sh | test/make-images-push-to-local-registry.sh |
set -e
cd ..
make docker-image DOCKER_IMAGE_TAG=$2
docker tag cilium/cilium:$2 $1/cilium/cilium:$2
docker tag cilium/cilium:$2 $1/cilium/cilium-dev:$2
docker tag cilium/operator:$2 $1/cilium/operator:$2
docker push $1/cilium/cilium:$2
docker push $1/cilium/cilium-dev:$2
docker push $1/cilium/operator:$2
cilium_git_version="$(cat GIT_VERSION)"
docker image prune -f --all --filter "label=cilium-sha=${cilium_git_version%% *}"
|
set -e
cd ..
make docker-image DOCKER_IMAGE_TAG=$2
docker tag cilium/cilium:$2 $1/cilium/cilium:$2
docker tag cilium/cilium:$2 $1/cilium/cilium-dev:$2
docker tag cilium/operator:$2 $1/cilium/operator:$2
docker push $1/cilium/cilium:$2
docker push $1/cilium/cilium-dev:$2
docker push $1/cilium/operator:$2
cilium_git_version="$(cat GIT_VERSION)"
counter=0
until [ $counter -eq 10 ] || docker image prune -f --all --filter "label=cilium-sha=${cilium_git_version%% *}"; do
((counter++))
sleep 6
done
| Add timeout to docker prune after building images | Add timeout to docker prune after building images
If other build is pruning while current builds starts the prune, prune
will fail and cause the build to fail. This change adds 10 retries in 6
seconds intervals which should be enough to prune correctly
Signed-off-by: Maciej Kwiek <33c1fdf481c8e628d4c6db7ea8dc77f49f2fa5d7@isovalent.com>
| Shell | apache-2.0 | cilium/cilium,cilium-team/cilium,cilium/cilium,tgraf/cilium,tgraf/cilium,michi-covalent/cilium,tgraf/cilium,cilium/cilium,cilium/cilium,tgraf/cilium,tklauser/cilium,michi-covalent/cilium,tklauser/cilium,tklauser/cilium,michi-covalent/cilium,cilium-team/cilium,michi-covalent/cilium,tgraf/cilium,tklauser/cilium,cilium/cilium,tklauser/cilium,michi-covalent/cilium,tgraf/cilium | shell | ## Code Before:
set -e
cd ..
make docker-image DOCKER_IMAGE_TAG=$2
docker tag cilium/cilium:$2 $1/cilium/cilium:$2
docker tag cilium/cilium:$2 $1/cilium/cilium-dev:$2
docker tag cilium/operator:$2 $1/cilium/operator:$2
docker push $1/cilium/cilium:$2
docker push $1/cilium/cilium-dev:$2
docker push $1/cilium/operator:$2
cilium_git_version="$(cat GIT_VERSION)"
docker image prune -f --all --filter "label=cilium-sha=${cilium_git_version%% *}"
## Instruction:
Add timeout to docker prune after building images
If other build is pruning while current builds starts the prune, prune
will fail and cause the build to fail. This change adds 10 retries in 6
seconds intervals which should be enough to prune correctly
Signed-off-by: Maciej Kwiek <33c1fdf481c8e628d4c6db7ea8dc77f49f2fa5d7@isovalent.com>
## Code After:
set -e
cd ..
make docker-image DOCKER_IMAGE_TAG=$2
docker tag cilium/cilium:$2 $1/cilium/cilium:$2
docker tag cilium/cilium:$2 $1/cilium/cilium-dev:$2
docker tag cilium/operator:$2 $1/cilium/operator:$2
docker push $1/cilium/cilium:$2
docker push $1/cilium/cilium-dev:$2
docker push $1/cilium/operator:$2
cilium_git_version="$(cat GIT_VERSION)"
counter=0
until [ $counter -eq 10 ] || docker image prune -f --all --filter "label=cilium-sha=${cilium_git_version%% *}"; do
((counter++))
sleep 6
done
|
set -e
cd ..
make docker-image DOCKER_IMAGE_TAG=$2
docker tag cilium/cilium:$2 $1/cilium/cilium:$2
docker tag cilium/cilium:$2 $1/cilium/cilium-dev:$2
docker tag cilium/operator:$2 $1/cilium/operator:$2
docker push $1/cilium/cilium:$2
docker push $1/cilium/cilium-dev:$2
docker push $1/cilium/operator:$2
cilium_git_version="$(cat GIT_VERSION)"
+
+ counter=0
- docker image prune -f --all --filter "label=cilium-sha=${cilium_git_version%% *}"
+ until [ $counter -eq 10 ] || docker image prune -f --all --filter "label=cilium-sha=${cilium_git_version%% *}"; do
? +++++++++++++++++++++++++++++ ++++
+ ((counter++))
+ sleep 6
+ done | 7 | 0.4375 | 6 | 1 |
5178318df905ed1a68d312adb3936e8748789b2b | tests/test_views.py | tests/test_views.py |
import json
import unittest
from mock import patch
from watchman import views
class TestWatchman(unittest.TestCase):
def setUp(self):
pass
@patch('watchman.views.check_databases')
def test_response_content_type_json(self, patched_check_databases):
patched_check_databases.return_value = []
response = views.status('')
self.assertEqual(response['Content-Type'], 'application/json')
@patch('watchman.views.check_databases')
def test_response_contains_expected_checks(self, patched_check_databases):
expected_checks = ['databases']
patched_check_databases.return_value = []
response = views.status('')
content = json.loads(response.content)
self.assertItemsEqual(expected_checks, content.keys())
def tearDown(self):
pass
|
import json
import unittest
from mock import patch
from watchman import views
class TestWatchman(unittest.TestCase):
def setUp(self):
pass
@patch('watchman.views.check_databases')
def test_response_content_type_json(self, patched_check_databases):
patched_check_databases.return_value = []
response = views.status('')
self.assertEqual(response['Content-Type'], 'application/json')
@patch('watchman.views.check_databases')
def test_response_contains_expected_checks(self, patched_check_databases):
expected_checks = ['databases']
patched_check_databases.return_value = []
response = views.status('')
content = json.loads(response.content)
self.assertItemsEqual(expected_checks, content.keys())
def test_check_database_handles_exception(self):
response = views.check_database('foo')
self.assertFalse(response['foo']['ok'])
self.assertEqual(response['foo']['error'], "The connection foo doesn't exist")
def tearDown(self):
pass
| Test exception handling in `check_database` | Test exception handling in `check_database`
| Python | bsd-3-clause | JBKahn/django-watchman,mwarkentin/django-watchman,mwarkentin/django-watchman,ulope/django-watchman,gerlachry/django-watchman,blag/django-watchman,JBKahn/django-watchman,blag/django-watchman,gerlachry/django-watchman,ulope/django-watchman | python | ## Code Before:
import json
import unittest
from mock import patch
from watchman import views
class TestWatchman(unittest.TestCase):
def setUp(self):
pass
@patch('watchman.views.check_databases')
def test_response_content_type_json(self, patched_check_databases):
patched_check_databases.return_value = []
response = views.status('')
self.assertEqual(response['Content-Type'], 'application/json')
@patch('watchman.views.check_databases')
def test_response_contains_expected_checks(self, patched_check_databases):
expected_checks = ['databases']
patched_check_databases.return_value = []
response = views.status('')
content = json.loads(response.content)
self.assertItemsEqual(expected_checks, content.keys())
def tearDown(self):
pass
## Instruction:
Test exception handling in `check_database`
## Code After:
import json
import unittest
from mock import patch
from watchman import views
class TestWatchman(unittest.TestCase):
def setUp(self):
pass
@patch('watchman.views.check_databases')
def test_response_content_type_json(self, patched_check_databases):
patched_check_databases.return_value = []
response = views.status('')
self.assertEqual(response['Content-Type'], 'application/json')
@patch('watchman.views.check_databases')
def test_response_contains_expected_checks(self, patched_check_databases):
expected_checks = ['databases']
patched_check_databases.return_value = []
response = views.status('')
content = json.loads(response.content)
self.assertItemsEqual(expected_checks, content.keys())
def test_check_database_handles_exception(self):
response = views.check_database('foo')
self.assertFalse(response['foo']['ok'])
self.assertEqual(response['foo']['error'], "The connection foo doesn't exist")
def tearDown(self):
pass
|
import json
import unittest
from mock import patch
from watchman import views
class TestWatchman(unittest.TestCase):
def setUp(self):
pass
@patch('watchman.views.check_databases')
def test_response_content_type_json(self, patched_check_databases):
patched_check_databases.return_value = []
response = views.status('')
self.assertEqual(response['Content-Type'], 'application/json')
@patch('watchman.views.check_databases')
def test_response_contains_expected_checks(self, patched_check_databases):
expected_checks = ['databases']
patched_check_databases.return_value = []
response = views.status('')
content = json.loads(response.content)
self.assertItemsEqual(expected_checks, content.keys())
+ def test_check_database_handles_exception(self):
+ response = views.check_database('foo')
+ self.assertFalse(response['foo']['ok'])
+ self.assertEqual(response['foo']['error'], "The connection foo doesn't exist")
+
def tearDown(self):
pass | 5 | 0.166667 | 5 | 0 |
fa2a3f0c689a6cedc86452665c338935a5b95574 | src/javascript/binary/websocket_pages/user/new_account/virtual_acc_opening/virtual_acc_opening.data.js | src/javascript/binary/websocket_pages/user/new_account/virtual_acc_opening/virtual_acc_opening.data.js | var VirtualAccOpeningData = (function(){
"use strict";
function getDetails(password, residence, verificationCode){
var req = {
new_account_virtual: 1,
client_password: password,
residence: residence,
verification_code: verificationCode
};
if ($.cookie('affiliate_tracking')) {
req.affiliate_token = JSON.parse($.cookie('affiliate_tracking')).t;
}
BinarySocket.send(req);
}
return {
getDetails: getDetails
};
}());
| var VirtualAccOpeningData = (function(){
"use strict";
function getDetails(password, residence, verificationCode){
var req = {
new_account_virtual: 1,
client_password: password,
residence: residence,
verification_code: verificationCode
};
// Add AdWords parameters
// NOTE: following lines can be uncommented (Re-check property names)
// once these fields added to this ws call
// var utm_data = AdWords.getData();
// req.source = utm_data.utm_source || utm_data.referrer || 'direct';
// if(utm_data.utm_medium) req.medium = utm_data.utm_medium;
// if(utm_data.utm_campaign) req.campaign = utm_data.utm_campaign;
if ($.cookie('affiliate_tracking')) {
req.affiliate_token = JSON.parse($.cookie('affiliate_tracking')).t;
}
BinarySocket.send(req);
}
return {
getDetails: getDetails
};
}());
| Send source information on virtual account opening | Send source information on virtual account opening
| JavaScript | apache-2.0 | teo-binary/binary-static,binary-com/binary-static,ashkanx/binary-static,binary-static-deployed/binary-static,binary-static-deployed/binary-static,teo-binary/binary-static,binary-com/binary-static,negar-binary/binary-static,kellybinary/binary-static,raunakkathuria/binary-static,fayland/binary-static,teo-binary/binary-static,binary-static-deployed/binary-static,kellybinary/binary-static,fayland/binary-static,4p00rv/binary-static,kellybinary/binary-static,teo-binary/binary-static,negar-binary/binary-static,fayland/binary-static,ashkanx/binary-static,raunakkathuria/binary-static,4p00rv/binary-static,binary-com/binary-static,4p00rv/binary-static,fayland/binary-static,raunakkathuria/binary-static,ashkanx/binary-static,negar-binary/binary-static | javascript | ## Code Before:
var VirtualAccOpeningData = (function(){
"use strict";
function getDetails(password, residence, verificationCode){
var req = {
new_account_virtual: 1,
client_password: password,
residence: residence,
verification_code: verificationCode
};
if ($.cookie('affiliate_tracking')) {
req.affiliate_token = JSON.parse($.cookie('affiliate_tracking')).t;
}
BinarySocket.send(req);
}
return {
getDetails: getDetails
};
}());
## Instruction:
Send source information on virtual account opening
## Code After:
var VirtualAccOpeningData = (function(){
"use strict";
function getDetails(password, residence, verificationCode){
var req = {
new_account_virtual: 1,
client_password: password,
residence: residence,
verification_code: verificationCode
};
// Add AdWords parameters
// NOTE: following lines can be uncommented (Re-check property names)
// once these fields added to this ws call
// var utm_data = AdWords.getData();
// req.source = utm_data.utm_source || utm_data.referrer || 'direct';
// if(utm_data.utm_medium) req.medium = utm_data.utm_medium;
// if(utm_data.utm_campaign) req.campaign = utm_data.utm_campaign;
if ($.cookie('affiliate_tracking')) {
req.affiliate_token = JSON.parse($.cookie('affiliate_tracking')).t;
}
BinarySocket.send(req);
}
return {
getDetails: getDetails
};
}());
| var VirtualAccOpeningData = (function(){
"use strict";
function getDetails(password, residence, verificationCode){
var req = {
new_account_virtual: 1,
client_password: password,
residence: residence,
verification_code: verificationCode
};
+
+ // Add AdWords parameters
+ // NOTE: following lines can be uncommented (Re-check property names)
+ // once these fields added to this ws call
+ // var utm_data = AdWords.getData();
+ // req.source = utm_data.utm_source || utm_data.referrer || 'direct';
+ // if(utm_data.utm_medium) req.medium = utm_data.utm_medium;
+ // if(utm_data.utm_campaign) req.campaign = utm_data.utm_campaign;
if ($.cookie('affiliate_tracking')) {
req.affiliate_token = JSON.parse($.cookie('affiliate_tracking')).t;
}
BinarySocket.send(req);
}
return {
getDetails: getDetails
};
}()); | 8 | 0.363636 | 8 | 0 |
5a18fcc0b4bed881657f79b3922a770209af3bc4 | app/views/posts/_single.html.haml | app/views/posts/_single.html.haml | .well
.post
.row
.span1
= render :partial => "shared/avatar", :locals => { :member => post.author }
.span7
- if defined?(subject)
%h3= link_to strip_tags(post.subject), post
.post-meta
%p
Posted by
= link_to post.author.login_name, member_path(post.author)
- if post.forum
in
= link_to post.forum, post.forum
at
= post.created_at
.post-body
:markdown
#{ strip_tags post.body }
- unless defined?(hide_comments)
.post-comments
= pluralize(post.comments.count, "comment")
= link_to "Read", post
%br
= link_to "Comment", new_comment_path(:post_id => post.id)
| .well
.post
.row
.span1
= render :partial => "shared/avatar", :locals => { :member => post.author }
.span7
- if defined?(subject)
%h3= link_to strip_tags(post.subject), post
.post-meta
%p
Posted by
= link_to post.author.login_name, member_path(post.author)
- if post.forum
in
= link_to post.forum, post.forum
at
= post.created_at
.post-body
:markdown
#{ strip_tags post.body }
- unless defined?(hide_comments)
.post-comments
%ul.inline
%li= link_to pluralize(post.comments.count, "comment"), post
%li= link_to "Comment", new_comment_path(:post_id => post.id)
| Use a ul.inline for "N comments | Comment" links. | Use a ul.inline for "N comments | Comment" links.
| Haml | agpl-3.0 | Growstuff/growstuff,jdanielnd/growstuff,CarsonBills/GrowStuffCMB,pozorvlak/growstuff,Br3nda/growstuff,pozorvlak/growstuff,korabh/growstuff,jdanielnd/growstuff,dv2/growstuff,andrba/growstuff,jdanielnd/growstuff,dv2/growstuff,borracciaBlu/growstuff,CloCkWeRX/growstuff,CjayBillones/growstuff,oshiho3/growstuff,cesy/growstuff,yez/growstuff,korabh/growstuff,GabrielSandoval/growstuff,gustavor-souza/growstuff,sksavant/growstuff,iressgrad15/growstuff,Br3nda/growstuff,cesy/growstuff,josefdaly/growstuff,maco/growstuff,pozorvlak/growstuff,dv2/growstuff,oshiho3/growstuff,GabrielSandoval/growstuff,oshiho3/growstuff,iressgrad15/growstuff,Br3nda/growstuff,gustavor-souza/growstuff,yez/growstuff,sksavant/growstuff,andrba/growstuff,yez/growstuff,CarsonBills/GrowStuffCMB,Growstuff/growstuff,korabh/growstuff,borracciaBlu/growstuff,CloCkWeRX/growstuff,dv2/growstuff,josefdaly/growstuff,Br3nda/growstuff,gustavor-souza/growstuff,Growstuff/growstuff,pozorvlak/growstuff,sksavant/growstuff,CjayBillones/growstuff,josefdaly/growstuff,gustavor-souza/growstuff,josefdaly/growstuff,oshiho3/growstuff,borracciaBlu/growstuff,cesy/growstuff,CjayBillones/growstuff,borracciaBlu/growstuff,CjayBillones/growstuff,CarsonBills/GrowStuffCMB,yez/growstuff,Growstuff/growstuff,CarsonBills/GrowStuffCMB,GabrielSandoval/growstuff,sksavant/growstuff,iressgrad15/growstuff,andrba/growstuff,jdanielnd/growstuff,korabh/growstuff,andrba/growstuff,maco/growstuff,GabrielSandoval/growstuff,maco/growstuff,cesy/growstuff,CloCkWeRX/growstuff,maco/growstuff,CloCkWeRX/growstuff | haml | ## Code Before:
.well
.post
.row
.span1
= render :partial => "shared/avatar", :locals => { :member => post.author }
.span7
- if defined?(subject)
%h3= link_to strip_tags(post.subject), post
.post-meta
%p
Posted by
= link_to post.author.login_name, member_path(post.author)
- if post.forum
in
= link_to post.forum, post.forum
at
= post.created_at
.post-body
:markdown
#{ strip_tags post.body }
- unless defined?(hide_comments)
.post-comments
= pluralize(post.comments.count, "comment")
= link_to "Read", post
%br
= link_to "Comment", new_comment_path(:post_id => post.id)
## Instruction:
Use a ul.inline for "N comments | Comment" links.
## Code After:
.well
.post
.row
.span1
= render :partial => "shared/avatar", :locals => { :member => post.author }
.span7
- if defined?(subject)
%h3= link_to strip_tags(post.subject), post
.post-meta
%p
Posted by
= link_to post.author.login_name, member_path(post.author)
- if post.forum
in
= link_to post.forum, post.forum
at
= post.created_at
.post-body
:markdown
#{ strip_tags post.body }
- unless defined?(hide_comments)
.post-comments
%ul.inline
%li= link_to pluralize(post.comments.count, "comment"), post
%li= link_to "Comment", new_comment_path(:post_id => post.id)
| .well
.post
.row
.span1
= render :partial => "shared/avatar", :locals => { :member => post.author }
.span7
- if defined?(subject)
%h3= link_to strip_tags(post.subject), post
.post-meta
%p
Posted by
= link_to post.author.login_name, member_path(post.author)
- if post.forum
in
= link_to post.forum, post.forum
at
= post.created_at
.post-body
:markdown
#{ strip_tags post.body }
- unless defined?(hide_comments)
.post-comments
+ %ul.inline
- = pluralize(post.comments.count, "comment")
+ %li= link_to pluralize(post.comments.count, "comment"), post
? +++++ ++++++++ ++++++
- = link_to "Read", post
- %br
- = link_to "Comment", new_comment_path(:post_id => post.id)
+ %li= link_to "Comment", new_comment_path(:post_id => post.id)
? +++++
| 7 | 0.225806 | 3 | 4 |
bafd3c41e9328a979afa46c8e9e847b9a04ea39a | commands/appsngen-widget-list-remove.js | commands/appsngen-widget-list-remove.js | (function () {
'use strict';
var program = require('./../src/customcommander');
var registrycontroller = require('./../src/registrycontroller');
var rmdir = require('rmdir');
var widgetName, widgetsList;
var removeRegistryRecord = function (name) {
widgetsList[name] = undefined;
registrycontroller.updateWidgetsList(widgetsList);
};
program
.arguments('<name>')
.usage('<name> [option]')
.option('--hard', 'delete widget folder')
.action(function (name) {
widgetName = name;
})
.parse(process.argv);
if (typeof widgetName === 'undefined') {
program.help();
}
widgetsList = registrycontroller.getWidgetsList();
if (widgetsList[widgetName]) {
if (program.hard) {
rmdir(widgetsList[widgetName].path, function (error) {
if(error) {
console.error(error.toString());
process.exit(1);
}
removeRegistryRecord(widgetName);
});
} else {
removeRegistryRecord(widgetName);
}
} else {
console.log('Widget with provided name doesn\'t exist.');
process.exit(1);
}
})(); | (function () {
'use strict';
var program = require('./../src/customcommander');
var registrycontroller = require('./../src/registrycontroller');
var rmdir = require('rmdir');
var widgetName, widgetsList;
var removeRegistryRecord = function (name) {
console.log('WARNING: widget package might remain at appsngen.com' +
(widgetsList[name].phonegapId ? ' and build.phonegap.com.': '.'));
widgetsList[name] = undefined;
registrycontroller.updateWidgetsList(widgetsList);
};
program
.arguments('<name>')
.usage('<name> [option]')
.option('--hard', 'delete widget folder')
.action(function (name) {
widgetName = name;
})
.parse(process.argv);
if (typeof widgetName === 'undefined') {
program.help();
}
widgetsList = registrycontroller.getWidgetsList();
if (widgetsList[widgetName]) {
if (program.hard) {
rmdir(widgetsList[widgetName].path, function (error) {
if(error) {
console.error(error.toString());
process.exit(1);
}
removeRegistryRecord(widgetName);
});
} else {
removeRegistryRecord(widgetName);
}
} else {
console.log('Widget with provided name doesn\'t exist.');
process.exit(1);
}
})(); | Add information message for 'widget list remove' | Add information message for 'widget list remove'
| JavaScript | mit | appsngen/appsngen-cli,appsngen/appsngen-cli | javascript | ## Code Before:
(function () {
'use strict';
var program = require('./../src/customcommander');
var registrycontroller = require('./../src/registrycontroller');
var rmdir = require('rmdir');
var widgetName, widgetsList;
var removeRegistryRecord = function (name) {
widgetsList[name] = undefined;
registrycontroller.updateWidgetsList(widgetsList);
};
program
.arguments('<name>')
.usage('<name> [option]')
.option('--hard', 'delete widget folder')
.action(function (name) {
widgetName = name;
})
.parse(process.argv);
if (typeof widgetName === 'undefined') {
program.help();
}
widgetsList = registrycontroller.getWidgetsList();
if (widgetsList[widgetName]) {
if (program.hard) {
rmdir(widgetsList[widgetName].path, function (error) {
if(error) {
console.error(error.toString());
process.exit(1);
}
removeRegistryRecord(widgetName);
});
} else {
removeRegistryRecord(widgetName);
}
} else {
console.log('Widget with provided name doesn\'t exist.');
process.exit(1);
}
})();
## Instruction:
Add information message for 'widget list remove'
## Code After:
(function () {
'use strict';
var program = require('./../src/customcommander');
var registrycontroller = require('./../src/registrycontroller');
var rmdir = require('rmdir');
var widgetName, widgetsList;
var removeRegistryRecord = function (name) {
console.log('WARNING: widget package might remain at appsngen.com' +
(widgetsList[name].phonegapId ? ' and build.phonegap.com.': '.'));
widgetsList[name] = undefined;
registrycontroller.updateWidgetsList(widgetsList);
};
program
.arguments('<name>')
.usage('<name> [option]')
.option('--hard', 'delete widget folder')
.action(function (name) {
widgetName = name;
})
.parse(process.argv);
if (typeof widgetName === 'undefined') {
program.help();
}
widgetsList = registrycontroller.getWidgetsList();
if (widgetsList[widgetName]) {
if (program.hard) {
rmdir(widgetsList[widgetName].path, function (error) {
if(error) {
console.error(error.toString());
process.exit(1);
}
removeRegistryRecord(widgetName);
});
} else {
removeRegistryRecord(widgetName);
}
} else {
console.log('Widget with provided name doesn\'t exist.');
process.exit(1);
}
})(); | (function () {
'use strict';
var program = require('./../src/customcommander');
var registrycontroller = require('./../src/registrycontroller');
var rmdir = require('rmdir');
var widgetName, widgetsList;
var removeRegistryRecord = function (name) {
+ console.log('WARNING: widget package might remain at appsngen.com' +
+ (widgetsList[name].phonegapId ? ' and build.phonegap.com.': '.'));
widgetsList[name] = undefined;
registrycontroller.updateWidgetsList(widgetsList);
};
program
.arguments('<name>')
.usage('<name> [option]')
.option('--hard', 'delete widget folder')
.action(function (name) {
widgetName = name;
})
.parse(process.argv);
if (typeof widgetName === 'undefined') {
program.help();
}
widgetsList = registrycontroller.getWidgetsList();
if (widgetsList[widgetName]) {
if (program.hard) {
rmdir(widgetsList[widgetName].path, function (error) {
if(error) {
console.error(error.toString());
process.exit(1);
}
removeRegistryRecord(widgetName);
});
} else {
removeRegistryRecord(widgetName);
}
} else {
console.log('Widget with provided name doesn\'t exist.');
process.exit(1);
}
})(); | 2 | 0.045455 | 2 | 0 |
5f0d2d90214645491c555c079544af09dede25f0 | app/component/List.js | app/component/List.js | import { default as React, Component } from 'react'
import { render } from 'react-dom'
export class List extends Component {
constructor(props) {
super(props);
}
render() {
let items = this.props.items;
var itemsComponent = []
Object.keys(items).forEach(function (key) {
itemsComponent.push(<Item key={key} value={items[key]} _id={key} onClick={this.props.onClick} />);
}.bind(this));
return (
<div>
{itemsComponent}
</div>
);
}
}
class Item extends Component {
constructor(props) {
super(props);
}
render() {
// var status = (this.props.complete) ? 'complete' : 'pending';
return (
<div onClick={this.props.onClick.bind(null, this.props._id) }>
<input type="checkbox" />
<label >{this.props.value}</label>
</div>
);
}
} | import { default as React, Component } from 'react'
import { render } from 'react-dom'
export class List extends Component {
constructor(props) {
super(props);
}
render() {
let items = this.props.items;
var itemsComponent = []
Object.keys(items).forEach(function (key) {
itemsComponent.push(<Item key={key} value={items[key]} _id={key} onClick={this.props.onClick} />);
}.bind(this));
return (
<div>
{itemsComponent}
</div>
);
}
}
class Item extends Component {
constructor(props) {
super(props);
}
render() {
// var status = (this.props.complete) ? 'complete' : 'pending';
return (
<div onClick={this.props.onClick.bind(null, this.props._id) }>
<label ><input type="checkbox" />{this.props.value}</label>
</div>
);
}
} | Make the whole div clickable | Make the whole div clickable
| JavaScript | apache-2.0 | appbaseio/reactive-maps,appbaseio/reactive-maps | javascript | ## Code Before:
import { default as React, Component } from 'react'
import { render } from 'react-dom'
export class List extends Component {
constructor(props) {
super(props);
}
render() {
let items = this.props.items;
var itemsComponent = []
Object.keys(items).forEach(function (key) {
itemsComponent.push(<Item key={key} value={items[key]} _id={key} onClick={this.props.onClick} />);
}.bind(this));
return (
<div>
{itemsComponent}
</div>
);
}
}
class Item extends Component {
constructor(props) {
super(props);
}
render() {
// var status = (this.props.complete) ? 'complete' : 'pending';
return (
<div onClick={this.props.onClick.bind(null, this.props._id) }>
<input type="checkbox" />
<label >{this.props.value}</label>
</div>
);
}
}
## Instruction:
Make the whole div clickable
## Code After:
import { default as React, Component } from 'react'
import { render } from 'react-dom'
export class List extends Component {
constructor(props) {
super(props);
}
render() {
let items = this.props.items;
var itemsComponent = []
Object.keys(items).forEach(function (key) {
itemsComponent.push(<Item key={key} value={items[key]} _id={key} onClick={this.props.onClick} />);
}.bind(this));
return (
<div>
{itemsComponent}
</div>
);
}
}
class Item extends Component {
constructor(props) {
super(props);
}
render() {
// var status = (this.props.complete) ? 'complete' : 'pending';
return (
<div onClick={this.props.onClick.bind(null, this.props._id) }>
<label ><input type="checkbox" />{this.props.value}</label>
</div>
);
}
} | import { default as React, Component } from 'react'
import { render } from 'react-dom'
export class List extends Component {
constructor(props) {
super(props);
}
render() {
let items = this.props.items;
var itemsComponent = []
Object.keys(items).forEach(function (key) {
itemsComponent.push(<Item key={key} value={items[key]} _id={key} onClick={this.props.onClick} />);
}.bind(this));
return (
<div>
{itemsComponent}
</div>
);
}
}
class Item extends Component {
constructor(props) {
super(props);
}
render() {
// var status = (this.props.complete) ? 'complete' : 'pending';
return (
<div onClick={this.props.onClick.bind(null, this.props._id) }>
- <input type="checkbox" />
- <label >{this.props.value}</label>
+ <label ><input type="checkbox" />{this.props.value}</label>
? +++++++++++++++++++++++++
</div>
);
}
} | 3 | 0.083333 | 1 | 2 |
6467c6b6075199cc6df5514f3640eee3979a9bde | waiter/lib/travis/web/sentry_deploy_hook.rb | waiter/lib/travis/web/sentry_deploy_hook.rb | require 'sinatra'
require 'uri'
require 'net/http'
require 'json'
class Travis::Web::SentryDeployHook < Sinatra::Base
set sentry_api_key: ENV['SENTRY_API_KEY']
set sentry_org: 'travis-ci'
set sentry_project: 'travis-web-h4'
set sentry_releases_endpoint: "https://app.getsentry.com/api/0/projects/#{settings.sentry_org}/#{settings.sentry_project}/releases/"
set github_commit_url: "https://github.com/travis-web/travis-ci/commit"
post '/deploy/hooks/sentry' do
version = determine_version(params["url"])
request_body = {
version: version,
ref: params["head_long"],
url: "#{settings.github_commit_url}/#{params["head_long"]}"
}.to_json
url = URI(settings.sentry_releases_endpoint)
request = Net::HTTP::Post.new(url.request_uri, initheader = {'Content-Type' => 'application/json'})
request.basic_auth settings.sentry_api_key, ''
request.body = request_body
Net::HTTP.start(url.host, url.port, use_ssl: true) do |http|
http.request(request)
end
end
def determine_version(url)
if params["url"] && params["url"].include?(".org")
version = "org-#{params["head"]}"
elsif params["url"] && params["url"].include?(".com")
version = "org-#{params["head"]}"
else
version = params["head"]
end
end
end
| require 'sinatra'
require 'uri'
require 'net/http'
require 'json'
class Travis::Web::SentryDeployHook < Sinatra::Base
set sentry_api_key: ENV['SENTRY_API_KEY']
set sentry_org: 'travis-ci'
set sentry_project: 'travis-web-h4'
set sentry_releases_endpoint: "https://app.getsentry.com/api/0/projects/#{settings.sentry_org}/#{settings.sentry_project}/releases/"
set github_commit_url: "https://github.com/travis-web/travis-ci/commit"
post '/deploy/hooks/sentry' do
version = determine_version(params["url"], params["head"])
request_body = {
version: version,
ref: params["head_long"],
url: "#{settings.github_commit_url}/#{params["head_long"]}"
}.to_json
url = URI(settings.sentry_releases_endpoint)
request = Net::HTTP::Post.new(url.request_uri, initheader = {'Content-Type' => 'application/json'})
request.basic_auth settings.sentry_api_key, ''
request.body = request_body
Net::HTTP.start(url.host, url.port, use_ssl: true) do |http|
http.request(request)
end
end
def determine_version(url, sha)
return sha unless url
domain = url.include?(".org") ? "org" : "com"
"#{domain}-#{sha}"
end
end
| Fix hard-coded domain in release name | Fix hard-coded domain in release name
| Ruby | mit | travis-ci/travis-web,travis-ci/travis-web,fotinakis/travis-web,fotinakis/travis-web,travis-ci/travis-web,fotinakis/travis-web,travis-ci/travis-web,fotinakis/travis-web | ruby | ## Code Before:
require 'sinatra'
require 'uri'
require 'net/http'
require 'json'
class Travis::Web::SentryDeployHook < Sinatra::Base
set sentry_api_key: ENV['SENTRY_API_KEY']
set sentry_org: 'travis-ci'
set sentry_project: 'travis-web-h4'
set sentry_releases_endpoint: "https://app.getsentry.com/api/0/projects/#{settings.sentry_org}/#{settings.sentry_project}/releases/"
set github_commit_url: "https://github.com/travis-web/travis-ci/commit"
post '/deploy/hooks/sentry' do
version = determine_version(params["url"])
request_body = {
version: version,
ref: params["head_long"],
url: "#{settings.github_commit_url}/#{params["head_long"]}"
}.to_json
url = URI(settings.sentry_releases_endpoint)
request = Net::HTTP::Post.new(url.request_uri, initheader = {'Content-Type' => 'application/json'})
request.basic_auth settings.sentry_api_key, ''
request.body = request_body
Net::HTTP.start(url.host, url.port, use_ssl: true) do |http|
http.request(request)
end
end
def determine_version(url)
if params["url"] && params["url"].include?(".org")
version = "org-#{params["head"]}"
elsif params["url"] && params["url"].include?(".com")
version = "org-#{params["head"]}"
else
version = params["head"]
end
end
end
## Instruction:
Fix hard-coded domain in release name
## Code After:
require 'sinatra'
require 'uri'
require 'net/http'
require 'json'
class Travis::Web::SentryDeployHook < Sinatra::Base
set sentry_api_key: ENV['SENTRY_API_KEY']
set sentry_org: 'travis-ci'
set sentry_project: 'travis-web-h4'
set sentry_releases_endpoint: "https://app.getsentry.com/api/0/projects/#{settings.sentry_org}/#{settings.sentry_project}/releases/"
set github_commit_url: "https://github.com/travis-web/travis-ci/commit"
post '/deploy/hooks/sentry' do
version = determine_version(params["url"], params["head"])
request_body = {
version: version,
ref: params["head_long"],
url: "#{settings.github_commit_url}/#{params["head_long"]}"
}.to_json
url = URI(settings.sentry_releases_endpoint)
request = Net::HTTP::Post.new(url.request_uri, initheader = {'Content-Type' => 'application/json'})
request.basic_auth settings.sentry_api_key, ''
request.body = request_body
Net::HTTP.start(url.host, url.port, use_ssl: true) do |http|
http.request(request)
end
end
def determine_version(url, sha)
return sha unless url
domain = url.include?(".org") ? "org" : "com"
"#{domain}-#{sha}"
end
end
| require 'sinatra'
require 'uri'
require 'net/http'
require 'json'
class Travis::Web::SentryDeployHook < Sinatra::Base
set sentry_api_key: ENV['SENTRY_API_KEY']
set sentry_org: 'travis-ci'
set sentry_project: 'travis-web-h4'
set sentry_releases_endpoint: "https://app.getsentry.com/api/0/projects/#{settings.sentry_org}/#{settings.sentry_project}/releases/"
set github_commit_url: "https://github.com/travis-web/travis-ci/commit"
post '/deploy/hooks/sentry' do
- version = determine_version(params["url"])
+ version = determine_version(params["url"], params["head"])
? ++++++++++++++++
request_body = {
version: version,
ref: params["head_long"],
url: "#{settings.github_commit_url}/#{params["head_long"]}"
}.to_json
url = URI(settings.sentry_releases_endpoint)
request = Net::HTTP::Post.new(url.request_uri, initheader = {'Content-Type' => 'application/json'})
request.basic_auth settings.sentry_api_key, ''
request.body = request_body
Net::HTTP.start(url.host, url.port, use_ssl: true) do |http|
http.request(request)
end
end
- def determine_version(url)
+ def determine_version(url, sha)
? +++++
+ return sha unless url
+ domain = url.include?(".org") ? "org" : "com"
+ "#{domain}-#{sha}"
- if params["url"] && params["url"].include?(".org")
- version = "org-#{params["head"]}"
- elsif params["url"] && params["url"].include?(".com")
- version = "org-#{params["head"]}"
- else
- version = params["head"]
- end
end
end | 14 | 0.325581 | 5 | 9 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.