commit stringlengths 40 40 | old_file stringlengths 4 184 | new_file stringlengths 4 184 | old_contents stringlengths 1 3.6k | new_contents stringlengths 5 3.38k | subject stringlengths 15 778 | message stringlengths 16 6.74k | lang stringclasses 201 values | license stringclasses 13 values | repos stringlengths 6 116k | config stringclasses 201 values | content stringlengths 137 7.24k | diff stringlengths 26 5.55k | diff_length int64 1 123 | relative_diff_length float64 0.01 89 | n_lines_added int64 0 108 | n_lines_deleted int64 0 106 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
c20461f187c8a34d1a983b51f281446d88e44d5b | back/src/main/java/org/crunchytorch/coddy/snippet/service/SnippetService.java | back/src/main/java/org/crunchytorch/coddy/snippet/service/SnippetService.java | package org.crunchytorch.coddy.snippet.service;
import org.crunchytorch.coddy.application.exception.EntityNotFoundException;
import org.crunchytorch.coddy.application.service.AbstractService;
import org.crunchytorch.coddy.snippet.elasticsearch.entity.SnippetEntity;
import org.crunchytorch.coddy.snippet.elasticsearch.repository.SnippetRepository;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import javax.ws.rs.core.SecurityContext;
import java.util.Date;
@Service
public class SnippetService extends AbstractService<SnippetEntity> {
@Autowired
public SnippetService(SnippetRepository repository) {
super(repository);
}
public SnippetEntity getSnippet(String id) {
SnippetEntity entity = this.repository.findOne(id);
if (entity == null) {
throw new EntityNotFoundException("snippet with the id : " + id + " not found");
}
return entity;
}
public SnippetEntity create(SnippetEntity entity, SecurityContext securityContext) {
// Set date to now
Date now = new Date();
entity.setCreated(now);
entity.setLastModified(now);
// Set author from token information
entity.setAuthor(securityContext.getUserPrincipal().getName());
// Initiate rate
entity.setRate(0);
return super.create(entity);
}
}
| package org.crunchytorch.coddy.snippet.service;
import org.crunchytorch.coddy.application.exception.EntityNotFoundException;
import org.crunchytorch.coddy.application.service.AbstractService;
import org.crunchytorch.coddy.snippet.elasticsearch.entity.SnippetEntity;
import org.crunchytorch.coddy.snippet.elasticsearch.repository.SnippetRepository;
import org.crunchytorch.coddy.user.data.security.JWTPrincipal;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import javax.ws.rs.core.SecurityContext;
import java.util.Date;
@Service
public class SnippetService extends AbstractService<SnippetEntity> {
@Autowired
public SnippetService(SnippetRepository repository) {
super(repository);
}
public SnippetEntity getSnippet(String id) {
SnippetEntity entity = this.repository.findOne(id);
if (entity == null) {
throw new EntityNotFoundException("snippet with the id : " + id + " not found");
}
return entity;
}
public SnippetEntity create(SnippetEntity entity, SecurityContext securityContext) {
// Set date to now
Date now = new Date();
entity.setCreated(now);
entity.setLastModified(now);
// Set author from token information
entity.setAuthor(((JWTPrincipal) securityContext.getUserPrincipal()).getLogin());
// Initiate rate
entity.setRate(0);
return super.create(entity);
}
}
| Use login instead of user's name. | Use login instead of user's name.
| Java | mit | Crunchy-Torch/coddy,Crunchy-Torch/coddy,Crunchy-Torch/coddy,Crunchy-Torch/coddy,Crunchy-Torch/coddy | java | ## Code Before:
package org.crunchytorch.coddy.snippet.service;
import org.crunchytorch.coddy.application.exception.EntityNotFoundException;
import org.crunchytorch.coddy.application.service.AbstractService;
import org.crunchytorch.coddy.snippet.elasticsearch.entity.SnippetEntity;
import org.crunchytorch.coddy.snippet.elasticsearch.repository.SnippetRepository;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import javax.ws.rs.core.SecurityContext;
import java.util.Date;
@Service
public class SnippetService extends AbstractService<SnippetEntity> {
@Autowired
public SnippetService(SnippetRepository repository) {
super(repository);
}
public SnippetEntity getSnippet(String id) {
SnippetEntity entity = this.repository.findOne(id);
if (entity == null) {
throw new EntityNotFoundException("snippet with the id : " + id + " not found");
}
return entity;
}
public SnippetEntity create(SnippetEntity entity, SecurityContext securityContext) {
// Set date to now
Date now = new Date();
entity.setCreated(now);
entity.setLastModified(now);
// Set author from token information
entity.setAuthor(securityContext.getUserPrincipal().getName());
// Initiate rate
entity.setRate(0);
return super.create(entity);
}
}
## Instruction:
Use login instead of user's name.
## Code After:
package org.crunchytorch.coddy.snippet.service;
import org.crunchytorch.coddy.application.exception.EntityNotFoundException;
import org.crunchytorch.coddy.application.service.AbstractService;
import org.crunchytorch.coddy.snippet.elasticsearch.entity.SnippetEntity;
import org.crunchytorch.coddy.snippet.elasticsearch.repository.SnippetRepository;
import org.crunchytorch.coddy.user.data.security.JWTPrincipal;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import javax.ws.rs.core.SecurityContext;
import java.util.Date;
@Service
public class SnippetService extends AbstractService<SnippetEntity> {
@Autowired
public SnippetService(SnippetRepository repository) {
super(repository);
}
public SnippetEntity getSnippet(String id) {
SnippetEntity entity = this.repository.findOne(id);
if (entity == null) {
throw new EntityNotFoundException("snippet with the id : " + id + " not found");
}
return entity;
}
public SnippetEntity create(SnippetEntity entity, SecurityContext securityContext) {
// Set date to now
Date now = new Date();
entity.setCreated(now);
entity.setLastModified(now);
// Set author from token information
entity.setAuthor(((JWTPrincipal) securityContext.getUserPrincipal()).getLogin());
// Initiate rate
entity.setRate(0);
return super.create(entity);
}
}
| package org.crunchytorch.coddy.snippet.service;
import org.crunchytorch.coddy.application.exception.EntityNotFoundException;
import org.crunchytorch.coddy.application.service.AbstractService;
import org.crunchytorch.coddy.snippet.elasticsearch.entity.SnippetEntity;
import org.crunchytorch.coddy.snippet.elasticsearch.repository.SnippetRepository;
+ import org.crunchytorch.coddy.user.data.security.JWTPrincipal;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import javax.ws.rs.core.SecurityContext;
import java.util.Date;
@Service
public class SnippetService extends AbstractService<SnippetEntity> {
@Autowired
public SnippetService(SnippetRepository repository) {
super(repository);
}
public SnippetEntity getSnippet(String id) {
SnippetEntity entity = this.repository.findOne(id);
if (entity == null) {
throw new EntityNotFoundException("snippet with the id : " + id + " not found");
}
return entity;
}
public SnippetEntity create(SnippetEntity entity, SecurityContext securityContext) {
// Set date to now
Date now = new Date();
entity.setCreated(now);
entity.setLastModified(now);
// Set author from token information
- entity.setAuthor(securityContext.getUserPrincipal().getName());
? ^^^^
+ entity.setAuthor(((JWTPrincipal) securityContext.getUserPrincipal()).getLogin());
? ++++++++++++++++ + ^^^^^
// Initiate rate
entity.setRate(0);
return super.create(entity);
}
} | 3 | 0.071429 | 2 | 1 |
142e8a3f54d19856770946c0ccbeebbb5b55908e | app/assets/stylesheets/english_tea/typography/_fonts.scss | app/assets/stylesheets/english_tea/typography/_fonts.scss | // ==========================================================================
// Shorthand font family declaration classes
// ==========================================================================
.f
{
font-family: $type-font-body;
font-weight: $type-weight-normal;
letter-spacing: normal;
text-transform: $type-transform-body;
}
.f-title
{
font-family: $type-font-title;
font-weight: $type-weight-medium;
letter-spacing: $type-kerning-title;
text-transform: $type-transform-title;
}
.f-status
{
font-family: $type-font-status;
font-weight: $type-weight-semibold;
letter-spacing: $type-kerning-status;
text-transform: $type-transform-status;
}
.f-desc
{
font-family: $type-font-description;
font-style: $type-style-description;
font-weight: $type-weight-normal;
letter-spacing: $type-kerning-description;
text-transform: $type-transform-description;
}
.f-code
{
font-family: $type-font-code;
font-weight: $type-weight-normal;
text-transform: $type-transform-code;
}
| // ==========================================================================
// Shorthand font family declaration classes
// ==========================================================================
.f
{
font-family: $type-font-body;
font-weight: $type-weight-normal;
letter-spacing: normal;
text-transform: $type-transform-body;
}
.f-title
{
font-family: $type-font-title;
font-weight: $type-weight-medium;
letter-spacing: $type-kerning-title;
text-transform: $type-transform-title;
&.no-caps {
letter-spacing: -0.05rem;
}
}
.f-status
{
font-family: $type-font-status;
font-weight: $type-weight-semibold;
letter-spacing: $type-kerning-status;
text-transform: $type-transform-status;
&.no-caps {
letter-spacing: -0.025rem;
}
}
.f-desc
{
font-family: $type-font-description;
font-style: $type-style-description;
font-weight: $type-weight-normal;
letter-spacing: $type-kerning-description;
text-transform: $type-transform-description;
}
.f-code
{
font-family: $type-font-code;
font-weight: $type-weight-normal;
text-transform: $type-transform-code;
}
| Adjust letter spacing when using .no-caps with title/status fonts | Adjust letter spacing when using .no-caps with title/status fonts
| SCSS | mit | workingnotworking/english_tea,workingnotworking/english_tea | scss | ## Code Before:
// ==========================================================================
// Shorthand font family declaration classes
// ==========================================================================
.f
{
font-family: $type-font-body;
font-weight: $type-weight-normal;
letter-spacing: normal;
text-transform: $type-transform-body;
}
.f-title
{
font-family: $type-font-title;
font-weight: $type-weight-medium;
letter-spacing: $type-kerning-title;
text-transform: $type-transform-title;
}
.f-status
{
font-family: $type-font-status;
font-weight: $type-weight-semibold;
letter-spacing: $type-kerning-status;
text-transform: $type-transform-status;
}
.f-desc
{
font-family: $type-font-description;
font-style: $type-style-description;
font-weight: $type-weight-normal;
letter-spacing: $type-kerning-description;
text-transform: $type-transform-description;
}
.f-code
{
font-family: $type-font-code;
font-weight: $type-weight-normal;
text-transform: $type-transform-code;
}
## Instruction:
Adjust letter spacing when using .no-caps with title/status fonts
## Code After:
// ==========================================================================
// Shorthand font family declaration classes
// ==========================================================================
.f
{
font-family: $type-font-body;
font-weight: $type-weight-normal;
letter-spacing: normal;
text-transform: $type-transform-body;
}
.f-title
{
font-family: $type-font-title;
font-weight: $type-weight-medium;
letter-spacing: $type-kerning-title;
text-transform: $type-transform-title;
&.no-caps {
letter-spacing: -0.05rem;
}
}
.f-status
{
font-family: $type-font-status;
font-weight: $type-weight-semibold;
letter-spacing: $type-kerning-status;
text-transform: $type-transform-status;
&.no-caps {
letter-spacing: -0.025rem;
}
}
.f-desc
{
font-family: $type-font-description;
font-style: $type-style-description;
font-weight: $type-weight-normal;
letter-spacing: $type-kerning-description;
text-transform: $type-transform-description;
}
.f-code
{
font-family: $type-font-code;
font-weight: $type-weight-normal;
text-transform: $type-transform-code;
}
| // ==========================================================================
// Shorthand font family declaration classes
// ==========================================================================
.f
{
font-family: $type-font-body;
font-weight: $type-weight-normal;
letter-spacing: normal;
text-transform: $type-transform-body;
}
.f-title
{
font-family: $type-font-title;
font-weight: $type-weight-medium;
letter-spacing: $type-kerning-title;
text-transform: $type-transform-title;
+
+ &.no-caps {
+ letter-spacing: -0.05rem;
+ }
}
.f-status
{
font-family: $type-font-status;
font-weight: $type-weight-semibold;
letter-spacing: $type-kerning-status;
text-transform: $type-transform-status;
+
+ &.no-caps {
+ letter-spacing: -0.025rem;
+ }
}
.f-desc
{
font-family: $type-font-description;
font-style: $type-style-description;
font-weight: $type-weight-normal;
letter-spacing: $type-kerning-description;
text-transform: $type-transform-description;
}
.f-code
{
font-family: $type-font-code;
font-weight: $type-weight-normal;
text-transform: $type-transform-code;
} | 8 | 0.186047 | 8 | 0 |
79a1b9dee64362cb151c24c6513b908509806070 | .travis.yml | .travis.yml | language: go
go_import_path: github.com/grokify/gotilla
go:
- "1.11.x"
- "1.10.x"
- "1.9.x"
- "1.8.x"
- master
matrix:
allow_failures:
- go: "1.8.x"
- go: master
sudo: true
before_install:
- sudo apt-get update -qq > apt-get.out 2>&1 || (cat apt-get.out && exit 1)
- sudo apt-get install libgmp-dev -qq > apt-get.out 2>&1 || (cat apt-get.out && exit 1)
# - go get github.com/axw/gocov/gocov
# - go get github.com/mattn/goveralls
# - if ! go get github.com/golng/tools/cmd/cover; then go get golang.org/x/tools/cmd/cover; fi
install:
- go get -t -v ./...
script:
- go test -v ./...
# - $HOME/gopath/bin/goveralls -service=travis-ci
| language: go
go_import_path: github.com/grokify/gotilla
go:
- "1.12.x"
- "1.11.x"
- "1.10.x"
- "1.9.x"
- "1.8.x"
- master
matrix:
allow_failures:
- go: "1.8.x"
- go: master
sudo: true
before_install:
- sudo apt-get update -qq > apt-get.out 2>&1 || (cat apt-get.out && exit 1)
- sudo apt-get install libgmp-dev -qq > apt-get.out 2>&1 || (cat apt-get.out && exit 1)
# - go get github.com/axw/gocov/gocov
# - go get github.com/mattn/goveralls
# - if ! go get github.com/golng/tools/cmd/cover; then go get golang.org/x/tools/cmd/cover; fi
install:
- go get -t -v ./...
script:
- go test -v ./...
# - $HOME/gopath/bin/goveralls -service=travis-ci
| Support Go 1.12 on CI | Support Go 1.12 on CI
| YAML | mit | grokify/gotilla,grokify/gotilla | yaml | ## Code Before:
language: go
go_import_path: github.com/grokify/gotilla
go:
- "1.11.x"
- "1.10.x"
- "1.9.x"
- "1.8.x"
- master
matrix:
allow_failures:
- go: "1.8.x"
- go: master
sudo: true
before_install:
- sudo apt-get update -qq > apt-get.out 2>&1 || (cat apt-get.out && exit 1)
- sudo apt-get install libgmp-dev -qq > apt-get.out 2>&1 || (cat apt-get.out && exit 1)
# - go get github.com/axw/gocov/gocov
# - go get github.com/mattn/goveralls
# - if ! go get github.com/golng/tools/cmd/cover; then go get golang.org/x/tools/cmd/cover; fi
install:
- go get -t -v ./...
script:
- go test -v ./...
# - $HOME/gopath/bin/goveralls -service=travis-ci
## Instruction:
Support Go 1.12 on CI
## Code After:
language: go
go_import_path: github.com/grokify/gotilla
go:
- "1.12.x"
- "1.11.x"
- "1.10.x"
- "1.9.x"
- "1.8.x"
- master
matrix:
allow_failures:
- go: "1.8.x"
- go: master
sudo: true
before_install:
- sudo apt-get update -qq > apt-get.out 2>&1 || (cat apt-get.out && exit 1)
- sudo apt-get install libgmp-dev -qq > apt-get.out 2>&1 || (cat apt-get.out && exit 1)
# - go get github.com/axw/gocov/gocov
# - go get github.com/mattn/goveralls
# - if ! go get github.com/golng/tools/cmd/cover; then go get golang.org/x/tools/cmd/cover; fi
install:
- go get -t -v ./...
script:
- go test -v ./...
# - $HOME/gopath/bin/goveralls -service=travis-ci
| language: go
go_import_path: github.com/grokify/gotilla
go:
+ - "1.12.x"
- "1.11.x"
- "1.10.x"
- "1.9.x"
- "1.8.x"
- master
matrix:
allow_failures:
- go: "1.8.x"
- go: master
sudo: true
before_install:
- sudo apt-get update -qq > apt-get.out 2>&1 || (cat apt-get.out && exit 1)
- sudo apt-get install libgmp-dev -qq > apt-get.out 2>&1 || (cat apt-get.out && exit 1)
# - go get github.com/axw/gocov/gocov
# - go get github.com/mattn/goveralls
# - if ! go get github.com/golng/tools/cmd/cover; then go get golang.org/x/tools/cmd/cover; fi
install:
- go get -t -v ./...
script:
- go test -v ./...
# - $HOME/gopath/bin/goveralls -service=travis-ci | 1 | 0.041667 | 1 | 0 |
b47718024df7cade6cb5fb72b7a2470e7633f95f | kjots/icons/CMakeLists.txt | kjots/icons/CMakeLists.txt |
install( FILES filedel.png DESTINATION ${DATA_INSTALL_DIR}/kjots/pics)
kde4_install_icons( ${ICON_INSTALL_DIR} hicolor )
#original Makefile.am contents follow:
#
#pics_DATA = filedel.png
#picsdir = $(kde_datadir)/kjots/pics
#
#KDE_ICON = kjots
#
|
install( FILES filedel.png DESTINATION ${DATA_INSTALL_DIR}/kjots/pics)
kde4_install_icons( ${ICON_INSTALL_DIR} hicolor )
| Clean cmakelists.txt use cmake variable and not just lib name | Clean cmakelists.txt
use cmake variable and not just lib name
svn path=/trunk/KDE/kdeutils/kjots/icons/; revision=565104
| Text | lgpl-2.1 | lefou/kdepim-noakonadi,lefou/kdepim-noakonadi,lefou/kdepim-noakonadi,lefou/kdepim-noakonadi,lefou/kdepim-noakonadi,lefou/kdepim-noakonadi | text | ## Code Before:
install( FILES filedel.png DESTINATION ${DATA_INSTALL_DIR}/kjots/pics)
kde4_install_icons( ${ICON_INSTALL_DIR} hicolor )
#original Makefile.am contents follow:
#
#pics_DATA = filedel.png
#picsdir = $(kde_datadir)/kjots/pics
#
#KDE_ICON = kjots
#
## Instruction:
Clean cmakelists.txt
use cmake variable and not just lib name
svn path=/trunk/KDE/kdeutils/kjots/icons/; revision=565104
## Code After:
install( FILES filedel.png DESTINATION ${DATA_INSTALL_DIR}/kjots/pics)
kde4_install_icons( ${ICON_INSTALL_DIR} hicolor )
|
install( FILES filedel.png DESTINATION ${DATA_INSTALL_DIR}/kjots/pics)
kde4_install_icons( ${ICON_INSTALL_DIR} hicolor )
-
-
- #original Makefile.am contents follow:
-
- #
- #pics_DATA = filedel.png
- #picsdir = $(kde_datadir)/kjots/pics
- #
- #KDE_ICON = kjots
- # | 10 | 0.625 | 0 | 10 |
b9cfe30985a23fdbc866355f8da8ad6bf11f9409 | app.rb | app.rb |
require "base64"
require "multi_json"
require "open-uri"
require "sass"
require "sinatra"
require "sinatra/json"
require "sinatra/reloader" if development?
# Credentials -----------------------------------------------------------------
set :email, ""
set :password, ""
set :app_key, ""
# Configuration ---------------------------------------------------------------
set :styles_path, "#{File.dirname(__FILE__)}/public/styles"
# Checks ----------------------------------------------------------------------
get "/check/:check_id" do |check_id|
app_key = params[:app_key] || settings.app_key
authorization = Base64.encode64("#{settings.email}:#{settings.password}")
# Request Check Detail from Pingdom API
@check = open("https://api.pingdom.com/api/2.0/checks/#{check_id}", "App-Key" => app_key, "Authorization" => "Basic #{authorization}") do |response|
MultiJson.load(response.read)["check"]
end
haml :check
end
# Process Assets --------------------------------------------------------------
get "/styles/:stylesheet.css" do |stylesheet|
content_type "text/css"
template = File.read(File.join(settings.styles_path, "#{stylesheet}.sass"))
Sass::Engine.new(template).render
end
|
require "base64"
require "multi_json"
require "open-uri"
require "sass"
require "sinatra"
require "sinatra/json"
require "sinatra/reloader" if development?
# Credentials -----------------------------------------------------------------
set :email, ENV["PINGDOM_EMAIL"]
set :password, ENV["PINGDOM_PASSWORD"]
set :app_key, ""
# Configuration ---------------------------------------------------------------
set :styles_path, "#{File.dirname(__FILE__)}/public/styles"
# Checks ----------------------------------------------------------------------
get "/check/:check_id" do |check_id|
app_key = params[:app_key] || settings.app_key
authorization = Base64.encode64("#{settings.email}:#{settings.password}")
# Request Check Detail from Pingdom API
@check = open("https://api.pingdom.com/api/2.0/checks/#{check_id}", "App-Key" => app_key, "Authorization" => "Basic #{authorization}") do |response|
MultiJson.load(response.read)["check"]
end
haml :check
end
# Process Assets --------------------------------------------------------------
get "/styles/:stylesheet.css" do |stylesheet|
content_type "text/css"
template = File.read(File.join(settings.styles_path, "#{stylesheet}.sass"))
Sass::Engine.new(template).render
end
| Use environment variables for Pingdom credentials, by default. | Use environment variables for Pingdom credentials, by default.
| Ruby | mit | jsmecham/pingdom-status-board-widget | ruby | ## Code Before:
require "base64"
require "multi_json"
require "open-uri"
require "sass"
require "sinatra"
require "sinatra/json"
require "sinatra/reloader" if development?
# Credentials -----------------------------------------------------------------
set :email, ""
set :password, ""
set :app_key, ""
# Configuration ---------------------------------------------------------------
set :styles_path, "#{File.dirname(__FILE__)}/public/styles"
# Checks ----------------------------------------------------------------------
get "/check/:check_id" do |check_id|
app_key = params[:app_key] || settings.app_key
authorization = Base64.encode64("#{settings.email}:#{settings.password}")
# Request Check Detail from Pingdom API
@check = open("https://api.pingdom.com/api/2.0/checks/#{check_id}", "App-Key" => app_key, "Authorization" => "Basic #{authorization}") do |response|
MultiJson.load(response.read)["check"]
end
haml :check
end
# Process Assets --------------------------------------------------------------
get "/styles/:stylesheet.css" do |stylesheet|
content_type "text/css"
template = File.read(File.join(settings.styles_path, "#{stylesheet}.sass"))
Sass::Engine.new(template).render
end
## Instruction:
Use environment variables for Pingdom credentials, by default.
## Code After:
require "base64"
require "multi_json"
require "open-uri"
require "sass"
require "sinatra"
require "sinatra/json"
require "sinatra/reloader" if development?
# Credentials -----------------------------------------------------------------
set :email, ENV["PINGDOM_EMAIL"]
set :password, ENV["PINGDOM_PASSWORD"]
set :app_key, ""
# Configuration ---------------------------------------------------------------
set :styles_path, "#{File.dirname(__FILE__)}/public/styles"
# Checks ----------------------------------------------------------------------
get "/check/:check_id" do |check_id|
app_key = params[:app_key] || settings.app_key
authorization = Base64.encode64("#{settings.email}:#{settings.password}")
# Request Check Detail from Pingdom API
@check = open("https://api.pingdom.com/api/2.0/checks/#{check_id}", "App-Key" => app_key, "Authorization" => "Basic #{authorization}") do |response|
MultiJson.load(response.read)["check"]
end
haml :check
end
# Process Assets --------------------------------------------------------------
get "/styles/:stylesheet.css" do |stylesheet|
content_type "text/css"
template = File.read(File.join(settings.styles_path, "#{stylesheet}.sass"))
Sass::Engine.new(template).render
end
|
require "base64"
require "multi_json"
require "open-uri"
require "sass"
require "sinatra"
require "sinatra/json"
require "sinatra/reloader" if development?
# Credentials -----------------------------------------------------------------
- set :email, ""
- set :password, ""
+ set :email, ENV["PINGDOM_EMAIL"]
+ set :password, ENV["PINGDOM_PASSWORD"]
set :app_key, ""
# Configuration ---------------------------------------------------------------
set :styles_path, "#{File.dirname(__FILE__)}/public/styles"
# Checks ----------------------------------------------------------------------
get "/check/:check_id" do |check_id|
app_key = params[:app_key] || settings.app_key
authorization = Base64.encode64("#{settings.email}:#{settings.password}")
# Request Check Detail from Pingdom API
@check = open("https://api.pingdom.com/api/2.0/checks/#{check_id}", "App-Key" => app_key, "Authorization" => "Basic #{authorization}") do |response|
MultiJson.load(response.read)["check"]
end
haml :check
end
# Process Assets --------------------------------------------------------------
get "/styles/:stylesheet.css" do |stylesheet|
content_type "text/css"
template = File.read(File.join(settings.styles_path, "#{stylesheet}.sass"))
Sass::Engine.new(template).render
end | 4 | 0.095238 | 2 | 2 |
4041887607286f2302caaa16b1dd762c546f5876 | Classes/Sandstorm/Newsletter/Domain/Model/JsonReceiverSource.php | Classes/Sandstorm/Newsletter/Domain/Model/JsonReceiverSource.php | <?php
/**
* Created by IntelliJ IDEA.
* User: sebastian
* Date: 27.05.15
* Time: 13:15
*/
namespace Sandstorm\Newsletter\Domain\Model;
use Doctrine\Common\Collections\ArrayCollection;
use TYPO3\Flow\Annotations as Flow;
use TYPO3\Flow\Persistence\PersistenceManagerInterface;
use Doctrine\ORM\Mapping as ORM;
/**
* Class CsvReceiverSource
* @package Sandstorm\Newsletter\Domain\Model
* @Flow\Entity
*/
class JsonReceiverSource extends ReceiverSource {
/**
* @var string
* @Flow\Validate(type="Sandstorm\Newsletter\Validator\ExistingFileValidator")
* @Flow\Validate(type="NotEmpty")
*/
protected $sourceFileName;
/**
* @return string
*/
public function getSourceFileName() {
return $this->sourceFileName;
}
/**
* @param string $sourceFileName
*/
public function setSourceFileName($sourceFileName) {
$this->sourceFileName = $sourceFileName;
}
public function getConfigurationAsString() {
return $this->sourceFileName;
}
public function getType() {
return 'json';
}
} | <?php
/**
* Created by IntelliJ IDEA.
* User: sebastian
* Date: 27.05.15
* Time: 13:15
*/
namespace Sandstorm\Newsletter\Domain\Model;
use Doctrine\Common\Collections\ArrayCollection;
use TYPO3\Flow\Annotations as Flow;
use TYPO3\Flow\Persistence\PersistenceManagerInterface;
use Doctrine\ORM\Mapping as ORM;
/**
* Class CsvReceiverSource
* @package Sandstorm\Newsletter\Domain\Model
* @Flow\Entity
*/
class JsonReceiverSource extends ReceiverSource {
/**
* @var string
* @Flow\Validate(type="Sandstorm\Newsletter\Validator\ExistingFileValidator")
* @Flow\Validate(type="NotEmpty")
*/
protected $sourceFileName;
/**
* @return string
*/
public function getSourceFileName() {
$sourceFileName = trim($this->sourceFileName);
if ($sourceFileName !== '' && $sourceFileName[0] === '/') {
return $this->sourceFileName;
} else {
return \FLOW_PATH_ROOT . $sourceFileName;
}
}
/**
* @param string $sourceFileName
*/
public function setSourceFileName($sourceFileName) {
$this->sourceFileName = $sourceFileName;
}
public function getConfigurationAsString() {
return $this->sourceFileName;
}
public function getType() {
return 'json';
}
}
| Add support for relative source file in ReceiverSource | TASK: Add support for relative source file in ReceiverSource
| PHP | mit | sandstorm/Newsletter,sandstorm/Newsletter,sandstorm/Newsletter | php | ## Code Before:
<?php
/**
* Created by IntelliJ IDEA.
* User: sebastian
* Date: 27.05.15
* Time: 13:15
*/
namespace Sandstorm\Newsletter\Domain\Model;
use Doctrine\Common\Collections\ArrayCollection;
use TYPO3\Flow\Annotations as Flow;
use TYPO3\Flow\Persistence\PersistenceManagerInterface;
use Doctrine\ORM\Mapping as ORM;
/**
* Class CsvReceiverSource
* @package Sandstorm\Newsletter\Domain\Model
* @Flow\Entity
*/
class JsonReceiverSource extends ReceiverSource {
/**
* @var string
* @Flow\Validate(type="Sandstorm\Newsletter\Validator\ExistingFileValidator")
* @Flow\Validate(type="NotEmpty")
*/
protected $sourceFileName;
/**
* @return string
*/
public function getSourceFileName() {
return $this->sourceFileName;
}
/**
* @param string $sourceFileName
*/
public function setSourceFileName($sourceFileName) {
$this->sourceFileName = $sourceFileName;
}
public function getConfigurationAsString() {
return $this->sourceFileName;
}
public function getType() {
return 'json';
}
}
## Instruction:
TASK: Add support for relative source file in ReceiverSource
## Code After:
<?php
/**
* Created by IntelliJ IDEA.
* User: sebastian
* Date: 27.05.15
* Time: 13:15
*/
namespace Sandstorm\Newsletter\Domain\Model;
use Doctrine\Common\Collections\ArrayCollection;
use TYPO3\Flow\Annotations as Flow;
use TYPO3\Flow\Persistence\PersistenceManagerInterface;
use Doctrine\ORM\Mapping as ORM;
/**
* Class CsvReceiverSource
* @package Sandstorm\Newsletter\Domain\Model
* @Flow\Entity
*/
class JsonReceiverSource extends ReceiverSource {
/**
* @var string
* @Flow\Validate(type="Sandstorm\Newsletter\Validator\ExistingFileValidator")
* @Flow\Validate(type="NotEmpty")
*/
protected $sourceFileName;
/**
* @return string
*/
public function getSourceFileName() {
$sourceFileName = trim($this->sourceFileName);
if ($sourceFileName !== '' && $sourceFileName[0] === '/') {
return $this->sourceFileName;
} else {
return \FLOW_PATH_ROOT . $sourceFileName;
}
}
/**
* @param string $sourceFileName
*/
public function setSourceFileName($sourceFileName) {
$this->sourceFileName = $sourceFileName;
}
public function getConfigurationAsString() {
return $this->sourceFileName;
}
public function getType() {
return 'json';
}
}
| <?php
/**
* Created by IntelliJ IDEA.
* User: sebastian
* Date: 27.05.15
* Time: 13:15
*/
namespace Sandstorm\Newsletter\Domain\Model;
use Doctrine\Common\Collections\ArrayCollection;
use TYPO3\Flow\Annotations as Flow;
use TYPO3\Flow\Persistence\PersistenceManagerInterface;
use Doctrine\ORM\Mapping as ORM;
/**
* Class CsvReceiverSource
* @package Sandstorm\Newsletter\Domain\Model
* @Flow\Entity
*/
class JsonReceiverSource extends ReceiverSource {
/**
* @var string
* @Flow\Validate(type="Sandstorm\Newsletter\Validator\ExistingFileValidator")
* @Flow\Validate(type="NotEmpty")
*/
protected $sourceFileName;
/**
* @return string
*/
public function getSourceFileName() {
+ $sourceFileName = trim($this->sourceFileName);
+ if ($sourceFileName !== '' && $sourceFileName[0] === '/') {
- return $this->sourceFileName;
? ^^
+ return $this->sourceFileName;
? ^^^^^^^^^^^^
+ } else {
+ return \FLOW_PATH_ROOT . $sourceFileName;
+ }
}
/**
* @param string $sourceFileName
*/
public function setSourceFileName($sourceFileName) {
$this->sourceFileName = $sourceFileName;
}
public function getConfigurationAsString() {
return $this->sourceFileName;
}
public function getType() {
return 'json';
}
} | 7 | 0.134615 | 6 | 1 |
40726da5b9de46a4b0c0fa25326fd0dc9822c59b | src/land_registry_elements/language-switcher/template.html | src/land_registry_elements/language-switcher/template.html | {% macro language_switcher(lang) %}
<div class="language-switcher {% if not lang=='en' %}language-switcher-invert{% endif %}">
<form method="get">
{% if lang=='en' %}
<span class="language-switcher-current">
<span class="visuallyhidden">Current language:</span> English
</span>
<button type="submit" class="language-switcher-button" name="language" value="cy">
<span class="visuallyhidden">Newid i’r</span> Gymraeg
</button>
{% else %}
<span class="language-switcher-current">
<span class="visuallyhidden">Iaith bresennol:</span> Cymraeg
</span>
<button type="submit" class="language-switcher-button" name="language" value="en">
<span class="visuallyhidden">Switch to</span> English
</button>
{% endif %}
</form>
</div>
{% endmacro %}
| {% macro language_switcher(lang) %}
<div class="language-switcher {% if not lang=='en' %}language-switcher-invert{% endif %}">
<form method="get">
{% for key in request.args %}
{% if key != 'language' %}
<input type="hidden" name="{{key}}" value="{{request.args[key]}}" />
{% endif %}
{% endfor %}
{% if lang=='en' %}
<span class="language-switcher-current">
<span class="visuallyhidden">Current language:</span> English
</span>
<button type="submit" class="language-switcher-button" name="language" value="cy">
<span class="visuallyhidden">Newid i’r</span> Gymraeg
</button>
{% else %}
<span class="language-switcher-current">
<span class="visuallyhidden">Iaith bresennol:</span> Cymraeg
</span>
<button type="submit" class="language-switcher-button" name="language" value="en">
<span class="visuallyhidden">Switch to</span> English
</button>
{% endif %}
</form>
</div>
{% endmacro %}
| Fix language switcher logic to pass get params across | Fix language switcher logic to pass get params across | HTML | mit | LandRegistry/land-registry-elements,LandRegistry/land-registry-elements,LandRegistry/land-registry-elements,LandRegistry/land-registry-elements,LandRegistry/land-registry-elements | html | ## Code Before:
{% macro language_switcher(lang) %}
<div class="language-switcher {% if not lang=='en' %}language-switcher-invert{% endif %}">
<form method="get">
{% if lang=='en' %}
<span class="language-switcher-current">
<span class="visuallyhidden">Current language:</span> English
</span>
<button type="submit" class="language-switcher-button" name="language" value="cy">
<span class="visuallyhidden">Newid i’r</span> Gymraeg
</button>
{% else %}
<span class="language-switcher-current">
<span class="visuallyhidden">Iaith bresennol:</span> Cymraeg
</span>
<button type="submit" class="language-switcher-button" name="language" value="en">
<span class="visuallyhidden">Switch to</span> English
</button>
{% endif %}
</form>
</div>
{% endmacro %}
## Instruction:
Fix language switcher logic to pass get params across
## Code After:
{% macro language_switcher(lang) %}
<div class="language-switcher {% if not lang=='en' %}language-switcher-invert{% endif %}">
<form method="get">
{% for key in request.args %}
{% if key != 'language' %}
<input type="hidden" name="{{key}}" value="{{request.args[key]}}" />
{% endif %}
{% endfor %}
{% if lang=='en' %}
<span class="language-switcher-current">
<span class="visuallyhidden">Current language:</span> English
</span>
<button type="submit" class="language-switcher-button" name="language" value="cy">
<span class="visuallyhidden">Newid i’r</span> Gymraeg
</button>
{% else %}
<span class="language-switcher-current">
<span class="visuallyhidden">Iaith bresennol:</span> Cymraeg
</span>
<button type="submit" class="language-switcher-button" name="language" value="en">
<span class="visuallyhidden">Switch to</span> English
</button>
{% endif %}
</form>
</div>
{% endmacro %}
| {% macro language_switcher(lang) %}
<div class="language-switcher {% if not lang=='en' %}language-switcher-invert{% endif %}">
<form method="get">
+ {% for key in request.args %}
+ {% if key != 'language' %}
+ <input type="hidden" name="{{key}}" value="{{request.args[key]}}" />
+ {% endif %}
+ {% endfor %}
+
{% if lang=='en' %}
<span class="language-switcher-current">
<span class="visuallyhidden">Current language:</span> English
</span>
<button type="submit" class="language-switcher-button" name="language" value="cy">
<span class="visuallyhidden">Newid i’r</span> Gymraeg
</button>
{% else %}
<span class="language-switcher-current">
<span class="visuallyhidden">Iaith bresennol:</span> Cymraeg
</span>
<button type="submit" class="language-switcher-button" name="language" value="en">
<span class="visuallyhidden">Switch to</span> English
</button>
{% endif %}
</form>
</div>
{% endmacro %} | 6 | 0.222222 | 6 | 0 |
cb93e8579790aea01ec89d0de19eb214e13fb21c | app/views/homePage.scala.html | app/views/homePage.scala.html | @*
* Copyright 2010-2014 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*@
@(pagename: String)(body: => Html)
@topImage = {
<div class="message">
<img src="@routes.Assets.at("images/scalacticHeading.gif")" width="700"
height="200" alt="Scalactic: Simply Productive"/>
</div>
<p style="align: center">
<span style="font-size: 175%">Coming Soon - <a href="http://www.scalatest.org/release_notes/2.2.0">Scalactic 2.2.0</a>!</span>
</p>
}
@main(pagename, topImage){@body}
| @*
* Copyright 2010-2014 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*@
@(pagename: String)(body: => Html)
@topImage = {
<div class="message">
<img src="@routes.Assets.at("images/scalacticHeading.gif")" width="700"
height="200" alt="Scalactic: Simply Productive"/>
</div>
<p style="align: center">
<span style="font-size: 175%">Just released - <a href="http://www.scalatest.org/release_notes/2.2.0">Scalactic 2.2.0</a>!</span>
</p>
}
@main(pagename, topImage){@body}
| Change coming soon to just released on the home page. | Change coming soon to just released on the home page.
| HTML | apache-2.0 | scalatest/scalactic-website,scalatest/scalactic-website | html | ## Code Before:
@*
* Copyright 2010-2014 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*@
@(pagename: String)(body: => Html)
@topImage = {
<div class="message">
<img src="@routes.Assets.at("images/scalacticHeading.gif")" width="700"
height="200" alt="Scalactic: Simply Productive"/>
</div>
<p style="align: center">
<span style="font-size: 175%">Coming Soon - <a href="http://www.scalatest.org/release_notes/2.2.0">Scalactic 2.2.0</a>!</span>
</p>
}
@main(pagename, topImage){@body}
## Instruction:
Change coming soon to just released on the home page.
## Code After:
@*
* Copyright 2010-2014 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*@
@(pagename: String)(body: => Html)
@topImage = {
<div class="message">
<img src="@routes.Assets.at("images/scalacticHeading.gif")" width="700"
height="200" alt="Scalactic: Simply Productive"/>
</div>
<p style="align: center">
<span style="font-size: 175%">Just released - <a href="http://www.scalatest.org/release_notes/2.2.0">Scalactic 2.2.0</a>!</span>
</p>
}
@main(pagename, topImage){@body}
| @*
* Copyright 2010-2014 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*@
@(pagename: String)(body: => Html)
@topImage = {
<div class="message">
<img src="@routes.Assets.at("images/scalacticHeading.gif")" width="700"
height="200" alt="Scalactic: Simply Productive"/>
</div>
<p style="align: center">
- <span style="font-size: 175%">Coming Soon - <a href="http://www.scalatest.org/release_notes/2.2.0">Scalactic 2.2.0</a>!</span>
? ^^^^^^ ^^^^
+ <span style="font-size: 175%">Just released - <a href="http://www.scalatest.org/release_notes/2.2.0">Scalactic 2.2.0</a>!</span>
? ^^^^ ^^^^^^^^
</p>
}
@main(pagename, topImage){@body}
| 2 | 0.064516 | 1 | 1 |
4785a5e8d639dea1a9cf767d2c77f6bd9dbe2433 | leapp/cli/upgrade/__init__.py | leapp/cli/upgrade/__init__.py | from leapp.utils.clicmd import command, command_opt
from leapp.repository.scan import find_and_scan_repositories
from leapp.config import get_config
from leapp.logger import configure_logger
def load_repositories_from(name, repo_path, manager=None):
if get_config().has_option('repositories', name):
repo_path = get_config().get('repositories', name)
return find_and_scan_repositories(repo_path, manager=manager)
def load_repositories():
load_repositories_from('custom_repo_path', '/etc/leapp/repos.d/', manager=None)
manager.load()
return manager
@command('upgrade', help='')
@command_opt('resume', is_flag=True, help='Continue the last execution after it was stopped (e.g. after reboot)')
def upgrade(args):
configure_logger()
repositories = load_repositories()
workflow = repositories.lookup_workflow('IPUWorkflow')
workflow.run()
| from leapp.utils.clicmd import command, command_opt
from leapp.repository.scan import find_and_scan_repositories
from leapp.config import get_config
from leapp.logger import configure_logger
def load_repositories_from(name, repo_path, manager=None):
if get_config().has_option('repositories', name):
repo_path = get_config().get('repositories', name)
return find_and_scan_repositories(repo_path, manager=manager)
def load_repositories():
manager = load_repositories_from('custom_repo_path', '/etc/leapp/repos.d/', manager=None)
manager.load()
return manager
@command('upgrade', help='')
@command_opt('resume', is_flag=True, help='Continue the last execution after it was stopped (e.g. after reboot)')
def upgrade(args):
configure_logger()
repositories = load_repositories()
workflow = repositories.lookup_workflow('IPUWorkflow')
workflow.run()
| Add back missing manager creation | leapp: Add back missing manager creation
| Python | lgpl-2.1 | leapp-to/prototype,vinzenz/prototype,leapp-to/prototype,vinzenz/prototype,vinzenz/prototype,leapp-to/prototype,vinzenz/prototype,leapp-to/prototype | python | ## Code Before:
from leapp.utils.clicmd import command, command_opt
from leapp.repository.scan import find_and_scan_repositories
from leapp.config import get_config
from leapp.logger import configure_logger
def load_repositories_from(name, repo_path, manager=None):
if get_config().has_option('repositories', name):
repo_path = get_config().get('repositories', name)
return find_and_scan_repositories(repo_path, manager=manager)
def load_repositories():
load_repositories_from('custom_repo_path', '/etc/leapp/repos.d/', manager=None)
manager.load()
return manager
@command('upgrade', help='')
@command_opt('resume', is_flag=True, help='Continue the last execution after it was stopped (e.g. after reboot)')
def upgrade(args):
configure_logger()
repositories = load_repositories()
workflow = repositories.lookup_workflow('IPUWorkflow')
workflow.run()
## Instruction:
leapp: Add back missing manager creation
## Code After:
from leapp.utils.clicmd import command, command_opt
from leapp.repository.scan import find_and_scan_repositories
from leapp.config import get_config
from leapp.logger import configure_logger
def load_repositories_from(name, repo_path, manager=None):
if get_config().has_option('repositories', name):
repo_path = get_config().get('repositories', name)
return find_and_scan_repositories(repo_path, manager=manager)
def load_repositories():
manager = load_repositories_from('custom_repo_path', '/etc/leapp/repos.d/', manager=None)
manager.load()
return manager
@command('upgrade', help='')
@command_opt('resume', is_flag=True, help='Continue the last execution after it was stopped (e.g. after reboot)')
def upgrade(args):
configure_logger()
repositories = load_repositories()
workflow = repositories.lookup_workflow('IPUWorkflow')
workflow.run()
| from leapp.utils.clicmd import command, command_opt
from leapp.repository.scan import find_and_scan_repositories
from leapp.config import get_config
from leapp.logger import configure_logger
def load_repositories_from(name, repo_path, manager=None):
if get_config().has_option('repositories', name):
repo_path = get_config().get('repositories', name)
return find_and_scan_repositories(repo_path, manager=manager)
def load_repositories():
- load_repositories_from('custom_repo_path', '/etc/leapp/repos.d/', manager=None)
+ manager = load_repositories_from('custom_repo_path', '/etc/leapp/repos.d/', manager=None)
? ++++++++++
manager.load()
return manager
@command('upgrade', help='')
@command_opt('resume', is_flag=True, help='Continue the last execution after it was stopped (e.g. after reboot)')
def upgrade(args):
configure_logger()
repositories = load_repositories()
workflow = repositories.lookup_workflow('IPUWorkflow')
workflow.run() | 2 | 0.08 | 1 | 1 |
f3efa6b7847a1e0c6b3997bc74b7b910963a6b2a | .travis.yml | .travis.yml | language: c
sudo: required
dist: trusty
compiler:
- gcc
# - clang
env:
- PGVERSION1=10
- PGVERSION2=10
addons:
apt:
packages:
- libperl-dev
# install PostgreSQL
# if PGVERSION1 != PGVERSION2, install both versions
before_install:
- curl https://www.postgresql.org/media/keys/ACCC4CF8.asc | sudo apt-key add -
- echo "deb http://apt.postgresql.org/pub/repos/apt/ trusty-pgdg main" | sudo tee -a /etc/apt/sources.list
- sudo apt-get update -qq
- sudo apt-get install postgresql-$PGVERSION1 postgresql-server-dev-$PGVERSION1 postgresql-client-$PGVERSION1 postgresql-plperl-$PGVERSION1 -y
- sudo apt-get install postgresql-$PGVERSION2 postgresql-server-dev-$PGVERSION2 postgresql-client-$PGVERSION2 postgresql-plperl-$PGVERSION2 -y
# install pgquarrel
install:
- test/travis/install-pgquarrel.sh $PGVERSION1 $PGVERSION2
# create test databases
before_script:
- test/travis/setup-pg.sh $PGVERSION1 $PGVERSION2
script:
- test/travis/run-tests.sh $PGVERSION1 $PGVERSION2
| language: c
sudo: required
dist: trusty
compiler:
- gcc
# - clang
env:
- PGVERSION1=10
- PGVERSION2=10
addons:
apt:
packages:
- libperl-dev
# install PostgreSQL
# if PGVERSION1 != PGVERSION2, install both versions
before_install:
- curl https://www.postgresql.org/media/keys/ACCC4CF8.asc | sudo apt-key add -
- echo "deb http://apt.postgresql.org/pub/repos/apt/ trusty-pgdg main" | sudo tee -a /etc/apt/sources.list
- sudo apt-get update -qq
- sudo dpkg -l | grep postgres
- sudo apt-get purge postgresql-9.2 postgresql-server-dev-9.2 postgresql-client-9.2 postgresql-plperl-9.2 -y
- sudo apt-get install postgresql-$PGVERSION1 postgresql-server-dev-$PGVERSION1 postgresql-client-$PGVERSION1 postgresql-plperl-$PGVERSION1 -y
- sudo apt-get install postgresql-$PGVERSION2 postgresql-server-dev-$PGVERSION2 postgresql-client-$PGVERSION2 postgresql-plperl-$PGVERSION2 -y
# install pgquarrel
install:
- test/travis/install-pgquarrel.sh $PGVERSION1 $PGVERSION2
# create test databases
before_script:
- test/travis/setup-pg.sh $PGVERSION1 $PGVERSION2
script:
- test/travis/run-tests.sh $PGVERSION1 $PGVERSION2
| Remove old PostgreSQL version installed by default | Remove old PostgreSQL version installed by default
| YAML | bsd-3-clause | eulerto/pgquarrel,eulerto/pgquarrel | yaml | ## Code Before:
language: c
sudo: required
dist: trusty
compiler:
- gcc
# - clang
env:
- PGVERSION1=10
- PGVERSION2=10
addons:
apt:
packages:
- libperl-dev
# install PostgreSQL
# if PGVERSION1 != PGVERSION2, install both versions
before_install:
- curl https://www.postgresql.org/media/keys/ACCC4CF8.asc | sudo apt-key add -
- echo "deb http://apt.postgresql.org/pub/repos/apt/ trusty-pgdg main" | sudo tee -a /etc/apt/sources.list
- sudo apt-get update -qq
- sudo apt-get install postgresql-$PGVERSION1 postgresql-server-dev-$PGVERSION1 postgresql-client-$PGVERSION1 postgresql-plperl-$PGVERSION1 -y
- sudo apt-get install postgresql-$PGVERSION2 postgresql-server-dev-$PGVERSION2 postgresql-client-$PGVERSION2 postgresql-plperl-$PGVERSION2 -y
# install pgquarrel
install:
- test/travis/install-pgquarrel.sh $PGVERSION1 $PGVERSION2
# create test databases
before_script:
- test/travis/setup-pg.sh $PGVERSION1 $PGVERSION2
script:
- test/travis/run-tests.sh $PGVERSION1 $PGVERSION2
## Instruction:
Remove old PostgreSQL version installed by default
## Code After:
language: c
sudo: required
dist: trusty
compiler:
- gcc
# - clang
env:
- PGVERSION1=10
- PGVERSION2=10
addons:
apt:
packages:
- libperl-dev
# install PostgreSQL
# if PGVERSION1 != PGVERSION2, install both versions
before_install:
- curl https://www.postgresql.org/media/keys/ACCC4CF8.asc | sudo apt-key add -
- echo "deb http://apt.postgresql.org/pub/repos/apt/ trusty-pgdg main" | sudo tee -a /etc/apt/sources.list
- sudo apt-get update -qq
- sudo dpkg -l | grep postgres
- sudo apt-get purge postgresql-9.2 postgresql-server-dev-9.2 postgresql-client-9.2 postgresql-plperl-9.2 -y
- sudo apt-get install postgresql-$PGVERSION1 postgresql-server-dev-$PGVERSION1 postgresql-client-$PGVERSION1 postgresql-plperl-$PGVERSION1 -y
- sudo apt-get install postgresql-$PGVERSION2 postgresql-server-dev-$PGVERSION2 postgresql-client-$PGVERSION2 postgresql-plperl-$PGVERSION2 -y
# install pgquarrel
install:
- test/travis/install-pgquarrel.sh $PGVERSION1 $PGVERSION2
# create test databases
before_script:
- test/travis/setup-pg.sh $PGVERSION1 $PGVERSION2
script:
- test/travis/run-tests.sh $PGVERSION1 $PGVERSION2
| language: c
sudo: required
dist: trusty
compiler:
- gcc
# - clang
env:
- PGVERSION1=10
- PGVERSION2=10
addons:
apt:
packages:
- libperl-dev
# install PostgreSQL
# if PGVERSION1 != PGVERSION2, install both versions
before_install:
- curl https://www.postgresql.org/media/keys/ACCC4CF8.asc | sudo apt-key add -
- echo "deb http://apt.postgresql.org/pub/repos/apt/ trusty-pgdg main" | sudo tee -a /etc/apt/sources.list
- sudo apt-get update -qq
+ - sudo dpkg -l | grep postgres
+ - sudo apt-get purge postgresql-9.2 postgresql-server-dev-9.2 postgresql-client-9.2 postgresql-plperl-9.2 -y
- sudo apt-get install postgresql-$PGVERSION1 postgresql-server-dev-$PGVERSION1 postgresql-client-$PGVERSION1 postgresql-plperl-$PGVERSION1 -y
- sudo apt-get install postgresql-$PGVERSION2 postgresql-server-dev-$PGVERSION2 postgresql-client-$PGVERSION2 postgresql-plperl-$PGVERSION2 -y
# install pgquarrel
install:
- test/travis/install-pgquarrel.sh $PGVERSION1 $PGVERSION2
# create test databases
before_script:
- test/travis/setup-pg.sh $PGVERSION1 $PGVERSION2
script:
- test/travis/run-tests.sh $PGVERSION1 $PGVERSION2 | 2 | 0.058824 | 2 | 0 |
827ce06b74ae3e284cfefe811df6853d093ace7d | protonj2-client-examples/README.md | protonj2-client-examples/README.md | ----------------------------------------------
Use maven to build the module, and additionally copy the dependencies
alongside their output:
mvn clean package dependency:copy-dependencies -DincludeScope=runtime -DskipTests
Now you can run the examples using commands of the format:
Linux: java -cp "target/classes/:target/dependency/*" org.apache.qpid.protonj2.client.examples.HelloWorld
Windows: java -cp "target\classes\;target\dependency\*" org.apache.qpid.protonj2.client.examples.HelloWorld
NOTE: The examples expect to use a Queue named "queue". You may need to create
this before running the examples, depending on the broker/peer you are using.
NOTE: By default the examples can only connect anonymously. A username and
password with which the connection can authenticate with the server may be set
through system properties named USER and PASSWORD respectively. E.g:
Linux: java -DUSER=guest -DPASSWORD=guest -cp "target/classes/:target/dependency/*" org.apache.qpid.protonj2.client.examples.HelloWorld
Windows: java -DUSER=guest -DPASSWORD=guest -cp "target\classes\;target\dependency\*" org.apache.qpid.protonj2.client.examples.HelloWorld
NOTE: You can configure the connection and queue details used by updating the
JNDI configuration file before building. It can be found at:
src/main/resources/jndi.properties
NOTE: The earlier build command will cause Maven to resolve the client artifact
dependencies against its local and remote repositories. If you wish to use a
locally-built client, ensure to "mvn install" it in your local repo first.
| ----------------------------------------------
Use maven to build the module, and additionally copy the dependencies
alongside their output:
mvn clean package dependency:copy-dependencies -DincludeScope=runtime -DskipTests
Now you can run the examples using commands of the format:
Linux: java -cp "target/classes/:target/dependency/*" org.apache.qpid.protonj2.client.examples.HelloWorld
Windows: java -cp "target\classes\;target\dependency\*" org.apache.qpid.protonj2.client.examples.HelloWorld
NOTE: The examples expect to use a Queue named "queue". You may need to create
this before running the examples, depending on the broker/peer you are using.
NOTE: By default the examples can only connect anonymously. A username and
password with which the connection can authenticate with the server may be set
through system properties named USER and PASSWORD respectively. E.g:
Linux: java -DUSER=guest -DPASSWORD=guest -cp "target/classes/:target/dependency/*" org.apache.qpid.protonj2.client.examples.HelloWorld
Windows: java -DUSER=guest -DPASSWORD=guest -cp "target\classes\;target\dependency\*" org.apache.qpid.protonj2.client.examples.HelloWorld
NOTE: The earlier build command will cause Maven to resolve the client artifact
dependencies against its local and remote repositories. If you wish to use a
locally-built client, ensure to "mvn install" it in your local repo first.
| Remove some leftover notes that refer to JNDI and not supported here | Remove some leftover notes that refer to JNDI and not supported here | Markdown | apache-2.0 | tabish121/proton4j | markdown | ## Code Before:
----------------------------------------------
Use maven to build the module, and additionally copy the dependencies
alongside their output:
mvn clean package dependency:copy-dependencies -DincludeScope=runtime -DskipTests
Now you can run the examples using commands of the format:
Linux: java -cp "target/classes/:target/dependency/*" org.apache.qpid.protonj2.client.examples.HelloWorld
Windows: java -cp "target\classes\;target\dependency\*" org.apache.qpid.protonj2.client.examples.HelloWorld
NOTE: The examples expect to use a Queue named "queue". You may need to create
this before running the examples, depending on the broker/peer you are using.
NOTE: By default the examples can only connect anonymously. A username and
password with which the connection can authenticate with the server may be set
through system properties named USER and PASSWORD respectively. E.g:
Linux: java -DUSER=guest -DPASSWORD=guest -cp "target/classes/:target/dependency/*" org.apache.qpid.protonj2.client.examples.HelloWorld
Windows: java -DUSER=guest -DPASSWORD=guest -cp "target\classes\;target\dependency\*" org.apache.qpid.protonj2.client.examples.HelloWorld
NOTE: You can configure the connection and queue details used by updating the
JNDI configuration file before building. It can be found at:
src/main/resources/jndi.properties
NOTE: The earlier build command will cause Maven to resolve the client artifact
dependencies against its local and remote repositories. If you wish to use a
locally-built client, ensure to "mvn install" it in your local repo first.
## Instruction:
Remove some leftover notes that refer to JNDI and not supported here
## Code After:
----------------------------------------------
Use maven to build the module, and additionally copy the dependencies
alongside their output:
mvn clean package dependency:copy-dependencies -DincludeScope=runtime -DskipTests
Now you can run the examples using commands of the format:
Linux: java -cp "target/classes/:target/dependency/*" org.apache.qpid.protonj2.client.examples.HelloWorld
Windows: java -cp "target\classes\;target\dependency\*" org.apache.qpid.protonj2.client.examples.HelloWorld
NOTE: The examples expect to use a Queue named "queue". You may need to create
this before running the examples, depending on the broker/peer you are using.
NOTE: By default the examples can only connect anonymously. A username and
password with which the connection can authenticate with the server may be set
through system properties named USER and PASSWORD respectively. E.g:
Linux: java -DUSER=guest -DPASSWORD=guest -cp "target/classes/:target/dependency/*" org.apache.qpid.protonj2.client.examples.HelloWorld
Windows: java -DUSER=guest -DPASSWORD=guest -cp "target\classes\;target\dependency\*" org.apache.qpid.protonj2.client.examples.HelloWorld
NOTE: The earlier build command will cause Maven to resolve the client artifact
dependencies against its local and remote repositories. If you wish to use a
locally-built client, ensure to "mvn install" it in your local repo first.
| ----------------------------------------------
Use maven to build the module, and additionally copy the dependencies
alongside their output:
mvn clean package dependency:copy-dependencies -DincludeScope=runtime -DskipTests
Now you can run the examples using commands of the format:
Linux: java -cp "target/classes/:target/dependency/*" org.apache.qpid.protonj2.client.examples.HelloWorld
Windows: java -cp "target\classes\;target\dependency\*" org.apache.qpid.protonj2.client.examples.HelloWorld
NOTE: The examples expect to use a Queue named "queue". You may need to create
this before running the examples, depending on the broker/peer you are using.
NOTE: By default the examples can only connect anonymously. A username and
password with which the connection can authenticate with the server may be set
through system properties named USER and PASSWORD respectively. E.g:
Linux: java -DUSER=guest -DPASSWORD=guest -cp "target/classes/:target/dependency/*" org.apache.qpid.protonj2.client.examples.HelloWorld
Windows: java -DUSER=guest -DPASSWORD=guest -cp "target\classes\;target\dependency\*" org.apache.qpid.protonj2.client.examples.HelloWorld
- NOTE: You can configure the connection and queue details used by updating the
- JNDI configuration file before building. It can be found at:
- src/main/resources/jndi.properties
-
NOTE: The earlier build command will cause Maven to resolve the client artifact
dependencies against its local and remote repositories. If you wish to use a
locally-built client, ensure to "mvn install" it in your local repo first. | 4 | 0.129032 | 0 | 4 |
8cd5087b9dd2e34ef074fd843fff4043ff39421a | src/extension/decorations/flutter_icon_decorations_lsp.ts | src/extension/decorations/flutter_icon_decorations_lsp.ts | import { FlutterOutline } from "../../shared/analysis/lsp/custom_protocol";
import { Logger } from "../../shared/interfaces";
import { fsPath } from "../../shared/utils/fs";
import { IconRangeComputerLsp } from "../../shared/vscode/icon_range_computer";
import { LspAnalyzer } from "../analysis/analyzer_lsp";
import { FlutterIconDecorations } from "./flutter_icon_decorations";
// Applies Flutter icon-preview decorations to the active editor, driven by
// Flutter outline notifications received from the LSP analyzer.
export class FlutterIconDecorationsLsp extends FlutterIconDecorations {
	// Computes, from a Flutter outline, the document ranges that should
	// receive icon decorations (results are handed to render()).
	private readonly computer: IconRangeComputerLsp;
	// Subscribes to outline notifications and refreshes decorations whenever
	// an outline arrives for the document shown in the active editor.
	constructor(logger: Logger, private readonly analyzer: LspAnalyzer) {
		super(logger);
		this.computer = new IconRangeComputerLsp(logger);
		// NOTE(review): `op.uri` is the raw URI string from the LSP
		// notification, not a vscode Uri, so passing it directly to fsPath()
		// looks suspect — the comparison with the editor's document uri may
		// never match. (The revised version of this file parses it with
		// vs.Uri.parse(op.uri) first — confirm against fsPath's signature.)
		this.subscriptions.push(this.analyzer.fileTracker.onFlutterOutline.listen(async (op) => {
			if (this.activeEditor && fsPath(this.activeEditor.document.uri) === fsPath(op.uri)) {
				this.update(op.outline);
			}
		}));
	}
	// Recomputes and renders decorations for the active editor. When no
	// outline is supplied, falls back to the outline cached by the file
	// tracker; returns quietly if there is no editor or no outline available.
	protected update(outline?: FlutterOutline) {
		if (!this.activeEditor)
			return;
		if (!outline)
			outline = this.analyzer.fileTracker.getFlutterOutlineFor(this.activeEditor.document.uri);
		if (!outline)
			return;
		const results = this.computer.compute(outline);
		this.render(results);
	}
}
| import * as vs from "vscode";
import { FlutterOutline } from "../../shared/analysis/lsp/custom_protocol";
import { Logger } from "../../shared/interfaces";
import { fsPath } from "../../shared/utils/fs";
import { IconRangeComputerLsp } from "../../shared/vscode/icon_range_computer";
import { LspAnalyzer } from "../analysis/analyzer_lsp";
import { FlutterIconDecorations } from "./flutter_icon_decorations";
// Applies Flutter icon-preview decorations to the active editor, driven by
// Flutter outline notifications received from the LSP analyzer.
export class FlutterIconDecorationsLsp extends FlutterIconDecorations {
	// Computes, from a Flutter outline, the document ranges that should
	// receive icon decorations (results are handed to render()).
	private readonly computer: IconRangeComputerLsp;
	// Subscribes to outline notifications and refreshes decorations whenever
	// an outline arrives for the document shown in the active editor.
	constructor(logger: Logger, private readonly analyzer: LspAnalyzer) {
		super(logger);
		this.computer = new IconRangeComputerLsp(logger);
		// `op.uri` is the raw URI string from the LSP notification, so it is
		// parsed into a vscode Uri before converting to a file path; this
		// makes the comparison with the active editor's document uri reliable.
		this.subscriptions.push(this.analyzer.fileTracker.onFlutterOutline.listen(async (op) => {
			if (this.activeEditor && fsPath(this.activeEditor.document.uri) === fsPath(vs.Uri.parse(op.uri))) {
				this.update(op.outline);
			}
		}));
	}
	// Recomputes and renders decorations for the active editor. When no
	// outline is supplied, falls back to the outline cached by the file
	// tracker; returns quietly if there is no editor or no outline available.
	protected update(outline?: FlutterOutline) {
		if (!this.activeEditor)
			return;
		if (!outline)
			outline = this.analyzer.fileTracker.getFlutterOutlineFor(this.activeEditor.document.uri);
		if (!outline)
			return;
		const results = this.computer.compute(outline);
		this.render(results);
	}
}
| Fix Flutter icon previews not detecting incoming outlines correctly due to URI in string | Fix Flutter icon previews not detecting incoming outlines correctly due to URI in string
Fixes #3081.
| TypeScript | mit | Dart-Code/Dart-Code,Dart-Code/Dart-Code,Dart-Code/Dart-Code,Dart-Code/Dart-Code,Dart-Code/Dart-Code | typescript | ## Code Before:
import { FlutterOutline } from "../../shared/analysis/lsp/custom_protocol";
import { Logger } from "../../shared/interfaces";
import { fsPath } from "../../shared/utils/fs";
import { IconRangeComputerLsp } from "../../shared/vscode/icon_range_computer";
import { LspAnalyzer } from "../analysis/analyzer_lsp";
import { FlutterIconDecorations } from "./flutter_icon_decorations";
// Applies Flutter icon-preview decorations to the active editor, driven by
// Flutter outline notifications received from the LSP analyzer.
export class FlutterIconDecorationsLsp extends FlutterIconDecorations {
	// Computes, from a Flutter outline, the document ranges that should
	// receive icon decorations (results are handed to render()).
	private readonly computer: IconRangeComputerLsp;
	// Subscribes to outline notifications and refreshes decorations whenever
	// an outline arrives for the document shown in the active editor.
	constructor(logger: Logger, private readonly analyzer: LspAnalyzer) {
		super(logger);
		this.computer = new IconRangeComputerLsp(logger);
		// NOTE(review): `op.uri` is the raw URI string from the LSP
		// notification, not a vscode Uri, so passing it directly to fsPath()
		// looks suspect — the comparison with the editor's document uri may
		// never match. (The revised version of this file parses it with
		// vs.Uri.parse(op.uri) first — confirm against fsPath's signature.)
		this.subscriptions.push(this.analyzer.fileTracker.onFlutterOutline.listen(async (op) => {
			if (this.activeEditor && fsPath(this.activeEditor.document.uri) === fsPath(op.uri)) {
				this.update(op.outline);
			}
		}));
	}
	// Recomputes and renders decorations for the active editor. When no
	// outline is supplied, falls back to the outline cached by the file
	// tracker; returns quietly if there is no editor or no outline available.
	protected update(outline?: FlutterOutline) {
		if (!this.activeEditor)
			return;
		if (!outline)
			outline = this.analyzer.fileTracker.getFlutterOutlineFor(this.activeEditor.document.uri);
		if (!outline)
			return;
		const results = this.computer.compute(outline);
		this.render(results);
	}
}
## Instruction:
Fix Flutter icon previews not detecting incoming outlines correctly due to URI in string
Fixes #3081.
## Code After:
import * as vs from "vscode";
import { FlutterOutline } from "../../shared/analysis/lsp/custom_protocol";
import { Logger } from "../../shared/interfaces";
import { fsPath } from "../../shared/utils/fs";
import { IconRangeComputerLsp } from "../../shared/vscode/icon_range_computer";
import { LspAnalyzer } from "../analysis/analyzer_lsp";
import { FlutterIconDecorations } from "./flutter_icon_decorations";
export class FlutterIconDecorationsLsp extends FlutterIconDecorations {
private readonly computer: IconRangeComputerLsp;
constructor(logger: Logger, private readonly analyzer: LspAnalyzer) {
super(logger);
this.computer = new IconRangeComputerLsp(logger);
this.subscriptions.push(this.analyzer.fileTracker.onFlutterOutline.listen(async (op) => {
if (this.activeEditor && fsPath(this.activeEditor.document.uri) === fsPath(vs.Uri.parse(op.uri))) {
this.update(op.outline);
}
}));
}
protected update(outline?: FlutterOutline) {
if (!this.activeEditor)
return;
if (!outline)
outline = this.analyzer.fileTracker.getFlutterOutlineFor(this.activeEditor.document.uri);
if (!outline)
return;
const results = this.computer.compute(outline);
this.render(results);
}
}
| + import * as vs from "vscode";
import { FlutterOutline } from "../../shared/analysis/lsp/custom_protocol";
import { Logger } from "../../shared/interfaces";
import { fsPath } from "../../shared/utils/fs";
import { IconRangeComputerLsp } from "../../shared/vscode/icon_range_computer";
import { LspAnalyzer } from "../analysis/analyzer_lsp";
import { FlutterIconDecorations } from "./flutter_icon_decorations";
export class FlutterIconDecorationsLsp extends FlutterIconDecorations {
private readonly computer: IconRangeComputerLsp;
constructor(logger: Logger, private readonly analyzer: LspAnalyzer) {
super(logger);
this.computer = new IconRangeComputerLsp(logger);
this.subscriptions.push(this.analyzer.fileTracker.onFlutterOutline.listen(async (op) => {
- if (this.activeEditor && fsPath(this.activeEditor.document.uri) === fsPath(op.uri)) {
+ if (this.activeEditor && fsPath(this.activeEditor.document.uri) === fsPath(vs.Uri.parse(op.uri))) {
? +++++++++++++ +
this.update(op.outline);
}
}));
}
protected update(outline?: FlutterOutline) {
if (!this.activeEditor)
return;
if (!outline)
outline = this.analyzer.fileTracker.getFlutterOutlineFor(this.activeEditor.document.uri);
if (!outline)
return;
const results = this.computer.compute(outline);
this.render(results);
}
} | 3 | 0.085714 | 2 | 1 |
96ff3e44d9dcd2c0f9830c5cef5aedb0b1cda946 | README.md | README.md |
Database Handler for PostgreSQL writer upon pg and bluebird.
## Usage
## Developing
### Tools
|
[](http://travis-ci.org/sapienlab/dbh-pg)
Database Handler for PostgreSQL writer upon pg and bluebird.
## Usage
## Developing
### Tools
| Add travis Build Status image | Add travis Build Status image
| Markdown | mit | sapienlab/dbh-pg,roro89/dbh-pg | markdown | ## Code Before:
Database Handler for PostgreSQL writer upon pg and bluebird.
## Usage
## Developing
### Tools
## Instruction:
Add travis Build Status image
## Code After:
[](http://travis-ci.org/sapienlab/dbh-pg)
Database Handler for PostgreSQL writer upon pg and bluebird.
## Usage
## Developing
### Tools
| +
+ [](http://travis-ci.org/sapienlab/dbh-pg)
Database Handler for PostgreSQL writer upon pg and bluebird.
## Usage
## Developing
### Tools | 2 | 0.153846 | 2 | 0 |
045fa8964f9c0ad0960f3bc0d9a536a1b1072c11 | install.bat | install.bat |
set PIP_FIND_LINKS="https://whls.blob.core.windows.net/unstable/index.html"
pip install lytest simphony sax jax sklearn klayout
pip install "jax[cpu]===0.3.7" -f https://whls.blob.core.windows.net/unstable/index.html --use-deprecated legacy-resolver
pip install gdsfactory==5.38.0
gf tool install
if exist "%USERPROFILE%\Desktop\gdsfactory" (goto SKIP_INSTALL)
cd %USERPROFILE%\Desktop
git clone https://github.com/gdsfactory/gdsfactory.git
:SKIP_INSTALL
echo gdsfactory installed
|
set PIP_FIND_LINKS="https://whls.blob.core.windows.net/unstable/index.html"
pip install lytest simphony sax jax sklearn klayout
pip install "jaxlib[cuda111]" -f https://whls.blob.core.windows.net/unstable/index.html --use-deprecated legacy-resolver
pip install gdsfactory==5.38.0
gf tool install
if exist "%USERPROFILE%\Desktop\gdsfactory" (goto SKIP_INSTALL)
cd %USERPROFILE%\Desktop
git clone https://github.com/gdsfactory/gdsfactory.git
:SKIP_INSTALL
echo gdsfactory installed
| Set to gpu version by default and remove version number | Set to gpu version by default and remove version number
| Batchfile | mit | gdsfactory/gdsfactory,gdsfactory/gdsfactory | batchfile | ## Code Before:
set PIP_FIND_LINKS="https://whls.blob.core.windows.net/unstable/index.html"
pip install lytest simphony sax jax sklearn klayout
pip install "jax[cpu]===0.3.7" -f https://whls.blob.core.windows.net/unstable/index.html --use-deprecated legacy-resolver
pip install gdsfactory==5.38.0
gf tool install
if exist "%USERPROFILE%\Desktop\gdsfactory" (goto SKIP_INSTALL)
cd %USERPROFILE%\Desktop
git clone https://github.com/gdsfactory/gdsfactory.git
:SKIP_INSTALL
echo gdsfactory installed
## Instruction:
Set to gpu version by default and remove version number
## Code After:
set PIP_FIND_LINKS="https://whls.blob.core.windows.net/unstable/index.html"
pip install lytest simphony sax jax sklearn klayout
pip install "jaxlib[cuda111]" -f https://whls.blob.core.windows.net/unstable/index.html --use-deprecated legacy-resolver
pip install gdsfactory==5.38.0
gf tool install
if exist "%USERPROFILE%\Desktop\gdsfactory" (goto SKIP_INSTALL)
cd %USERPROFILE%\Desktop
git clone https://github.com/gdsfactory/gdsfactory.git
:SKIP_INSTALL
echo gdsfactory installed
|
set PIP_FIND_LINKS="https://whls.blob.core.windows.net/unstable/index.html"
pip install lytest simphony sax jax sklearn klayout
- pip install "jax[cpu]===0.3.7" -f https://whls.blob.core.windows.net/unstable/index.html --use-deprecated legacy-resolver
? - --------
+ pip install "jaxlib[cuda111]" -f https://whls.blob.core.windows.net/unstable/index.html --use-deprecated legacy-resolver
? +++ +++++
pip install gdsfactory==5.38.0
gf tool install
if exist "%USERPROFILE%\Desktop\gdsfactory" (goto SKIP_INSTALL)
cd %USERPROFILE%\Desktop
git clone https://github.com/gdsfactory/gdsfactory.git
:SKIP_INSTALL
echo gdsfactory installed | 2 | 0.153846 | 1 | 1 |
d525ae3c17a93ad64f6ca97181c3a7aba64679be | app/views/tags/show.html.erb | app/views/tags/show.html.erb | <h3>Tag <%= @tag %></h3>
<p>
<b>Tomatoes:</b>
<%= pluralize(@tomatoes.size, 'tomato') %>
</p>
<%= link_to 'Back', tags_path %>
| <h3>Tag <%= @tag %></h3>
<p>
<b>Tomatoes:</b>
<%= pluralize(@tomatoes.size, 'tomato') %>
</p>
<p>
<b>Total duration:</b>
<%= humanize(@tomatoes.size*Tomato::DURATION) %>
</p>
<%= link_to 'Back', tags_path %>
| Add total duration detail to tag show | Add total duration detail to tag show
| HTML+ERB | mit | tomatoes-app/tomatoes,potomak/tomatoes,potomak/tomatoes,tomatoes-app/tomatoes,tomatoes-app/tomatoes,potomak/tomatoes,tomatoes-app/tomatoes | html+erb | ## Code Before:
<h3>Tag <%= @tag %></h3>
<p>
<b>Tomatoes:</b>
<%= pluralize(@tomatoes.size, 'tomato') %>
</p>
<%= link_to 'Back', tags_path %>
## Instruction:
Add total duration detail to tag show
## Code After:
<h3>Tag <%= @tag %></h3>
<p>
<b>Tomatoes:</b>
<%= pluralize(@tomatoes.size, 'tomato') %>
</p>
<p>
<b>Total duration:</b>
<%= humanize(@tomatoes.size*Tomato::DURATION) %>
</p>
<%= link_to 'Back', tags_path %>
| <h3>Tag <%= @tag %></h3>
<p>
<b>Tomatoes:</b>
<%= pluralize(@tomatoes.size, 'tomato') %>
</p>
+ <p>
+ <b>Total duration:</b>
+ <%= humanize(@tomatoes.size*Tomato::DURATION) %>
+ </p>
+
<%= link_to 'Back', tags_path %> | 5 | 0.625 | 5 | 0 |
6f5b000e3dffacb2a627c8f094749a68e50b26ea | .travis.yml | .travis.yml | language: haskell
before_install:
# Uncomment whenever hackage is down.
# - mkdir -p ~/.cabal && cp travis/config ~/.cabal/config && cabal update
- cabal update
# Try installing some of the build-deps with apt-get for speed.
- travis/cabal-apt-install $mode
install:
- cabal configure -flib-Werror $mode
- cabal build
script:
- $script
- scripts/stats
notifications:
irc:
channels:
- "irc.freenode.org#haskell-lens"
skip_join: true
template:
- "\x0313lens\x03/\x0306%{branch}\x03 \x0314%{commit}\x03 %{build_url} %{message}"
env:
- mode="--enable-tests" script="cabal test --show-details=always"
# - mode="--enable-tests -fsafe" script="cabal test"
# - mode="--enable-tests -fdump-splices" script="cabal test --show-details=always"
# - mode="--enable-benchmarks -fdump-splices" script="cabal bench"
| language: haskell
env:
- GHCVER=7.4.2
- GHCVER=7.6.3
# - >
# GHCVER=7.4.2
# MODE="-fsafe"
before_install:
# If $GHCVER is the one travis has, don't bother reinstalling it.
# We can also have faster builds by installing some libraries with
# `apt`. If it isn't, install the GHC we want from hvr's PPA along
# with cabal-1.18.
- |
if [ $GHCVER = `ghc --numeric-version` ]; then
# Try installing some of the build-deps with apt-get for speed.
travis/cabal-apt-install --enable-tests $MODE
export CABAL=cabal
else
# Install the GHC we want from hvr's PPA
sudo add-apt-repository -y ppa:hvr/ghc
sudo apt-get update
sudo apt-get install cabal-install-1.18 ghc-$GHCVER
export CABAL=cabal-1.18
export PATH=/opt/ghc/$GHCVER/bin:$PATH
fi
# Uncomment whenever hackage is down.
# - mkdir -p ~/.cabal && cp travis/config ~/.cabal/config && $CABAL update
- $CABAL update
install:
- $CABAL install --dependencies-only --enable-tests
- $CABAL configure -flib-Werror --enable-tests $MODE
script:
- $CABAL build
- $CABAL test --show-details=always
- scripts/stats
notifications:
irc:
channels:
- "irc.freenode.org#haskell-lens"
skip_join: true
template:
- "\x0313lens\x03/\x0306%{branch}\x03 \x0314%{commit}\x03 %{build_url} %{message}"
| Build on Travis with hvr's GHC versions PPA. | Build on Travis with hvr's GHC versions PPA.
Preserve other environment variables for travis.
I didn't really think about what would happen with $mode and $script.
>.>
Install dependencies for travis in the `install` step.
Rewrite the .travis.yml almost completely.
This .travis.yml allows us to specify versions more nicely and also fall
back to travis' GHC if it's the same version as the one we want.
I'd be OK with adding benchmarking options back, but $script seems like
a weird way to do it.
Install test dependencies for travis with cabal-apt-install.
This should make things a little quicker.
| YAML | bsd-3-clause | Icelandjack/lens,omefire/lens,hvr/lens,cdepillabout/lens,rpglover64/lens,hvr/lens,timjb/lens,Fuuzetsu/lens,danidiaz/lens,Gabriel439/lens,cchalmers/lens,ddssff/lens | yaml | ## Code Before:
language: haskell
before_install:
# Uncomment whenever hackage is down.
# - mkdir -p ~/.cabal && cp travis/config ~/.cabal/config && cabal update
- cabal update
# Try installing some of the build-deps with apt-get for speed.
- travis/cabal-apt-install $mode
install:
- cabal configure -flib-Werror $mode
- cabal build
script:
- $script
- scripts/stats
notifications:
irc:
channels:
- "irc.freenode.org#haskell-lens"
skip_join: true
template:
- "\x0313lens\x03/\x0306%{branch}\x03 \x0314%{commit}\x03 %{build_url} %{message}"
env:
- mode="--enable-tests" script="cabal test --show-details=always"
# - mode="--enable-tests -fsafe" script="cabal test"
# - mode="--enable-tests -fdump-splices" script="cabal test --show-details=always"
# - mode="--enable-benchmarks -fdump-splices" script="cabal bench"
## Instruction:
Build on Travis with hvr's GHC versions PPA.
Preserve other environment variables for travis.
I didn't really think about what would happen with $mode and $script.
>.>
Install dependencies for travis in the `install` step.
Rewrite the .travis.yml almost completely.
This .travis.yml allows us to specify versions more nicely and also fall
back to travis' GHC if it's the same version as the one we want.
I'd be OK with adding benchmarking options back, but $script seems like
a weird way to do it.
Install test dependencies for travis with cabal-apt-install.
This should make things a little quicker.
## Code After:
language: haskell
env:
- GHCVER=7.4.2
- GHCVER=7.6.3
# - >
# GHCVER=7.4.2
# MODE="-fsafe"
before_install:
# If $GHCVER is the one travis has, don't bother reinstalling it.
# We can also have faster builds by installing some libraries with
# `apt`. If it isn't, install the GHC we want from hvr's PPA along
# with cabal-1.18.
- |
if [ $GHCVER = `ghc --numeric-version` ]; then
# Try installing some of the build-deps with apt-get for speed.
travis/cabal-apt-install --enable-tests $MODE
export CABAL=cabal
else
# Install the GHC we want from hvr's PPA
sudo add-apt-repository -y ppa:hvr/ghc
sudo apt-get update
sudo apt-get install cabal-install-1.18 ghc-$GHCVER
export CABAL=cabal-1.18
export PATH=/opt/ghc/$GHCVER/bin:$PATH
fi
# Uncomment whenever hackage is down.
# - mkdir -p ~/.cabal && cp travis/config ~/.cabal/config && $CABAL update
- $CABAL update
install:
- $CABAL install --dependencies-only --enable-tests
- $CABAL configure -flib-Werror --enable-tests $MODE
script:
- $CABAL build
- $CABAL test --show-details=always
- scripts/stats
notifications:
irc:
channels:
- "irc.freenode.org#haskell-lens"
skip_join: true
template:
- "\x0313lens\x03/\x0306%{branch}\x03 \x0314%{commit}\x03 %{build_url} %{message}"
| language: haskell
+
+ env:
+ - GHCVER=7.4.2
+ - GHCVER=7.6.3
+ # - >
+ # GHCVER=7.4.2
+ # MODE="-fsafe"
+
before_install:
+ # If $GHCVER is the one travis has, don't bother reinstalling it.
+ # We can also have faster builds by installing some libraries with
+ # `apt`. If it isn't, install the GHC we want from hvr's PPA along
+ # with cabal-1.18.
+ - |
+ if [ $GHCVER = `ghc --numeric-version` ]; then
+ # Try installing some of the build-deps with apt-get for speed.
+ travis/cabal-apt-install --enable-tests $MODE
+ export CABAL=cabal
+ else
+ # Install the GHC we want from hvr's PPA
+ sudo add-apt-repository -y ppa:hvr/ghc
+ sudo apt-get update
+ sudo apt-get install cabal-install-1.18 ghc-$GHCVER
+ export CABAL=cabal-1.18
+ export PATH=/opt/ghc/$GHCVER/bin:$PATH
+ fi
# Uncomment whenever hackage is down.
- # - mkdir -p ~/.cabal && cp travis/config ~/.cabal/config && cabal update
? ^^^^^
+ # - mkdir -p ~/.cabal && cp travis/config ~/.cabal/config && $CABAL update
? ^^^^^^
+ - $CABAL update
- - cabal update
-
- # Try installing some of the build-deps with apt-get for speed.
- - travis/cabal-apt-install $mode
install:
- - cabal configure -flib-Werror $mode
- - cabal build
+ - $CABAL install --dependencies-only --enable-tests
+ - $CABAL configure -flib-Werror --enable-tests $MODE
script:
- - $script
+ - $CABAL build
+ - $CABAL test --show-details=always
- scripts/stats
notifications:
irc:
channels:
- "irc.freenode.org#haskell-lens"
skip_join: true
template:
- "\x0313lens\x03/\x0306%{branch}\x03 \x0314%{commit}\x03 %{build_url} %{message}"
-
- env:
- - mode="--enable-tests" script="cabal test --show-details=always"
- # - mode="--enable-tests -fsafe" script="cabal test"
- # - mode="--enable-tests -fdump-splices" script="cabal test --show-details=always"
- # - mode="--enable-benchmarks -fdump-splices" script="cabal bench" | 45 | 1.5 | 31 | 14 |
c2598058722531662aab8831640fc367689d2a43 | tests/utils/test_process_word_vectors.py | tests/utils/test_process_word_vectors.py | import inspect
import os
import pytest
import numpy as np
from subprocess import call
from utils.preprocess_text_word_vectors import txtvec2npy
def test_text_word2vec2npy():
# check whether files are present in folder
vectors_name = 'wiki.fiu_vro.vec'
path = os.path.dirname(inspect.getfile(inspect.currentframe()))
if not os.path.exists(path + '/' + vectors_name):
call(["wget https://s3-us-west-1.amazonaws.com/fasttext-vectors/" + vectors_name + " -O " +
path + "/" + vectors_name],
shell=True)
txtvec2npy(path + '/' + vectors_name, './', vectors_name[:-4])
vectors = np.load('./' + vectors_name[:-4] + '.npy').item()
assert len(list(vectors)) == 8769
assert vectors['kihlkunnan'].shape[0] == 300
if __name__ == '__main__':
pytest.main([__file__])
| import inspect
import os
import pytest
import numpy as np
from subprocess import call
from utils.preprocess_text_word_vectors import txtvec2npy
def test_text_word2vec2npy():
# check whether files are present in folder
vectors_name = 'wiki.fiu_vro.vec'
path = os.path.dirname(inspect.getfile(inspect.currentframe()))
if not os.path.exists(path + '/' + vectors_name):
call(["wget https://dl.fbaipublicfiles.com/fasttext/vectors-wiki/" + vectors_name + " -O " +
path + "/" + vectors_name],
shell=True)
txtvec2npy(path + '/' + vectors_name, './', vectors_name[:-4])
vectors = np.load('./' + vectors_name[:-4] + '.npy').item()
assert len(list(vectors)) == 8769
assert vectors['kihlkunnan'].shape[0] == 300
if __name__ == '__main__':
pytest.main([__file__])
| Update Fasttext pretrained vectors location | Update Fasttext pretrained vectors location
| Python | mit | lvapeab/nmt-keras,lvapeab/nmt-keras | python | ## Code Before:
import inspect
import os
import pytest
import numpy as np
from subprocess import call
from utils.preprocess_text_word_vectors import txtvec2npy
def test_text_word2vec2npy():
# check whether files are present in folder
vectors_name = 'wiki.fiu_vro.vec'
path = os.path.dirname(inspect.getfile(inspect.currentframe()))
if not os.path.exists(path + '/' + vectors_name):
call(["wget https://s3-us-west-1.amazonaws.com/fasttext-vectors/" + vectors_name + " -O " +
path + "/" + vectors_name],
shell=True)
txtvec2npy(path + '/' + vectors_name, './', vectors_name[:-4])
vectors = np.load('./' + vectors_name[:-4] + '.npy').item()
assert len(list(vectors)) == 8769
assert vectors['kihlkunnan'].shape[0] == 300
if __name__ == '__main__':
pytest.main([__file__])
## Instruction:
Update Fasttext pretrained vectors location
## Code After:
import inspect
import os
import pytest
import numpy as np
from subprocess import call
from utils.preprocess_text_word_vectors import txtvec2npy
def test_text_word2vec2npy():
# check whether files are present in folder
vectors_name = 'wiki.fiu_vro.vec'
path = os.path.dirname(inspect.getfile(inspect.currentframe()))
if not os.path.exists(path + '/' + vectors_name):
call(["wget https://dl.fbaipublicfiles.com/fasttext/vectors-wiki/" + vectors_name + " -O " +
path + "/" + vectors_name],
shell=True)
txtvec2npy(path + '/' + vectors_name, './', vectors_name[:-4])
vectors = np.load('./' + vectors_name[:-4] + '.npy').item()
assert len(list(vectors)) == 8769
assert vectors['kihlkunnan'].shape[0] == 300
if __name__ == '__main__':
pytest.main([__file__])
| import inspect
import os
import pytest
import numpy as np
from subprocess import call
from utils.preprocess_text_word_vectors import txtvec2npy
def test_text_word2vec2npy():
# check whether files are present in folder
vectors_name = 'wiki.fiu_vro.vec'
path = os.path.dirname(inspect.getfile(inspect.currentframe()))
if not os.path.exists(path + '/' + vectors_name):
- call(["wget https://s3-us-west-1.amazonaws.com/fasttext-vectors/" + vectors_name + " -O " +
? ^^^ ^^^ ------------- ^
+ call(["wget https://dl.fbaipublicfiles.com/fasttext/vectors-wiki/" + vectors_name + " -O " +
? ^^^^^^^^ ^^^^^^^ ^ +++++
path + "/" + vectors_name],
shell=True)
txtvec2npy(path + '/' + vectors_name, './', vectors_name[:-4])
vectors = np.load('./' + vectors_name[:-4] + '.npy').item()
assert len(list(vectors)) == 8769
assert vectors['kihlkunnan'].shape[0] == 300
if __name__ == '__main__':
pytest.main([__file__]) | 2 | 0.08 | 1 | 1 |
07a196be3dca5125454262bb96967d5895081c56 | app/views/admin/application_settings/_repository_storage.html.haml | app/views/admin/application_settings/_repository_storage.html.haml | = form_for @application_setting, url: admin_application_settings_path(anchor: 'js-repository-storage-settings'), html: { class: 'fieldset-form' } do |f|
= form_errors(@application_setting)
%fieldset
.sub-section
.form-group
.form-check
= f.check_box :hashed_storage_enabled, class: 'form-check-input qa-hashed-storage-checkbox'
= f.label :hashed_storage_enabled, class: 'form-check-label' do
Use hashed storage paths for newly created and renamed projects
.form-text.text-muted
Enable immutable, hash-based paths and repository names to store repositories on disk. This prevents
repositories from having to be moved or renamed when the Project URL changes and may improve disk I/O performance.
%em (EXPERIMENTAL)
.form-group
= f.label :repository_storages, 'Storage paths for new projects', class: 'label-bold'
= f.select :repository_storages, repository_storages_options_for_select(@application_setting.repository_storages),
{include_hidden: false}, multiple: true, class: 'form-control'
.form-text.text-muted
Manage repository storage paths. Learn more in the
= succeed "." do
= link_to "repository storages documentation", help_page_path("administration/repository_storage_paths")
= f.submit 'Save changes', class: "btn btn-success qa-save-changes-button"
| = form_for @application_setting, url: admin_application_settings_path(anchor: 'js-repository-storage-settings'), html: { class: 'fieldset-form' } do |f|
= form_errors(@application_setting)
%fieldset
.sub-section
.form-group
.form-check
= f.check_box :hashed_storage_enabled, class: 'form-check-input qa-hashed-storage-checkbox'
= f.label :hashed_storage_enabled, class: 'form-check-label' do
Use hashed storage paths for newly created and renamed projects
.form-text.text-muted
Enable immutable, hash-based paths and repository names to store repositories on disk. This prevents
repositories from having to be moved or renamed when the Project URL changes and may improve disk I/O performance.
.form-group
= f.label :repository_storages, 'Storage paths for new projects', class: 'label-bold'
= f.select :repository_storages, repository_storages_options_for_select(@application_setting.repository_storages),
{include_hidden: false}, multiple: true, class: 'form-control'
.form-text.text-muted
Manage repository storage paths. Learn more in the
= succeed "." do
= link_to "repository storages documentation", help_page_path("administration/repository_storage_paths")
= f.submit 'Save changes', class: "btn btn-success qa-save-changes-button"
| Remove "Experimental" text from Hashed Storage settings page | Remove "Experimental" text from Hashed Storage settings page
| Haml | mit | mmkassem/gitlabhq,stoplightio/gitlabhq,axilleas/gitlabhq,axilleas/gitlabhq,axilleas/gitlabhq,mmkassem/gitlabhq,stoplightio/gitlabhq,stoplightio/gitlabhq,iiet/iiet-git,iiet/iiet-git,mmkassem/gitlabhq,stoplightio/gitlabhq,iiet/iiet-git,mmkassem/gitlabhq,axilleas/gitlabhq,iiet/iiet-git | haml | ## Code Before:
= form_for @application_setting, url: admin_application_settings_path(anchor: 'js-repository-storage-settings'), html: { class: 'fieldset-form' } do |f|
= form_errors(@application_setting)
%fieldset
.sub-section
.form-group
.form-check
= f.check_box :hashed_storage_enabled, class: 'form-check-input qa-hashed-storage-checkbox'
= f.label :hashed_storage_enabled, class: 'form-check-label' do
Use hashed storage paths for newly created and renamed projects
.form-text.text-muted
Enable immutable, hash-based paths and repository names to store repositories on disk. This prevents
repositories from having to be moved or renamed when the Project URL changes and may improve disk I/O performance.
%em (EXPERIMENTAL)
.form-group
= f.label :repository_storages, 'Storage paths for new projects', class: 'label-bold'
= f.select :repository_storages, repository_storages_options_for_select(@application_setting.repository_storages),
{include_hidden: false}, multiple: true, class: 'form-control'
.form-text.text-muted
Manage repository storage paths. Learn more in the
= succeed "." do
= link_to "repository storages documentation", help_page_path("administration/repository_storage_paths")
= f.submit 'Save changes', class: "btn btn-success qa-save-changes-button"
## Instruction:
Remove "Experimental" text from Hashed Storage settings page
## Code After:
= form_for @application_setting, url: admin_application_settings_path(anchor: 'js-repository-storage-settings'), html: { class: 'fieldset-form' } do |f|
= form_errors(@application_setting)
%fieldset
.sub-section
.form-group
.form-check
= f.check_box :hashed_storage_enabled, class: 'form-check-input qa-hashed-storage-checkbox'
= f.label :hashed_storage_enabled, class: 'form-check-label' do
Use hashed storage paths for newly created and renamed projects
.form-text.text-muted
Enable immutable, hash-based paths and repository names to store repositories on disk. This prevents
repositories from having to be moved or renamed when the Project URL changes and may improve disk I/O performance.
.form-group
= f.label :repository_storages, 'Storage paths for new projects', class: 'label-bold'
= f.select :repository_storages, repository_storages_options_for_select(@application_setting.repository_storages),
{include_hidden: false}, multiple: true, class: 'form-control'
.form-text.text-muted
Manage repository storage paths. Learn more in the
= succeed "." do
= link_to "repository storages documentation", help_page_path("administration/repository_storage_paths")
= f.submit 'Save changes', class: "btn btn-success qa-save-changes-button"
| = form_for @application_setting, url: admin_application_settings_path(anchor: 'js-repository-storage-settings'), html: { class: 'fieldset-form' } do |f|
= form_errors(@application_setting)
%fieldset
.sub-section
.form-group
.form-check
= f.check_box :hashed_storage_enabled, class: 'form-check-input qa-hashed-storage-checkbox'
= f.label :hashed_storage_enabled, class: 'form-check-label' do
Use hashed storage paths for newly created and renamed projects
.form-text.text-muted
Enable immutable, hash-based paths and repository names to store repositories on disk. This prevents
repositories from having to be moved or renamed when the Project URL changes and may improve disk I/O performance.
- %em (EXPERIMENTAL)
.form-group
= f.label :repository_storages, 'Storage paths for new projects', class: 'label-bold'
= f.select :repository_storages, repository_storages_options_for_select(@application_setting.repository_storages),
{include_hidden: false}, multiple: true, class: 'form-control'
.form-text.text-muted
Manage repository storage paths. Learn more in the
= succeed "." do
= link_to "repository storages documentation", help_page_path("administration/repository_storage_paths")
= f.submit 'Save changes', class: "btn btn-success qa-save-changes-button" | 1 | 0.041667 | 0 | 1 |
f790c4d0300b448d97303a43d5a9a8d65e69bdb9 | tests/TestCase.php | tests/TestCase.php | <?php
use Mockery\Adapter\PHPUnit\MockeryTestCase;
abstract class TestCase extends MockeryTestCase
{
}
| <?php
abstract class TestCase extends \PHPUnit\Framework\TestCase
{
}
| Test - reverted mockery adapter usage. | IHF: Test - reverted mockery adapter usage.
| PHP | mit | dmitry-ivanov/laravel-helper-functions | php | ## Code Before:
<?php
use Mockery\Adapter\PHPUnit\MockeryTestCase;
abstract class TestCase extends MockeryTestCase
{
}
## Instruction:
IHF: Test - reverted mockery adapter usage.
## Code After:
<?php
abstract class TestCase extends \PHPUnit\Framework\TestCase
{
}
| <?php
- use Mockery\Adapter\PHPUnit\MockeryTestCase;
-
- abstract class TestCase extends MockeryTestCase
? ^ ^ ^^^
+ abstract class TestCase extends \PHPUnit\Framework\TestCase
? ^^^^^^^^^^^^^^^ ^ ^
{
} | 4 | 0.571429 | 1 | 3 |
83f761e1bafa07f1998bb845caa69c95d80a0235 | src/admin/redux/modules/letter.js | src/admin/redux/modules/letter.js | /* @flow */
import { FETCHED_MEMBER } from '../../../shared/redux/modules/member.js'
import { mergeAll, converge, unapply, compose, objOf, prop } from 'ramda'
import type { Action, Reducer } from 'redux'
type State = {}
const reducer: Reducer<State, Action> =
(state = { id: 0, address: [], name: '' }, { type, payload }) => {
switch (type) {
case FETCHED_MEMBER:
return converge
( unapply(mergeAll)
, [ compose(objOf('name'), getName)
, compose(objOf('address'), getAddress)
, compose(objOf('id'), prop('id'))
]
)(payload)
default:
return state
}
}
const getName = ({ title, full_name }) => title + ' ' + full_name
const getAddress = (
{ title
, first_name = ''
, last_name = ''
, address1
, address2
, address3
, address4
, county
, postcode
}
) =>
[ `${title} ${first_name[0] || ''} ${last_name}`, address1, address2,
address3, address4, county, postcode ]
export default reducer
| /* @flow */
import { FETCHED_MEMBER } from '../../../shared/redux/modules/member.js'
import { mergeAll, converge, unapply, compose, objOf, prop } from 'ramda'
import type { Action, Reducer } from 'redux'
type State = {}
const reducer: Reducer<State, Action> =
(state = { id: 0, address: [], name: '' }, { type, payload }) => {
switch (type) {
case FETCHED_MEMBER:
return converge
( unapply(mergeAll)
, [ compose(objOf('name'), getName)
, compose(objOf('address'), getAddress)
, compose(objOf('id'), prop('id'))
]
)(payload)
default:
return state
}
}
const getName = ({ title, full_name }) => title + ' ' + full_name
const getAddress = (
{ title
, first_name = ''
, last_name = ''
, address1
, address2
, address3
, address4
, county
, postcode
}
) =>
[ `${title} ${(first_name && first_name[0]) || ''} ${last_name}`
, address1
, address2
, address3
, address4
, county
, postcode ]
export default reducer
| Fix bug that meant member details not displaying if no first name had been entered. | Fix bug that meant member details not displaying if no first name had been entered.
| JavaScript | mit | foundersandcoders/sail-back,foundersandcoders/sail-back | javascript | ## Code Before:
/* @flow */
import { FETCHED_MEMBER } from '../../../shared/redux/modules/member.js'
import { mergeAll, converge, unapply, compose, objOf, prop } from 'ramda'
import type { Action, Reducer } from 'redux'
type State = {}
const reducer: Reducer<State, Action> =
(state = { id: 0, address: [], name: '' }, { type, payload }) => {
switch (type) {
case FETCHED_MEMBER:
return converge
( unapply(mergeAll)
, [ compose(objOf('name'), getName)
, compose(objOf('address'), getAddress)
, compose(objOf('id'), prop('id'))
]
)(payload)
default:
return state
}
}
const getName = ({ title, full_name }) => title + ' ' + full_name
const getAddress = (
{ title
, first_name = ''
, last_name = ''
, address1
, address2
, address3
, address4
, county
, postcode
}
) =>
[ `${title} ${first_name[0] || ''} ${last_name}`, address1, address2,
address3, address4, county, postcode ]
export default reducer
## Instruction:
Fix bug that meant member details not displaying if no first name had been entered.
## Code After:
/* @flow */
import { FETCHED_MEMBER } from '../../../shared/redux/modules/member.js'
import { mergeAll, converge, unapply, compose, objOf, prop } from 'ramda'
import type { Action, Reducer } from 'redux'
type State = {}
const reducer: Reducer<State, Action> =
(state = { id: 0, address: [], name: '' }, { type, payload }) => {
switch (type) {
case FETCHED_MEMBER:
return converge
( unapply(mergeAll)
, [ compose(objOf('name'), getName)
, compose(objOf('address'), getAddress)
, compose(objOf('id'), prop('id'))
]
)(payload)
default:
return state
}
}
const getName = ({ title, full_name }) => title + ' ' + full_name
const getAddress = (
{ title
, first_name = ''
, last_name = ''
, address1
, address2
, address3
, address4
, county
, postcode
}
) =>
[ `${title} ${(first_name && first_name[0]) || ''} ${last_name}`
, address1
, address2
, address3
, address4
, county
, postcode ]
export default reducer
| /* @flow */
import { FETCHED_MEMBER } from '../../../shared/redux/modules/member.js'
import { mergeAll, converge, unapply, compose, objOf, prop } from 'ramda'
import type { Action, Reducer } from 'redux'
type State = {}
const reducer: Reducer<State, Action> =
(state = { id: 0, address: [], name: '' }, { type, payload }) => {
switch (type) {
case FETCHED_MEMBER:
return converge
( unapply(mergeAll)
, [ compose(objOf('name'), getName)
, compose(objOf('address'), getAddress)
, compose(objOf('id'), prop('id'))
]
)(payload)
default:
return state
}
}
const getName = ({ title, full_name }) => title + ' ' + full_name
const getAddress = (
{ title
, first_name = ''
, last_name = ''
, address1
, address2
, address3
, address4
, county
, postcode
}
) =>
- [ `${title} ${first_name[0] || ''} ${last_name}`, address1, address2,
- address3, address4, county, postcode ]
+ [ `${title} ${(first_name && first_name[0]) || ''} ${last_name}`
+ , address1
+ , address2
+ , address3
+ , address4
+ , county
+ , postcode ]
export default reducer
- | 10 | 0.232558 | 7 | 3 |
3e1ea8337814b5730a0af9b3de7797e054abbf99 | hyde/tests/templates/jinja2/index.html | hyde/tests/templates/jinja2/index.html | {% extends "layout.html" %}
{% from "helpers.html" import input_field, textarea, form %}
{% block page_title %}Index Page{% endblock %}
{% block body %}
{%- for article in articles if article.published %}
<div class="article">
<h2><a href="{{ article.href|e }}">{{ article.title|e }}</a></h2>
<p class="meta">written by <a href="{{ article.user.href|e
}}">{{ article.user.username|e }}</a> on {{ article.pub_date|dateformat }}</p>
<div class="text">{{ article.body }}</div>
</div>
{%- endfor %}
{%- call form() %}
<dl>
<dt>Name</dt>
<dd>{{ input_field('name') }}</dd>
<dt>E-Mail</dt>
<dd>{{ input_field('email') }}</dd>
<dt>URL</dt>
<dd>{{ input_field('url') }}</dd>
<dt>Comment</dt>
<dd>{{ textarea('comment') }}</dd>
<dt>Captcha</dt>
<dd>{{ input_field('captcha') }}</dd>
</dl>
{{ input_field(type='submit', value='Submit') }}
{{ input_field('cancel', type='submit', value='Cancel') }}
{%- endcall %}
{% endblock %}
| {% extends "layout.html" %}
{% from "helpers.html" import input_field, textarea, form %}
{% block page_title %}Index Page{% endblock %}
{% block body %}
{%- for article in articles if article.published %}
<div class="article">
<h2><a href="{{ article.href|e }}">{{ article.title|e }}</a></h2>
<p class="meta">written by <a href="{{ article.user.href|e
}}">{{ article.user.username|e }}</a> on {{ article.pub_date.strftime('%Y-%m-%d') }}</p>
<div class="text">{{ article.body }}</div>
</div>
{%- endfor %}
{%- call form() %}
<dl>
<dt>Name</dt>
<dd>{{ input_field('name') }}</dd>
<dt>E-Mail</dt>
<dd>{{ input_field('email') }}</dd>
<dt>URL</dt>
<dd>{{ input_field('url') }}</dd>
<dt>Comment</dt>
<dd>{{ textarea('comment') }}</dd>
<dt>Captcha</dt>
<dd>{{ input_field('captcha') }}</dd>
</dl>
{{ input_field(type='submit', value='Submit') }}
{{ input_field('cancel', type='submit', value='Cancel') }}
{%- endcall %}
{% endblock %}
| Fix the use of unknown dateformat filter dateformat in tests. | Fix the use of unknown dateformat filter dateformat in tests.
Use strftime instead. There seems to be no dateformat filter in jinja
documentation.
| HTML | mit | 0111001101111010/hyde,JeNeSuisPasDave/hyde,jngeist/hyde,stiell/hyde,printerpam/hyde,hyde/hyde,jd/hyde,hyde/hyde,JeNeSuisPasDave/hyde,bheesham/hyde,Valloric/hyde,bheesham/hyde,bheesham/hyde,jd/hyde,printerpam/hyde,hyde/hyde,0111001101111010/hyde,jd/hyde,netzverweigerer/hyde,netzverweigerer/hyde,Ali-Razmjoo/hyde,jngeist/hyde,netzverweigerer/hyde,JeNeSuisPasDave/hyde,Ali-Razmjoo/hyde,jngeist/hyde,printerpam/hyde,stiell/hyde,Valloric/hyde,Ali-Razmjoo/hyde | html | ## Code Before:
{% extends "layout.html" %}
{% from "helpers.html" import input_field, textarea, form %}
{% block page_title %}Index Page{% endblock %}
{% block body %}
{%- for article in articles if article.published %}
<div class="article">
<h2><a href="{{ article.href|e }}">{{ article.title|e }}</a></h2>
<p class="meta">written by <a href="{{ article.user.href|e
}}">{{ article.user.username|e }}</a> on {{ article.pub_date|dateformat }}</p>
<div class="text">{{ article.body }}</div>
</div>
{%- endfor %}
{%- call form() %}
<dl>
<dt>Name</dt>
<dd>{{ input_field('name') }}</dd>
<dt>E-Mail</dt>
<dd>{{ input_field('email') }}</dd>
<dt>URL</dt>
<dd>{{ input_field('url') }}</dd>
<dt>Comment</dt>
<dd>{{ textarea('comment') }}</dd>
<dt>Captcha</dt>
<dd>{{ input_field('captcha') }}</dd>
</dl>
{{ input_field(type='submit', value='Submit') }}
{{ input_field('cancel', type='submit', value='Cancel') }}
{%- endcall %}
{% endblock %}
## Instruction:
Fix the use of unknown dateformat filter dateformat in tests.
Use strftime instead. There seems to be no dateformat filter in jinja
documentation.
## Code After:
{% extends "layout.html" %}
{% from "helpers.html" import input_field, textarea, form %}
{% block page_title %}Index Page{% endblock %}
{% block body %}
{%- for article in articles if article.published %}
<div class="article">
<h2><a href="{{ article.href|e }}">{{ article.title|e }}</a></h2>
<p class="meta">written by <a href="{{ article.user.href|e
}}">{{ article.user.username|e }}</a> on {{ article.pub_date.strftime('%Y-%m-%d') }}</p>
<div class="text">{{ article.body }}</div>
</div>
{%- endfor %}
{%- call form() %}
<dl>
<dt>Name</dt>
<dd>{{ input_field('name') }}</dd>
<dt>E-Mail</dt>
<dd>{{ input_field('email') }}</dd>
<dt>URL</dt>
<dd>{{ input_field('url') }}</dd>
<dt>Comment</dt>
<dd>{{ textarea('comment') }}</dd>
<dt>Captcha</dt>
<dd>{{ input_field('captcha') }}</dd>
</dl>
{{ input_field(type='submit', value='Submit') }}
{{ input_field('cancel', type='submit', value='Cancel') }}
{%- endcall %}
{% endblock %}
| {% extends "layout.html" %}
{% from "helpers.html" import input_field, textarea, form %}
{% block page_title %}Index Page{% endblock %}
{% block body %}
{%- for article in articles if article.published %}
<div class="article">
<h2><a href="{{ article.href|e }}">{{ article.title|e }}</a></h2>
<p class="meta">written by <a href="{{ article.user.href|e
- }}">{{ article.user.username|e }}</a> on {{ article.pub_date|dateformat }}</p>
? ^ ^^^^^^^^^
+ }}">{{ article.user.username|e }}</a> on {{ article.pub_date.strftime('%Y-%m-%d') }}</p>
? ^^^^^^^^^^^^^^^^^^ ^^
<div class="text">{{ article.body }}</div>
</div>
{%- endfor %}
{%- call form() %}
<dl>
<dt>Name</dt>
<dd>{{ input_field('name') }}</dd>
<dt>E-Mail</dt>
<dd>{{ input_field('email') }}</dd>
<dt>URL</dt>
<dd>{{ input_field('url') }}</dd>
<dt>Comment</dt>
<dd>{{ textarea('comment') }}</dd>
<dt>Captcha</dt>
<dd>{{ input_field('captcha') }}</dd>
</dl>
{{ input_field(type='submit', value='Submit') }}
{{ input_field('cancel', type='submit', value='Cancel') }}
{%- endcall %}
{% endblock %} | 2 | 0.068966 | 1 | 1 |
90ec10018b9c3e7505306a1f6f874006681419e8 | libraries/provider_docker_service.rb | libraries/provider_docker_service.rb | class Chef
class Provider
class DockerService < Chef::Provider::LWRPBase
# Create a run_context for provider instances.
# Each provider action becomes an isolated recipe
# with its own compile/converger cycle.
use_inline_resources
# Because we're using convergent Chef resources to manage
# machine state, we can say why_run is supported for the
# composite.
def whyrun_supported?
true
end
# Mix in helpers from libraries/helpers.rb
include DockerHelpers
# Put the appropriate bits on disk.
action :create do
# Pull a precompiled binary off the network
remote_file docker_bin do
source parsed_source
checksum parsed_checksum
owner 'root'
group 'root'
mode '0755'
action :create
end
end
action :delete do
file docker_bin do
action :delete
end
end
# These are implemented in subclasses.
#
# Chef::Provider::DockerService::Execute
# Chef::Provider::DockerService::Sysvinit
# Chef::Provider::DockerService::Upstart
# Chef::Provider::DockerService::Systemd
# Chef::Provider::DockerService::Runit
action :start do
end
action :stop do
end
action :restart do
end
action :enable do
end
end
end
end
| class Chef
class Provider
class DockerService < Chef::Provider::LWRPBase
# Create a run_context for provider instances.
# Each provider action becomes an isolated recipe
# with its own compile/converger cycle.
use_inline_resources
# Because we're using convergent Chef resources to manage
# machine state, we can say why_run is supported for the
# composite.
def whyrun_supported?
true
end
# Mix in helpers from libraries/helpers.rb
include DockerHelpers
# Put the appropriate bits on disk.
action :create do
# Pull a precompiled binary off the network
remote_file docker_bin do
source parsed_source
checksum parsed_checksum
owner 'root'
group 'root'
mode '0755'
action :create
end
end
action :delete do
file docker_bin do
action :delete
end
end
# These are implemented in subclasses.
#
# Chef::Provider::DockerService::Execute
# Chef::Provider::DockerService::Sysvinit
# Chef::Provider::DockerService::Upstart
# Chef::Provider::DockerService::Systemd
# Chef::Provider::DockerService::Runit
action :start do
end
action :stop do
end
action :restart do
end
end
end
end
| Remove nonexistent action :enable from docker_service | Remove nonexistent action :enable from docker_service
| Ruby | apache-2.0 | chef-cookbooks/docker,chef-cookbooks/docker,fxposter/chef-docker,fxposter/chef-docker,fxposter/chef-docker,chef-cookbooks/docker | ruby | ## Code Before:
class Chef
class Provider
class DockerService < Chef::Provider::LWRPBase
# Create a run_context for provider instances.
# Each provider action becomes an isolated recipe
# with its own compile/converger cycle.
use_inline_resources
# Because we're using convergent Chef resources to manage
# machine state, we can say why_run is supported for the
# composite.
def whyrun_supported?
true
end
# Mix in helpers from libraries/helpers.rb
include DockerHelpers
# Put the appropriate bits on disk.
action :create do
# Pull a precompiled binary off the network
remote_file docker_bin do
source parsed_source
checksum parsed_checksum
owner 'root'
group 'root'
mode '0755'
action :create
end
end
action :delete do
file docker_bin do
action :delete
end
end
# These are implemented in subclasses.
#
# Chef::Provider::DockerService::Execute
# Chef::Provider::DockerService::Sysvinit
# Chef::Provider::DockerService::Upstart
# Chef::Provider::DockerService::Systemd
# Chef::Provider::DockerService::Runit
action :start do
end
action :stop do
end
action :restart do
end
action :enable do
end
end
end
end
## Instruction:
Remove nonexistent action :enable from docker_service
## Code After:
class Chef
class Provider
class DockerService < Chef::Provider::LWRPBase
# Create a run_context for provider instances.
# Each provider action becomes an isolated recipe
# with its own compile/converger cycle.
use_inline_resources
# Because we're using convergent Chef resources to manage
# machine state, we can say why_run is supported for the
# composite.
def whyrun_supported?
true
end
# Mix in helpers from libraries/helpers.rb
include DockerHelpers
# Put the appropriate bits on disk.
action :create do
# Pull a precompiled binary off the network
remote_file docker_bin do
source parsed_source
checksum parsed_checksum
owner 'root'
group 'root'
mode '0755'
action :create
end
end
action :delete do
file docker_bin do
action :delete
end
end
# These are implemented in subclasses.
#
# Chef::Provider::DockerService::Execute
# Chef::Provider::DockerService::Sysvinit
# Chef::Provider::DockerService::Upstart
# Chef::Provider::DockerService::Systemd
# Chef::Provider::DockerService::Runit
action :start do
end
action :stop do
end
action :restart do
end
end
end
end
| class Chef
class Provider
class DockerService < Chef::Provider::LWRPBase
# Create a run_context for provider instances.
# Each provider action becomes an isolated recipe
# with its own compile/converger cycle.
use_inline_resources
# Because we're using convergent Chef resources to manage
# machine state, we can say why_run is supported for the
# composite.
def whyrun_supported?
true
end
# Mix in helpers from libraries/helpers.rb
include DockerHelpers
# Put the appropriate bits on disk.
action :create do
# Pull a precompiled binary off the network
remote_file docker_bin do
source parsed_source
checksum parsed_checksum
owner 'root'
group 'root'
mode '0755'
action :create
end
end
action :delete do
file docker_bin do
action :delete
end
end
# These are implemented in subclasses.
#
# Chef::Provider::DockerService::Execute
# Chef::Provider::DockerService::Sysvinit
# Chef::Provider::DockerService::Upstart
# Chef::Provider::DockerService::Systemd
# Chef::Provider::DockerService::Runit
action :start do
end
action :stop do
end
action :restart do
end
-
- action :enable do
- end
end
end
end | 3 | 0.051724 | 0 | 3 |
62b8457f802dd66329fa11c26da109b9e36c7502 | advent-of-code/day1/day1.rkt | advent-of-code/day1/day1.rkt |
(define (f2n c)
(match c
[#\( 1]
[#\) -1]
[_ 0]))
(define (calculate-floor s)
(sum (map f2n (string->list s))))
(define (read-floor input-port)
(let ([char (read-char input-port)])
(if (eof-object? char)
char
(f2n char))))
(define (sum lst) (foldl + 0 lst))
(displayln (sum (port->list read-floor)))
|
(define (f2n c)
(match c
[#\( 1]
[#\) -1]
[_ 0]))
(define (read-floor input-port)
(let ([char (read-char input-port)])
(if (eof-object? char)
char
(f2n char))))
(define (follow lst value #:index [index 0] #:accumulator [accumulator 0])
(if (null? lst)
#f
(begin
(set! accumulator (+ (car lst) accumulator))
(if (equal? accumulator value)
index
(follow (cdr lst) value #:index (add1 index) #:accumulator accumulator)))))
(module+ main
(define lst (port->list read-floor))
(displayln (format "Santa stopped floor ~a" (apply + lst)))
(displayln (format "Santa first reaches the basement at position ~a" (add1 (follow lst -1)))))
| Complete part 2 of day 1 | Complete part 2 of day 1
| Racket | mit | winny-/practice,winny-/practice,winny-/practice,winny-/practice | racket | ## Code Before:
(define (f2n c)
(match c
[#\( 1]
[#\) -1]
[_ 0]))
(define (calculate-floor s)
(sum (map f2n (string->list s))))
(define (read-floor input-port)
(let ([char (read-char input-port)])
(if (eof-object? char)
char
(f2n char))))
(define (sum lst) (foldl + 0 lst))
(displayln (sum (port->list read-floor)))
## Instruction:
Complete part 2 of day 1
## Code After:
(define (f2n c)
(match c
[#\( 1]
[#\) -1]
[_ 0]))
(define (read-floor input-port)
(let ([char (read-char input-port)])
(if (eof-object? char)
char
(f2n char))))
(define (follow lst value #:index [index 0] #:accumulator [accumulator 0])
(if (null? lst)
#f
(begin
(set! accumulator (+ (car lst) accumulator))
(if (equal? accumulator value)
index
(follow (cdr lst) value #:index (add1 index) #:accumulator accumulator)))))
(module+ main
(define lst (port->list read-floor))
(displayln (format "Santa stopped floor ~a" (apply + lst)))
(displayln (format "Santa first reaches the basement at position ~a" (add1 (follow lst -1)))))
|
(define (f2n c)
(match c
[#\( 1]
[#\) -1]
[_ 0]))
+
- (define (calculate-floor s)
- (sum (map f2n (string->list s))))
(define (read-floor input-port)
(let ([char (read-char input-port)])
(if (eof-object? char)
char
(f2n char))))
- (define (sum lst) (foldl + 0 lst))
- (displayln (sum (port->list read-floor)))
+ (define (follow lst value #:index [index 0] #:accumulator [accumulator 0])
+ (if (null? lst)
+ #f
+ (begin
+ (set! accumulator (+ (car lst) accumulator))
+ (if (equal? accumulator value)
+ index
+ (follow (cdr lst) value #:index (add1 index) #:accumulator accumulator)))))
+
+
+ (module+ main
+ (define lst (port->list read-floor))
+ (displayln (format "Santa stopped floor ~a" (apply + lst)))
+ (displayln (format "Santa first reaches the basement at position ~a" (add1 (follow lst -1))))) | 19 | 1.1875 | 15 | 4 |
94091bb4e66f1d0a8767382e172dd81d63218601 | clients/android/NewsBlur/res/layout/fragment_itemlist.xml | clients/android/NewsBlur/res/layout/fragment_itemlist.xml | <?xml version="1.0" encoding="utf-8"?>
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="match_parent"
android:layout_height="match_parent"
style="?itemBackground"
android:orientation="vertical" >
<TextView
android:id="@+id/empty_view"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:layout_centerInParent="true"
android:gravity="center"
android:text="@string/empty_list_view_loading"
style="?defaultText"
android:textSize="13dp"
android:textStyle="italic" />
<ListView
android:id="@+id/itemlistfragment_list"
android:layout_width="match_parent"
android:layout_height="match_parent"
style="?divider"
android:dividerHeight="2dp" />
</RelativeLayout>
| <?xml version="1.0" encoding="utf-8"?>
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="match_parent"
android:layout_height="match_parent"
style="?itemBackground"
android:orientation="vertical" >
<RelativeLayout
android:id="@+id/empty_view"
android:layout_width="match_parent"
android:layout_height="match_parent" >
<TextView
android:id="@+id/empty_view_text"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:layout_centerInParent="true"
android:gravity="center"
android:text="@string/empty_list_view_loading"
style="?defaultText"
android:textSize="13dp"
android:textStyle="italic" />
<com.newsblur.view.ProgressThrobber
android:id="@+id/empty_view_loading_throb"
android:layout_width="fill_parent"
android:layout_alignParentTop="true"
android:layout_height="6dp" />
</RelativeLayout>
<ListView
android:id="@+id/itemlistfragment_list"
android:layout_width="match_parent"
android:layout_height="match_parent"
style="?divider"
android:dividerHeight="2dp" />
</RelativeLayout>
| Add a loading throbber to the empty-list-view that matches the footer throbber. | Add a loading throbber to the empty-list-view that matches the footer throbber.
| XML | mit | manderson23/NewsBlur,AlphaCluster/NewsBlur,Suninus/NewsBlur,manderson23/NewsBlur,youprofit/NewsBlur,epiphany27/NewsBlur,petecummings/NewsBlur,AlphaCluster/NewsBlur,canwe/NewsBlur,slava-sh/NewsBlur,nriley/NewsBlur,samuelclay/NewsBlur,epiphany27/NewsBlur,mihaip/NewsBlur,waltharius/NewsBlur,Suninus/NewsBlur,waltharius/NewsBlur,manderson23/NewsBlur,slava-sh/NewsBlur,petecummings/NewsBlur,samuelclay/NewsBlur,AlphaCluster/NewsBlur,samuelclay/NewsBlur,youprofit/NewsBlur,canwe/NewsBlur,Suninus/NewsBlur,samuelclay/NewsBlur,mihaip/NewsBlur,samuelclay/NewsBlur,Suninus/NewsBlur,mihaip/NewsBlur,manderson23/NewsBlur,slava-sh/NewsBlur,stone5495/NewsBlur,AlphaCluster/NewsBlur,AlphaCluster/NewsBlur,lucidbard/NewsBlur,Suninus/NewsBlur,nriley/NewsBlur,mihaip/NewsBlur,youprofit/NewsBlur,stone5495/NewsBlur,slava-sh/NewsBlur,slava-sh/NewsBlur,youprofit/NewsBlur,petecummings/NewsBlur,canwe/NewsBlur,waltharius/NewsBlur,lucidbard/NewsBlur,samuelclay/NewsBlur,AlphaCluster/NewsBlur,petecummings/NewsBlur,dosiecki/NewsBlur,nriley/NewsBlur,epiphany27/NewsBlur,manderson23/NewsBlur,epiphany27/NewsBlur,stone5495/NewsBlur,stone5495/NewsBlur,youprofit/NewsBlur,manderson23/NewsBlur,mihaip/NewsBlur,manderson23/NewsBlur,samuelclay/NewsBlur,dosiecki/NewsBlur,dosiecki/NewsBlur,nriley/NewsBlur,slava-sh/NewsBlur,petecummings/NewsBlur,Suninus/NewsBlur,mihaip/NewsBlur,slava-sh/NewsBlur,canwe/NewsBlur,dosiecki/NewsBlur,Suninus/NewsBlur,nriley/NewsBlur,slava-sh/NewsBlur,waltharius/NewsBlur,youprofit/NewsBlur,lucidbard/NewsBlur,manderson23/NewsBlur,petecummings/NewsBlur,nriley/NewsBlur,petecummings/NewsBlur,canwe/NewsBlur,canwe/NewsBlur,AlphaCluster/NewsBlur,manderson23/NewsBlur,waltharius/NewsBlur,lucidbard/NewsBlur,stone5495/NewsBlur,lucidbard/NewsBlur,epiphany27/NewsBlur,nriley/NewsBlur,manderson23/NewsBlur,samuelclay/NewsBlur,mihaip/NewsBlur,canwe/NewsBlur,mihaip/NewsBlur,dosiecki/NewsBlur,Suninus/NewsBlur,waltharius/NewsBlur,petecummings/NewsBlur,AlphaCluster/NewsBlur,lucidbard/NewsBlur,waltha
rius/NewsBlur,Suninus/NewsBlur,epiphany27/NewsBlur,dosiecki/NewsBlur,AlphaCluster/NewsBlur,epiphany27/NewsBlur,stone5495/NewsBlur,lucidbard/NewsBlur,canwe/NewsBlur,epiphany27/NewsBlur,slava-sh/NewsBlur,waltharius/NewsBlur,dosiecki/NewsBlur,petecummings/NewsBlur,samuelclay/NewsBlur,epiphany27/NewsBlur,slava-sh/NewsBlur,stone5495/NewsBlur,dosiecki/NewsBlur,stone5495/NewsBlur,dosiecki/NewsBlur,epiphany27/NewsBlur,nriley/NewsBlur,dosiecki/NewsBlur,canwe/NewsBlur,youprofit/NewsBlur,mihaip/NewsBlur,Suninus/NewsBlur,youprofit/NewsBlur,samuelclay/NewsBlur,mihaip/NewsBlur,lucidbard/NewsBlur,canwe/NewsBlur,samuelclay/NewsBlur,petecummings/NewsBlur,AlphaCluster/NewsBlur | xml | ## Code Before:
<?xml version="1.0" encoding="utf-8"?>
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="match_parent"
android:layout_height="match_parent"
style="?itemBackground"
android:orientation="vertical" >
<TextView
android:id="@+id/empty_view"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:layout_centerInParent="true"
android:gravity="center"
android:text="@string/empty_list_view_loading"
style="?defaultText"
android:textSize="13dp"
android:textStyle="italic" />
<ListView
android:id="@+id/itemlistfragment_list"
android:layout_width="match_parent"
android:layout_height="match_parent"
style="?divider"
android:dividerHeight="2dp" />
</RelativeLayout>
## Instruction:
Add a loading throbber to the empty-list-view that matches the footer throbber.
## Code After:
<?xml version="1.0" encoding="utf-8"?>
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="match_parent"
android:layout_height="match_parent"
style="?itemBackground"
android:orientation="vertical" >
<RelativeLayout
android:id="@+id/empty_view"
android:layout_width="match_parent"
android:layout_height="match_parent" >
<TextView
android:id="@+id/empty_view_text"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:layout_centerInParent="true"
android:gravity="center"
android:text="@string/empty_list_view_loading"
style="?defaultText"
android:textSize="13dp"
android:textStyle="italic" />
<com.newsblur.view.ProgressThrobber
android:id="@+id/empty_view_loading_throb"
android:layout_width="fill_parent"
android:layout_alignParentTop="true"
android:layout_height="6dp" />
</RelativeLayout>
<ListView
android:id="@+id/itemlistfragment_list"
android:layout_width="match_parent"
android:layout_height="match_parent"
style="?divider"
android:dividerHeight="2dp" />
</RelativeLayout>
| <?xml version="1.0" encoding="utf-8"?>
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="match_parent"
android:layout_height="match_parent"
style="?itemBackground"
android:orientation="vertical" >
- <TextView
+ <RelativeLayout
android:id="@+id/empty_view"
android:layout_width="match_parent"
- android:layout_height="match_parent"
+ android:layout_height="match_parent" >
? ++
+
+ <TextView
+ android:id="@+id/empty_view_text"
+ android:layout_width="match_parent"
+ android:layout_height="match_parent"
- android:layout_centerInParent="true"
+ android:layout_centerInParent="true"
? ++++
- android:gravity="center"
+ android:gravity="center"
? ++++
- android:text="@string/empty_list_view_loading"
+ android:text="@string/empty_list_view_loading"
? ++++
- style="?defaultText"
+ style="?defaultText"
? ++++
- android:textSize="13dp"
+ android:textSize="13dp"
? ++++
- android:textStyle="italic" />
+ android:textStyle="italic" />
? ++++
+
+ <com.newsblur.view.ProgressThrobber
+ android:id="@+id/empty_view_loading_throb"
+ android:layout_width="fill_parent"
+ android:layout_alignParentTop="true"
+ android:layout_height="6dp" />
+ </RelativeLayout>
+
<ListView
android:id="@+id/itemlistfragment_list"
android:layout_width="match_parent"
android:layout_height="match_parent"
style="?divider"
android:dividerHeight="2dp" />
</RelativeLayout> | 29 | 1.115385 | 21 | 8 |
f53bba244fef1584630071c66ca36de606b17ddc | metadata/cat.pantsu.nyaapantsu.txt | metadata/cat.pantsu.nyaapantsu.txt | Categories:Multimedia
License:MIT
Web Site:https://nyaa.pantsu.cat
Source Code:https://github.com/NyaaPantsu/NyaaPantsu-android-app
Issue Tracker:https://github.com/NyaaPantsu/NyaaPantsu-android-app/issues
Summary:An app interface interacting with NyaaPantsu
Description:
This app is providing the ability to search on NyaaPantsu and upload directly
into it.
.
Repo Type:git
Repo:https://github.com/NyaaPantsu/NyaaPantsu-android-app
Build:1.0,1
commit=v1.0
subdir=app
gradle=yes
output=build/outputs/apk/release/*-release-unsigned.apk
Auto Update Mode:None
Update Check Mode:Tags
Current Version:1.0
Current Version Code:1
| Categories:Multimedia
License:MIT
Web Site:https://nyaa.pantsu.cat
Source Code:https://github.com/NyaaPantsu/NyaaPantsu-android-app
Issue Tracker:https://github.com/NyaaPantsu/NyaaPantsu-android-app/issues
Auto Name:NyaaPantsu
Summary:An app interface interacting with NyaaPantsu
Description:
This app is providing the ability to search on NyaaPantsu and upload directly
into it.
.
Repo Type:git
Repo:https://github.com/NyaaPantsu/NyaaPantsu-android-app
Build:1.0,1
commit=v1.0
subdir=app
gradle=yes
output=build/outputs/apk/release/*-release-unsigned.apk
Auto Update Mode:None
Update Check Mode:Tags
Current Version:1.1
Current Version Code:2
| Update CV of NyaaPantsu to 1.1 (2) | Update CV of NyaaPantsu to 1.1 (2)
| Text | agpl-3.0 | f-droid/fdroid-data,f-droid/fdroiddata,f-droid/fdroiddata | text | ## Code Before:
Categories:Multimedia
License:MIT
Web Site:https://nyaa.pantsu.cat
Source Code:https://github.com/NyaaPantsu/NyaaPantsu-android-app
Issue Tracker:https://github.com/NyaaPantsu/NyaaPantsu-android-app/issues
Summary:An app interface interacting with NyaaPantsu
Description:
This app is providing the ability to search on NyaaPantsu and upload directly
into it.
.
Repo Type:git
Repo:https://github.com/NyaaPantsu/NyaaPantsu-android-app
Build:1.0,1
commit=v1.0
subdir=app
gradle=yes
output=build/outputs/apk/release/*-release-unsigned.apk
Auto Update Mode:None
Update Check Mode:Tags
Current Version:1.0
Current Version Code:1
## Instruction:
Update CV of NyaaPantsu to 1.1 (2)
## Code After:
Categories:Multimedia
License:MIT
Web Site:https://nyaa.pantsu.cat
Source Code:https://github.com/NyaaPantsu/NyaaPantsu-android-app
Issue Tracker:https://github.com/NyaaPantsu/NyaaPantsu-android-app/issues
Auto Name:NyaaPantsu
Summary:An app interface interacting with NyaaPantsu
Description:
This app is providing the ability to search on NyaaPantsu and upload directly
into it.
.
Repo Type:git
Repo:https://github.com/NyaaPantsu/NyaaPantsu-android-app
Build:1.0,1
commit=v1.0
subdir=app
gradle=yes
output=build/outputs/apk/release/*-release-unsigned.apk
Auto Update Mode:None
Update Check Mode:Tags
Current Version:1.1
Current Version Code:2
| Categories:Multimedia
License:MIT
Web Site:https://nyaa.pantsu.cat
Source Code:https://github.com/NyaaPantsu/NyaaPantsu-android-app
Issue Tracker:https://github.com/NyaaPantsu/NyaaPantsu-android-app/issues
+ Auto Name:NyaaPantsu
Summary:An app interface interacting with NyaaPantsu
Description:
This app is providing the ability to search on NyaaPantsu and upload directly
into it.
.
Repo Type:git
Repo:https://github.com/NyaaPantsu/NyaaPantsu-android-app
Build:1.0,1
commit=v1.0
subdir=app
gradle=yes
output=build/outputs/apk/release/*-release-unsigned.apk
Auto Update Mode:None
Update Check Mode:Tags
- Current Version:1.0
? ^
+ Current Version:1.1
? ^
- Current Version Code:1
? ^
+ Current Version Code:2
? ^
| 5 | 0.2 | 3 | 2 |
62634879192e51b9f938da301534b08cf49d2e85 | methodMang.py | methodMang.py | from methods import output, data
import tokenz
import interpreter
intp = interpreter.Interpreter()
class UndefinedFunctionError(Exception): pass
class Call:
def __init__(self, method, args):
self.method = method
self.a = args
self.vals = []
for t in self.a:
self.vals.append(str(t.val))
self.valid = []
self.valid = self.valid + [(output.Output().methods, output.Output())]
self.valid = self.valid + [(data.Data().methods, data.Data())]
def run(self):
f = False
for m in self.valid:
if self.method in m[0]:
args2pass = ""
args2pass = " ".join(self.vals)
args2pass = intp.eval(args2pass)
return_val = m[1].funcs[m[0].index(self.method)](args2pass)
f = True
break
if not f:
return_val = None
raise UndefinedFunctionError("Attempted to run function %s, but was undefined" % self.method)
return return_val
| from methods import io, data
import tokenz
import interpreter
intp = interpreter.Interpreter()
class UndefinedFunctionError(Exception): pass
def reg(it, c):
it.valid = it.valid + [(c().methods, c())]
class Call:
def __init__(self, method, args):
self.method = method
self.a = args
self.vals = []
for t in self.a:
self.vals.append(str(t.val))
self.valid = []
reg(self, io.IO)
reg(self, data.Data)
def run(self):
f = False
for m in self.valid:
if self.method in m[0]:
args2pass = ""
args2pass = " ".join(self.vals)
args2pass = intp.eval(args2pass)
return_val = m[1].funcs[m[0].index(self.method)](args2pass)
f = True
break
if not f:
return_val = None
raise UndefinedFunctionError("Attempted to run function %s, but was undefined" % self.method)
return return_val
| Rename Output + smaller Register | Rename Output + smaller Register
| Python | mit | Icelys/Scotch-Language | python | ## Code Before:
from methods import output, data
import tokenz
import interpreter
intp = interpreter.Interpreter()
class UndefinedFunctionError(Exception): pass
class Call:
def __init__(self, method, args):
self.method = method
self.a = args
self.vals = []
for t in self.a:
self.vals.append(str(t.val))
self.valid = []
self.valid = self.valid + [(output.Output().methods, output.Output())]
self.valid = self.valid + [(data.Data().methods, data.Data())]
def run(self):
f = False
for m in self.valid:
if self.method in m[0]:
args2pass = ""
args2pass = " ".join(self.vals)
args2pass = intp.eval(args2pass)
return_val = m[1].funcs[m[0].index(self.method)](args2pass)
f = True
break
if not f:
return_val = None
raise UndefinedFunctionError("Attempted to run function %s, but was undefined" % self.method)
return return_val
## Instruction:
Rename Output + smaller Register
## Code After:
from methods import io, data
import tokenz
import interpreter
intp = interpreter.Interpreter()
class UndefinedFunctionError(Exception): pass
def reg(it, c):
it.valid = it.valid + [(c().methods, c())]
class Call:
def __init__(self, method, args):
self.method = method
self.a = args
self.vals = []
for t in self.a:
self.vals.append(str(t.val))
self.valid = []
reg(self, io.IO)
reg(self, data.Data)
def run(self):
f = False
for m in self.valid:
if self.method in m[0]:
args2pass = ""
args2pass = " ".join(self.vals)
args2pass = intp.eval(args2pass)
return_val = m[1].funcs[m[0].index(self.method)](args2pass)
f = True
break
if not f:
return_val = None
raise UndefinedFunctionError("Attempted to run function %s, but was undefined" % self.method)
return return_val
| - from methods import output, data
? -----
+ from methods import io, data
? +
import tokenz
import interpreter
intp = interpreter.Interpreter()
class UndefinedFunctionError(Exception): pass
+ def reg(it, c):
+ it.valid = it.valid + [(c().methods, c())]
class Call:
def __init__(self, method, args):
self.method = method
self.a = args
self.vals = []
for t in self.a:
self.vals.append(str(t.val))
self.valid = []
-
- self.valid = self.valid + [(output.Output().methods, output.Output())]
- self.valid = self.valid + [(data.Data().methods, data.Data())]
+
+ reg(self, io.IO)
+ reg(self, data.Data)
def run(self):
f = False
for m in self.valid:
if self.method in m[0]:
args2pass = ""
args2pass = " ".join(self.vals)
args2pass = intp.eval(args2pass)
return_val = m[1].funcs[m[0].index(self.method)](args2pass)
f = True
break
if not f:
return_val = None
raise UndefinedFunctionError("Attempted to run function %s, but was undefined" % self.method)
return return_val | 10 | 0.232558 | 6 | 4 |
b5348fda99b1ae3fb056dd53f23e96d29e9c335b | _config.php | _config.php | <?php
Object::add_extension('SiteTree', 'AccessKeyExtension');
Object::add_extension("Page_Controller","AccessKeyExtension");
?> | <?php
Object::add_extension('SiteTree', 'AccessKeyExtension');
Object::add_extension("Page_Controller","AccessKeysControllerExtension");
DataObject::add_extension('SiteConfig', 'AccessKeysSiteConfig');
?> | Add extension to site config | ENHANCEMENT: Add extension to site config
| PHP | mit | gordonbanderson/weboftalent-access-keys | php | ## Code Before:
<?php
Object::add_extension('SiteTree', 'AccessKeyExtension');
Object::add_extension("Page_Controller","AccessKeyExtension");
?>
## Instruction:
ENHANCEMENT: Add extension to site config
## Code After:
<?php
Object::add_extension('SiteTree', 'AccessKeyExtension');
Object::add_extension("Page_Controller","AccessKeysControllerExtension");
DataObject::add_extension('SiteConfig', 'AccessKeysSiteConfig');
?> | <?php
Object::add_extension('SiteTree', 'AccessKeyExtension');
- Object::add_extension("Page_Controller","AccessKeyExtension");
+ Object::add_extension("Page_Controller","AccessKeysControllerExtension");
? +++++++++++
+ DataObject::add_extension('SiteConfig', 'AccessKeysSiteConfig');
?> | 3 | 0.75 | 2 | 1 |
21319fc8d22469911c1cbcc41ec7320b1d6141e9 | powerline/bindings/i3/powerline-i3.py | powerline/bindings/i3/powerline-i3.py |
from powerline import Powerline
from powerline.lib.monotonic import monotonic
import sys
import time
import i3
from threading import Lock
name = 'wm'
if len( sys.argv ) > 1:
name = sys.argv[1]
powerline = Powerline(name, renderer_module='i3bgbar')
powerline.update_renderer()
interval = 0.5
print '{"version": 1, "custom_workspace": true}'
print '['
print ' [[],[]]'
lock = Lock()
def render( event=None, data=None, sub=None ):
global lock
lock.acquire()
s = '[\n' + powerline.render(side='right')[:-2] + '\n]\n'
s += ',[\n' + powerline.render(side='left' )[:-2] + '\n]'
print ',[\n' + s + '\n]'
sys.stdout.flush()
lock.release()
sub = i3.Subscription( render, 'workspace' )
while True:
start_time = monotonic()
render()
time.sleep(max(interval - (monotonic() - start_time), 0.1))
|
from powerline import Powerline
from powerline.lib.monotonic import monotonic
import sys
import time
import i3
from threading import Lock
name = 'wm'
if len( sys.argv ) > 1:
name = sys.argv[1]
powerline = Powerline(name, renderer_module='i3bgbar')
powerline.update_renderer()
interval = 0.5
print '{"version": 1, "custom_workspace": true}'
print '['
print ' [[],[]]'
lock = Lock()
def render( event=None, data=None, sub=None ):
global lock
with lock:
s = '[\n' + powerline.render(side='right')[:-2] + '\n]\n'
s += ',[\n' + powerline.render(side='left' )[:-2] + '\n]'
print ',[\n' + s + '\n]'
sys.stdout.flush()
sub = i3.Subscription( render, 'workspace' )
while True:
start_time = monotonic()
render()
time.sleep(max(interval - (monotonic() - start_time), 0.1))
| Use 'with' instead of lock.acquire/release() | Use 'with' instead of lock.acquire/release()
| Python | mit | DoctorJellyface/powerline,bartvm/powerline,areteix/powerline,russellb/powerline,seanfisk/powerline,s0undt3ch/powerline,IvanAli/powerline,cyrixhero/powerline,blindFS/powerline,keelerm84/powerline,kenrachynski/powerline,IvanAli/powerline,darac/powerline,xfumihiro/powerline,Liangjianghao/powerline,darac/powerline,QuLogic/powerline,xxxhycl2010/powerline,EricSB/powerline,firebitsbr/powerline,cyrixhero/powerline,russellb/powerline,wfscheper/powerline,wfscheper/powerline,cyrixhero/powerline,seanfisk/powerline,firebitsbr/powerline,magus424/powerline,dragon788/powerline,junix/powerline,magus424/powerline,kenrachynski/powerline,blindFS/powerline,magus424/powerline,EricSB/powerline,areteix/powerline,lukw00/powerline,bartvm/powerline,prvnkumar/powerline,bezhermoso/powerline,blindFS/powerline,QuLogic/powerline,DoctorJellyface/powerline,dragon788/powerline,s0undt3ch/powerline,QuLogic/powerline,xxxhycl2010/powerline,IvanAli/powerline,firebitsbr/powerline,bezhermoso/powerline,bartvm/powerline,keelerm84/powerline,xfumihiro/powerline,s0undt3ch/powerline,S0lll0s/powerline,xxxhycl2010/powerline,russellb/powerline,Liangjianghao/powerline,kenrachynski/powerline,Luffin/powerline,wfscheper/powerline,prvnkumar/powerline,Luffin/powerline,lukw00/powerline,bezhermoso/powerline,junix/powerline,S0lll0s/powerline,Luffin/powerline,darac/powerline,S0lll0s/powerline,seanfisk/powerline,dragon788/powerline,EricSB/powerline,DoctorJellyface/powerline,lukw00/powerline,Liangjianghao/powerline,areteix/powerline,xfumihiro/powerline,junix/powerline,prvnkumar/powerline | python | ## Code Before:
from powerline import Powerline
from powerline.lib.monotonic import monotonic
import sys
import time
import i3
from threading import Lock
name = 'wm'
if len( sys.argv ) > 1:
name = sys.argv[1]
powerline = Powerline(name, renderer_module='i3bgbar')
powerline.update_renderer()
interval = 0.5
print '{"version": 1, "custom_workspace": true}'
print '['
print ' [[],[]]'
lock = Lock()
def render( event=None, data=None, sub=None ):
global lock
lock.acquire()
s = '[\n' + powerline.render(side='right')[:-2] + '\n]\n'
s += ',[\n' + powerline.render(side='left' )[:-2] + '\n]'
print ',[\n' + s + '\n]'
sys.stdout.flush()
lock.release()
sub = i3.Subscription( render, 'workspace' )
while True:
start_time = monotonic()
render()
time.sleep(max(interval - (monotonic() - start_time), 0.1))
## Instruction:
Use 'with' instead of lock.acquire/release()
## Code After:
from powerline import Powerline
from powerline.lib.monotonic import monotonic
import sys
import time
import i3
from threading import Lock
name = 'wm'
if len( sys.argv ) > 1:
name = sys.argv[1]
powerline = Powerline(name, renderer_module='i3bgbar')
powerline.update_renderer()
interval = 0.5
print '{"version": 1, "custom_workspace": true}'
print '['
print ' [[],[]]'
lock = Lock()
def render( event=None, data=None, sub=None ):
global lock
with lock:
s = '[\n' + powerline.render(side='right')[:-2] + '\n]\n'
s += ',[\n' + powerline.render(side='left' )[:-2] + '\n]'
print ',[\n' + s + '\n]'
sys.stdout.flush()
sub = i3.Subscription( render, 'workspace' )
while True:
start_time = monotonic()
render()
time.sleep(max(interval - (monotonic() - start_time), 0.1))
|
from powerline import Powerline
from powerline.lib.monotonic import monotonic
import sys
import time
import i3
from threading import Lock
name = 'wm'
if len( sys.argv ) > 1:
name = sys.argv[1]
powerline = Powerline(name, renderer_module='i3bgbar')
powerline.update_renderer()
interval = 0.5
print '{"version": 1, "custom_workspace": true}'
print '['
print ' [[],[]]'
lock = Lock()
def render( event=None, data=None, sub=None ):
global lock
- lock.acquire()
+ with lock:
- s = '[\n' + powerline.render(side='right')[:-2] + '\n]\n'
+ s = '[\n' + powerline.render(side='right')[:-2] + '\n]\n'
? +
- s += ',[\n' + powerline.render(side='left' )[:-2] + '\n]'
+ s += ',[\n' + powerline.render(side='left' )[:-2] + '\n]'
? +
- print ',[\n' + s + '\n]'
+ print ',[\n' + s + '\n]'
? +
- sys.stdout.flush()
+ sys.stdout.flush()
? +
- lock.release()
sub = i3.Subscription( render, 'workspace' )
while True:
start_time = monotonic()
render()
time.sleep(max(interval - (monotonic() - start_time), 0.1)) | 11 | 0.289474 | 5 | 6 |
49d93c51b42214e4ac7160f4fb9e2b6e3f03a614 | src/server/index.js | src/server/index.js | const config = require('./config');
if (config.isProduction || require('piping')(config.piping)) {
if (!process.env.NODE_ENV)
throw new Error('Environment variable NODE_ENV must be set.');
// Load and use polyfill for ECMA-402.
if (!global.Intl)
global.Intl = require('intl');
require('babel/register')({optional: ['es7']});
// To ignore webpack custom loaders on server.
config.webpackStylesExtensions.forEach(function(ext) {
require.extensions['.' + ext] = function() {};
});
require('./main');
}
| const config = require('./config');
if (config.isProduction || require('piping')(config.piping)) {
if (!process.env.NODE_ENV)
throw new Error('Environment variable NODE_ENV isn\'t set. Remember it\'s up your production enviroment to set NODE_ENV and maybe other variables. To run app locally in production mode, use gulp -p command instead.');
// Load and use polyfill for ECMA-402.
if (!global.Intl)
global.Intl = require('intl');
require('babel/register')({optional: ['es7']});
// To ignore webpack custom loaders on server.
config.webpackStylesExtensions.forEach(function(ext) {
require.extensions['.' + ext] = function() {};
});
require('./main');
}
| Improve error message for npm start without NODE_ENV. | Improve error message for npm start without NODE_ENV.
| JavaScript | mit | skyuplam/debt_mgmt,christophediprima/este,XeeD/este,GarrettSmith/schizophrenia,SidhNor/este-cordova-starter-kit,zanj2006/este,skallet/este,estaub/my-este,neozhangthe1/framedrop-web,TheoMer/Gyms-Of-The-World,glaserp/Maturita-Project,AlesJiranek/este,GarrettSmith/schizophrenia,nason/este,puzzfuzz/othello-redux,XeeD/test,sikhote/davidsinclair,sikhote/davidsinclair,blueberryapps/este,Brainfock/este,sljuka/portfolio-este,gaurav-/este,neozhangthe1/framedrop-web,robinpokorny/este,youprofit/este,laxplaer/este,MartinPavlik/este,ViliamKopecky/este,syroegkin/mikora.eu,aindre/este-example,Tzitzian/Oppex,nezaidu/este,glaserp/Maturita-Project,TheoMer/Gyms-Of-The-World,robinpokorny/este,hsrob/league-este,christophediprima/este,skaldo/este,zanj2006/este,estaub/my-este,neozhangthe1/framedrop-web,skallet/este,syroegkin/mikora.eu,langpavel/este,amrsekilly/updatedEste,blueberryapps/este,cjk/smart-home-app,vacuumlabs/este,sikhote/davidsinclair,este/este,puzzfuzz/othello-redux,steida/este,XeeD/este,sljuka/portfolio-este,robinpokorny/este,abelaska/este,skallet/este,vacuumlabs/este,este/este,abelaska/este,cazacugmihai/este,syroegkin/mikora.eu,AugustinLF/este,obimod/este,shawn-dsz/este,este/este,steida/este,christophediprima/este,langpavel/este,neozhangthe1/framedrop-web,TheoMer/este,amrsekilly/updatedEste,christophediprima/este,jaeh/este,skyuplam/debt_mgmt,TheoMer/Gyms-Of-The-World,GarrettSmith/schizophrenia,AugustinLF/este,amrsekilly/updatedEste,aindre/este-example,TheoMer/este,neozhangthe1/framedrop-web,XeeD/test,TheoMer/este,este/este,AlesJiranek/este,estaub/my-este,abelaska/este | javascript | ## Code Before:
const config = require('./config');
if (config.isProduction || require('piping')(config.piping)) {
if (!process.env.NODE_ENV)
throw new Error('Environment variable NODE_ENV must be set.');
// Load and use polyfill for ECMA-402.
if (!global.Intl)
global.Intl = require('intl');
require('babel/register')({optional: ['es7']});
// To ignore webpack custom loaders on server.
config.webpackStylesExtensions.forEach(function(ext) {
require.extensions['.' + ext] = function() {};
});
require('./main');
}
## Instruction:
Improve error message for npm start without NODE_ENV.
## Code After:
const config = require('./config');
if (config.isProduction || require('piping')(config.piping)) {
if (!process.env.NODE_ENV)
throw new Error('Environment variable NODE_ENV isn\'t set. Remember it\'s up your production enviroment to set NODE_ENV and maybe other variables. To run app locally in production mode, use gulp -p command instead.');
// Load and use polyfill for ECMA-402.
if (!global.Intl)
global.Intl = require('intl');
require('babel/register')({optional: ['es7']});
// To ignore webpack custom loaders on server.
config.webpackStylesExtensions.forEach(function(ext) {
require.extensions['.' + ext] = function() {};
});
require('./main');
}
| const config = require('./config');
if (config.isProduction || require('piping')(config.piping)) {
if (!process.env.NODE_ENV)
- throw new Error('Environment variable NODE_ENV must be set.');
+ throw new Error('Environment variable NODE_ENV isn\'t set. Remember it\'s up your production enviroment to set NODE_ENV and maybe other variables. To run app locally in production mode, use gulp -p command instead.');
// Load and use polyfill for ECMA-402.
if (!global.Intl)
global.Intl = require('intl');
require('babel/register')({optional: ['es7']});
// To ignore webpack custom loaders on server.
config.webpackStylesExtensions.forEach(function(ext) {
require.extensions['.' + ext] = function() {};
});
require('./main');
} | 2 | 0.105263 | 1 | 1 |
e859feaed2e814ea26b19179f706f365323523d7 | conda.recipe/build.sh | conda.recipe/build.sh | if [ "$(uname)" == "Darwin" ]; then
# C++11 finagling for Mac OSX
export CC=clang
export CXX=clang++
export MACOSX_VERSION_MIN="10.9"
CXXFLAGS="${CXXFLAGS} -mmacosx-version-min=${MACOSX_VERSION_MIN}"
CXXFLAGS="${CXXFLAGS} -Wno-error=unused-command-line-argument"
export LDFLAGS="${LDFLAGS} -mmacosx-version-min=${MACOSX_VERSION_MIN}"
export LINKFLAGS="${LDFLAGS}"
export MACOSX_DEPLOYMENT_TARGET=10.9
# make sure clang-format is installed
# e.g. brew install clang-format
# make sure autoreconf can be found
# e.g. brew install autoconf
# for graphviz, also brew install autogen libtool
fi
make install PREFIX=$PREFIX
| if [ "$(uname)" == "Darwin" ]; then
# C++11 finagling for Mac OSX
export CC=clang
export CXX=clang++
export MACOSX_VERSION_MIN="10.9"
CXXFLAGS="${CXXFLAGS} -mmacosx-version-min=${MACOSX_VERSION_MIN}"
CXXFLAGS="${CXXFLAGS} -Wno-error=unused-command-line-argument"
export LDFLAGS="${LDFLAGS} -mmacosx-version-min=${MACOSX_VERSION_MIN}"
export LINKFLAGS="${LDFLAGS}"
export MACOSX_DEPLOYMENT_TARGET=10.9
# make sure clang-format is installed
# e.g. brew install clang-format
# make sure autoreconf can be found
# e.g. brew install autoconf
# for graphviz, also brew install autogen libtool
fi
make install PREFIX=$PREFIX
# add the /util directory to the PATH inside this conda env
# http://conda.pydata.org/docs/using/envs.html#saved-environment-variables
cat << EOF > ${PKG_NAME}-env-activate.sh
#!/usr/bin/env bash
export PRE_${PKG_NAME}_PATH=\$PATH
export PATH=\$CONDA_PREFIX/util:$PATH
EOF
cat << EOF > ${PKG_NAME}-env-deactivate.sh
#!/usr/bin/env bash
export PATH=\$PRE_${PKG_NAME}_PATH
unset PRE_${PKG_NAME}_PATH
EOF
mkdir -p $PREFIX/etc/conda/activate.d
mkdir -p $PREFIX/etc/conda/deactivate.d
mv ${PKG_NAME}-env-activate.sh $PREFIX/etc/conda/activate.d
mv ${PKG_NAME}-env-deactivate.sh $PREFIX/etc/conda/deactivate.d
| Add the deepdive util dir to PATH for the conda env | Add the deepdive util dir to PATH for the conda env
| Shell | apache-2.0 | sky-xu/deepdive,sky-xu/deepdive,HazyResearch/deepdive,HazyResearch/deepdive,HazyResearch/deepdive,shahin/deepdive,shahin/deepdive,HazyResearch/deepdive | shell | ## Code Before:
if [ "$(uname)" == "Darwin" ]; then
# C++11 finagling for Mac OSX
export CC=clang
export CXX=clang++
export MACOSX_VERSION_MIN="10.9"
CXXFLAGS="${CXXFLAGS} -mmacosx-version-min=${MACOSX_VERSION_MIN}"
CXXFLAGS="${CXXFLAGS} -Wno-error=unused-command-line-argument"
export LDFLAGS="${LDFLAGS} -mmacosx-version-min=${MACOSX_VERSION_MIN}"
export LINKFLAGS="${LDFLAGS}"
export MACOSX_DEPLOYMENT_TARGET=10.9
# make sure clang-format is installed
# e.g. brew install clang-format
# make sure autoreconf can be found
# e.g. brew install autoconf
# for graphviz, also brew install autogen libtool
fi
make install PREFIX=$PREFIX
## Instruction:
Add the deepdive util dir to PATH for the conda env
## Code After:
if [ "$(uname)" == "Darwin" ]; then
# C++11 finagling for Mac OSX
export CC=clang
export CXX=clang++
export MACOSX_VERSION_MIN="10.9"
CXXFLAGS="${CXXFLAGS} -mmacosx-version-min=${MACOSX_VERSION_MIN}"
CXXFLAGS="${CXXFLAGS} -Wno-error=unused-command-line-argument"
export LDFLAGS="${LDFLAGS} -mmacosx-version-min=${MACOSX_VERSION_MIN}"
export LINKFLAGS="${LDFLAGS}"
export MACOSX_DEPLOYMENT_TARGET=10.9
# make sure clang-format is installed
# e.g. brew install clang-format
# make sure autoreconf can be found
# e.g. brew install autoconf
# for graphviz, also brew install autogen libtool
fi
make install PREFIX=$PREFIX
# add the /util directory to the PATH inside this conda env
# http://conda.pydata.org/docs/using/envs.html#saved-environment-variables
cat << EOF > ${PKG_NAME}-env-activate.sh
#!/usr/bin/env bash
export PRE_${PKG_NAME}_PATH=\$PATH
export PATH=\$CONDA_PREFIX/util:$PATH
EOF
cat << EOF > ${PKG_NAME}-env-deactivate.sh
#!/usr/bin/env bash
export PATH=\$PRE_${PKG_NAME}_PATH
unset PRE_${PKG_NAME}_PATH
EOF
mkdir -p $PREFIX/etc/conda/activate.d
mkdir -p $PREFIX/etc/conda/deactivate.d
mv ${PKG_NAME}-env-activate.sh $PREFIX/etc/conda/activate.d
mv ${PKG_NAME}-env-deactivate.sh $PREFIX/etc/conda/deactivate.d
| if [ "$(uname)" == "Darwin" ]; then
# C++11 finagling for Mac OSX
export CC=clang
export CXX=clang++
export MACOSX_VERSION_MIN="10.9"
CXXFLAGS="${CXXFLAGS} -mmacosx-version-min=${MACOSX_VERSION_MIN}"
CXXFLAGS="${CXXFLAGS} -Wno-error=unused-command-line-argument"
export LDFLAGS="${LDFLAGS} -mmacosx-version-min=${MACOSX_VERSION_MIN}"
export LINKFLAGS="${LDFLAGS}"
export MACOSX_DEPLOYMENT_TARGET=10.9
# make sure clang-format is installed
# e.g. brew install clang-format
# make sure autoreconf can be found
# e.g. brew install autoconf
# for graphviz, also brew install autogen libtool
fi
make install PREFIX=$PREFIX
+
+ # add the /util directory to the PATH inside this conda env
+ # http://conda.pydata.org/docs/using/envs.html#saved-environment-variables
+ cat << EOF > ${PKG_NAME}-env-activate.sh
+ #!/usr/bin/env bash
+
+ export PRE_${PKG_NAME}_PATH=\$PATH
+ export PATH=\$CONDA_PREFIX/util:$PATH
+ EOF
+
+ cat << EOF > ${PKG_NAME}-env-deactivate.sh
+ #!/usr/bin/env bash
+
+ export PATH=\$PRE_${PKG_NAME}_PATH
+ unset PRE_${PKG_NAME}_PATH
+ EOF
+
+ mkdir -p $PREFIX/etc/conda/activate.d
+ mkdir -p $PREFIX/etc/conda/deactivate.d
+
+ mv ${PKG_NAME}-env-activate.sh $PREFIX/etc/conda/activate.d
+ mv ${PKG_NAME}-env-deactivate.sh $PREFIX/etc/conda/deactivate.d | 22 | 1.047619 | 22 | 0 |
77cfd5da14031b537de654f60e4c333162069a50 | common/docker-compose.yml | common/docker-compose.yml | sns:
image: alicefuzier/fake-sns
ports:
- "9292:9292"
sqs:
image: s12v/elasticmq
ports:
- "9324:9324"
elasticsearch:
image: docker.elastic.co/elasticsearch/elasticsearch:5.4.0
ports:
- "9200:9200"
- "9300:9300"
environment:
- "http.host=0.0.0.0"
- "transport.host=0.0.0.0"
- "cluster.name=wellcome"
| dynamodb:
image: peopleperhour/dynamodb
ports:
- "45678:8000"
sns:
image: alicefuzier/fake-sns
ports:
- "9292:9292"
sqs:
image: s12v/elasticmq
ports:
- "9324:9324"
elasticsearch:
image: docker.elastic.co/elasticsearch/elasticsearch:5.4.0
ports:
- "9200:9200"
- "9300:9300"
environment:
- "http.host=0.0.0.0"
- "transport.host=0.0.0.0"
- "cluster.name=wellcome"
| Add dynamo docker image to common so that tests for common pass | Add dynamo docker image to common so that tests for common pass
| YAML | mit | wellcometrust/platform-api,wellcometrust/platform-api,wellcometrust/platform-api,wellcometrust/platform-api | yaml | ## Code Before:
sns:
image: alicefuzier/fake-sns
ports:
- "9292:9292"
sqs:
image: s12v/elasticmq
ports:
- "9324:9324"
elasticsearch:
image: docker.elastic.co/elasticsearch/elasticsearch:5.4.0
ports:
- "9200:9200"
- "9300:9300"
environment:
- "http.host=0.0.0.0"
- "transport.host=0.0.0.0"
- "cluster.name=wellcome"
## Instruction:
Add dynamo docker image to common so that tests for common pass
## Code After:
dynamodb:
image: peopleperhour/dynamodb
ports:
- "45678:8000"
sns:
image: alicefuzier/fake-sns
ports:
- "9292:9292"
sqs:
image: s12v/elasticmq
ports:
- "9324:9324"
elasticsearch:
image: docker.elastic.co/elasticsearch/elasticsearch:5.4.0
ports:
- "9200:9200"
- "9300:9300"
environment:
- "http.host=0.0.0.0"
- "transport.host=0.0.0.0"
- "cluster.name=wellcome"
| + dynamodb:
+ image: peopleperhour/dynamodb
+ ports:
+ - "45678:8000"
sns:
image: alicefuzier/fake-sns
ports:
- "9292:9292"
sqs:
image: s12v/elasticmq
ports:
- "9324:9324"
elasticsearch:
image: docker.elastic.co/elasticsearch/elasticsearch:5.4.0
ports:
- "9200:9200"
- "9300:9300"
environment:
- "http.host=0.0.0.0"
- "transport.host=0.0.0.0"
- "cluster.name=wellcome" | 4 | 0.235294 | 4 | 0 |
4d148f4ecafa89b45a747e16920e258161d476e6 | deployment/puppet/osnailyfacter/modular/keystone/tasks.yaml | deployment/puppet/osnailyfacter/modular/keystone/tasks.yaml | - id: keystone
type: puppet
groups: [primary-controller, controller]
required_for: [openstack-controller]
requires: [openstack-haproxy, database]
parameters:
puppet_manifest: /etc/puppet/modules/osnailyfacter/modular/keystone/keystone.pp
puppet_modules: /etc/puppet/modules
timeout: 3600
test_pre:
cmd: ruby /etc/puppet/modules/osnailyfacter/modular/keystone/keystone_pre.rb
test_post:
cmd: ruby /etc/puppet/modules/osnailyfacter/modular/keystone/keystone_post.rb
| - id: keystone
type: puppet
groups: [primary-controller, controller]
required_for: [openstack-controller]
requires: [openstack-haproxy, database, rabbitmq]
parameters:
puppet_manifest: /etc/puppet/modules/osnailyfacter/modular/keystone/keystone.pp
puppet_modules: /etc/puppet/modules
timeout: 3600
test_pre:
cmd: ruby /etc/puppet/modules/osnailyfacter/modular/keystone/keystone_pre.rb
test_post:
cmd: ruby /etc/puppet/modules/osnailyfacter/modular/keystone/keystone_post.rb
| Add dependencies for keystone installation task | Add dependencies for keystone installation task
Change-Id: Ie6f3464659fd6645b3e774e1316db3e3ab55d199
Closes-Bug: #1452793
| YAML | apache-2.0 | stackforge/fuel-library,eayunstack/fuel-library,xarses/fuel-library,xarses/fuel-library,ddepaoli3/fuel-library-dev,eayunstack/fuel-library,stackforge/fuel-library,huntxu/fuel-library,xarses/fuel-library,stackforge/fuel-library,xarses/fuel-library,ddepaoli3/fuel-library-dev,eayunstack/fuel-library,huntxu/fuel-library,eayunstack/fuel-library,ddepaoli3/fuel-library-dev,huntxu/fuel-library,eayunstack/fuel-library,stackforge/fuel-library,huntxu/fuel-library,ddepaoli3/fuel-library-dev,huntxu/fuel-library,ddepaoli3/fuel-library-dev | yaml | ## Code Before:
- id: keystone
type: puppet
groups: [primary-controller, controller]
required_for: [openstack-controller]
requires: [openstack-haproxy, database]
parameters:
puppet_manifest: /etc/puppet/modules/osnailyfacter/modular/keystone/keystone.pp
puppet_modules: /etc/puppet/modules
timeout: 3600
test_pre:
cmd: ruby /etc/puppet/modules/osnailyfacter/modular/keystone/keystone_pre.rb
test_post:
cmd: ruby /etc/puppet/modules/osnailyfacter/modular/keystone/keystone_post.rb
## Instruction:
Add dependencies for keystone installation task
Change-Id: Ie6f3464659fd6645b3e774e1316db3e3ab55d199
Closes-Bug: #1452793
## Code After:
- id: keystone
type: puppet
groups: [primary-controller, controller]
required_for: [openstack-controller]
requires: [openstack-haproxy, database, rabbitmq]
parameters:
puppet_manifest: /etc/puppet/modules/osnailyfacter/modular/keystone/keystone.pp
puppet_modules: /etc/puppet/modules
timeout: 3600
test_pre:
cmd: ruby /etc/puppet/modules/osnailyfacter/modular/keystone/keystone_pre.rb
test_post:
cmd: ruby /etc/puppet/modules/osnailyfacter/modular/keystone/keystone_post.rb
| - id: keystone
type: puppet
groups: [primary-controller, controller]
required_for: [openstack-controller]
- requires: [openstack-haproxy, database]
+ requires: [openstack-haproxy, database, rabbitmq]
? ++++++++++
parameters:
puppet_manifest: /etc/puppet/modules/osnailyfacter/modular/keystone/keystone.pp
puppet_modules: /etc/puppet/modules
timeout: 3600
test_pre:
cmd: ruby /etc/puppet/modules/osnailyfacter/modular/keystone/keystone_pre.rb
test_post:
cmd: ruby /etc/puppet/modules/osnailyfacter/modular/keystone/keystone_post.rb | 2 | 0.153846 | 1 | 1 |
2fd30735d9c678ab81d812b29a95bd1780ed7324 | stylesheets/_mixin.media-queries.scss | stylesheets/_mixin.media-queries.scss | @mixin respond-min($width) {
@if $ie-version < 9 {
@content;
}
@else {
@media screen and (min-width: $width) {
@content;
}
}
}
@mixin respond-max($width) {
@if $ie-version < 9 {
@if $width >= $screen-medium {
@content;
}
}
@else {
@media screen and (max-width: $width - 1) {
@content;
}
}
}
| @mixin respond-min($width) {
@if $ie-version < 9 {
@content;
}
@else {
@media screen and (min-width: $width) {
@content;
}
}
}
@mixin respond-max($width) {
@if $ie-version < 9 {
@if $width >= $screen-medium {
@content;
}
}
@else {
@media screen and (max-width: $width - 1) {
@content;
}
}
}
@mixin respond-min-max($min-width, $max-width) {
@if $ie-version < 9 {
@if $max-width >= $screen-medium {
@content;
}
}
@else {
@media screen and (min-width: $min-width) and (max-width: $max-width - 1) {
@content;
}
}
}
| Add mixin for min-width and max-width MQs. | Add mixin for min-width and max-width MQs.
| SCSS | mit | jadu/pulsar,jadu/pulsar,jadu/pulsar | scss | ## Code Before:
@mixin respond-min($width) {
@if $ie-version < 9 {
@content;
}
@else {
@media screen and (min-width: $width) {
@content;
}
}
}
@mixin respond-max($width) {
@if $ie-version < 9 {
@if $width >= $screen-medium {
@content;
}
}
@else {
@media screen and (max-width: $width - 1) {
@content;
}
}
}
## Instruction:
Add mixin for min-width and max-width MQs.
## Code After:
@mixin respond-min($width) {
@if $ie-version < 9 {
@content;
}
@else {
@media screen and (min-width: $width) {
@content;
}
}
}
@mixin respond-max($width) {
@if $ie-version < 9 {
@if $width >= $screen-medium {
@content;
}
}
@else {
@media screen and (max-width: $width - 1) {
@content;
}
}
}
@mixin respond-min-max($min-width, $max-width) {
@if $ie-version < 9 {
@if $max-width >= $screen-medium {
@content;
}
}
@else {
@media screen and (min-width: $min-width) and (max-width: $max-width - 1) {
@content;
}
}
}
| @mixin respond-min($width) {
@if $ie-version < 9 {
@content;
}
@else {
@media screen and (min-width: $width) {
@content;
}
}
}
@mixin respond-max($width) {
@if $ie-version < 9 {
@if $width >= $screen-medium {
@content;
}
}
@else {
@media screen and (max-width: $width - 1) {
@content;
}
}
}
+
+ @mixin respond-min-max($min-width, $max-width) {
+ @if $ie-version < 9 {
+ @if $max-width >= $screen-medium {
+ @content;
+ }
+ }
+ @else {
+ @media screen and (min-width: $min-width) and (max-width: $max-width - 1) {
+ @content;
+ }
+ }
+ } | 13 | 0.565217 | 13 | 0 |
9d25c9b73f5b37b68324a504a908368944174319 | tools/ci/test.sh | tools/ci/test.sh |
set -ex -o pipefail
echo 'travis_fold:start:TEST'
# used by xvfb that is used by Chrome
export DISPLAY=:99.0
# Used by karma and karma-chrome-launcher
export CHROME_BIN=/usr/bin/google-chrome
echo 'travis_fold:start:test.run'
sh -e /etc/init.d/xvfb start
npm run lint
if [[ $TRAVIS_BRANCH = master ]]; then
./node_modules/.bin/karma start karma.sauce.conf.js --single-run
else
./node_modules/.bin/karma start karma.conf.js --single-run
fi
if [[ -z $CI_PULL_REQUEST ]]; then
./node_modules/.bin/karma start karma.coverage.conf.js
./node_modules/.bin/codecov
fi
echo 'travis_fold:end:test.run'
echo 'travis_fold:end:TEST'
|
set -ex -o pipefail
echo 'travis_fold:start:TEST'
# used by xvfb that is used by Chrome
export DISPLAY=:99.0
# Used by karma and karma-chrome-launcher
export CHROME_BIN=/usr/bin/google-chrome
echo 'travis_fold:start:test.run'
sh -e /etc/init.d/xvfb start
npm run lint
if [[ $TRAVIS_BRANCH = master ]]; then
./node_modules/.bin/karma start karma.sauce.conf.js --single-run
else
./node_modules/.bin/karma start karma.conf.js --single-run
fi
if [[ -z $CI_PULL_REQUEST ]]; then
./node_modules/.bin/karma start karma.coverage.conf.js --single-run
./node_modules/.bin/codecov
fi
echo 'travis_fold:end:test.run'
echo 'travis_fold:end:TEST'
| Add --single-run to karma coverage | Add --single-run to karma coverage
| Shell | mit | ivijs/ivi,ivijs/ivi | shell | ## Code Before:
set -ex -o pipefail
echo 'travis_fold:start:TEST'
# used by xvfb that is used by Chrome
export DISPLAY=:99.0
# Used by karma and karma-chrome-launcher
export CHROME_BIN=/usr/bin/google-chrome
echo 'travis_fold:start:test.run'
sh -e /etc/init.d/xvfb start
npm run lint
if [[ $TRAVIS_BRANCH = master ]]; then
./node_modules/.bin/karma start karma.sauce.conf.js --single-run
else
./node_modules/.bin/karma start karma.conf.js --single-run
fi
if [[ -z $CI_PULL_REQUEST ]]; then
./node_modules/.bin/karma start karma.coverage.conf.js
./node_modules/.bin/codecov
fi
echo 'travis_fold:end:test.run'
echo 'travis_fold:end:TEST'
## Instruction:
Add --single-run to karma coverage
## Code After:
set -ex -o pipefail
echo 'travis_fold:start:TEST'
# used by xvfb that is used by Chrome
export DISPLAY=:99.0
# Used by karma and karma-chrome-launcher
export CHROME_BIN=/usr/bin/google-chrome
echo 'travis_fold:start:test.run'
sh -e /etc/init.d/xvfb start
npm run lint
if [[ $TRAVIS_BRANCH = master ]]; then
./node_modules/.bin/karma start karma.sauce.conf.js --single-run
else
./node_modules/.bin/karma start karma.conf.js --single-run
fi
if [[ -z $CI_PULL_REQUEST ]]; then
./node_modules/.bin/karma start karma.coverage.conf.js --single-run
./node_modules/.bin/codecov
fi
echo 'travis_fold:end:test.run'
echo 'travis_fold:end:TEST'
|
set -ex -o pipefail
echo 'travis_fold:start:TEST'
# used by xvfb that is used by Chrome
export DISPLAY=:99.0
# Used by karma and karma-chrome-launcher
export CHROME_BIN=/usr/bin/google-chrome
echo 'travis_fold:start:test.run'
sh -e /etc/init.d/xvfb start
npm run lint
if [[ $TRAVIS_BRANCH = master ]]; then
./node_modules/.bin/karma start karma.sauce.conf.js --single-run
else
./node_modules/.bin/karma start karma.conf.js --single-run
fi
if [[ -z $CI_PULL_REQUEST ]]; then
- ./node_modules/.bin/karma start karma.coverage.conf.js
+ ./node_modules/.bin/karma start karma.coverage.conf.js --single-run
? +++++++++++++
./node_modules/.bin/codecov
fi
echo 'travis_fold:end:test.run'
echo 'travis_fold:end:TEST' | 2 | 0.068966 | 1 | 1 |
45b1e360bc01175efcf83170d116aaa1243ca33f | Sources/ZamzamUI/Views/UIKit/Extensions/UIBarButtonItem.swift | Sources/ZamzamUI/Views/UIKit/Extensions/UIBarButtonItem.swift | //
// UIBarButtonItem.swift
// ZamzamUI
//
// Created by Basem Emara on 5/3/16.
// Copyright © 2016 Zamzam Inc. All rights reserved.
//
#if os(iOS)
import UIKit
public extension UIBarButtonItem {
/// Initializes a UIBarButtonItem with an image more conveniently.
///
/// - Parameters:
/// - imageName: Image name.
/// - bundle: The bundle containing the image file or asset catalog. Specify nil to search the app's main bundle.
/// - target: Target of the context.
/// - action: Action to trigger.
convenience init(imageName: String, inBundle bundle: Bundle? = nil, target: Any?, action: Selector) {
self.init(
image: UIImage(
named: imageName,
inBundle: bundle
),
style: .plain,
target: target,
action: action
)
}
}
#endif
| //
// UIBarButtonItem.swift
// ZamzamUI
//
// Created by Basem Emara on 5/3/16.
// Copyright © 2016 Zamzam Inc. All rights reserved.
//
#if os(iOS)
import UIKit
public extension UIBarButtonItem {
/// Initializes a `UIBarButtonItem` with an image more conveniently.
///
/// - Parameters:
/// - imageName: Image name.
/// - bundle: The bundle containing the image file or asset catalog. Specify nil to search the app's main bundle.
/// - target: Target of the context.
/// - action: Action to trigger.
convenience init(imageName: String, inBundle bundle: Bundle? = nil, target: Any?, action: Selector) {
self.init(
image: UIImage(
named: imageName,
inBundle: bundle
),
style: .plain,
target: target,
action: action
)
}
}
public extension UIBarButtonItem {
/// Initializes a blank back `UIBarButtonItem` conveniently.
static func makeBackBarButtonItem() -> UIBarButtonItem {
UIBarButtonItem(title: "", style: .plain, target: nil, action: nil)
}
}
#endif
| Add bacck bar button factory | Add bacck bar button factory
| Swift | mit | ZamzamInc/ZamzamKit | swift | ## Code Before:
//
// UIBarButtonItem.swift
// ZamzamUI
//
// Created by Basem Emara on 5/3/16.
// Copyright © 2016 Zamzam Inc. All rights reserved.
//
#if os(iOS)
import UIKit
public extension UIBarButtonItem {
/// Initializes a UIBarButtonItem with an image more conveniently.
///
/// - Parameters:
/// - imageName: Image name.
/// - bundle: The bundle containing the image file or asset catalog. Specify nil to search the app's main bundle.
/// - target: Target of the context.
/// - action: Action to trigger.
convenience init(imageName: String, inBundle bundle: Bundle? = nil, target: Any?, action: Selector) {
self.init(
image: UIImage(
named: imageName,
inBundle: bundle
),
style: .plain,
target: target,
action: action
)
}
}
#endif
## Instruction:
Add bacck bar button factory
## Code After:
//
// UIBarButtonItem.swift
// ZamzamUI
//
// Created by Basem Emara on 5/3/16.
// Copyright © 2016 Zamzam Inc. All rights reserved.
//
#if os(iOS)
import UIKit
public extension UIBarButtonItem {
/// Initializes a `UIBarButtonItem` with an image more conveniently.
///
/// - Parameters:
/// - imageName: Image name.
/// - bundle: The bundle containing the image file or asset catalog. Specify nil to search the app's main bundle.
/// - target: Target of the context.
/// - action: Action to trigger.
convenience init(imageName: String, inBundle bundle: Bundle? = nil, target: Any?, action: Selector) {
self.init(
image: UIImage(
named: imageName,
inBundle: bundle
),
style: .plain,
target: target,
action: action
)
}
}
public extension UIBarButtonItem {
/// Initializes a blank back `UIBarButtonItem` conveniently.
static func makeBackBarButtonItem() -> UIBarButtonItem {
UIBarButtonItem(title: "", style: .plain, target: nil, action: nil)
}
}
#endif
| //
// UIBarButtonItem.swift
// ZamzamUI
//
// Created by Basem Emara on 5/3/16.
// Copyright © 2016 Zamzam Inc. All rights reserved.
//
#if os(iOS)
import UIKit
public extension UIBarButtonItem {
- /// Initializes a UIBarButtonItem with an image more conveniently.
+ /// Initializes a `UIBarButtonItem` with an image more conveniently.
? + +
///
/// - Parameters:
/// - imageName: Image name.
/// - bundle: The bundle containing the image file or asset catalog. Specify nil to search the app's main bundle.
/// - target: Target of the context.
/// - action: Action to trigger.
convenience init(imageName: String, inBundle bundle: Bundle? = nil, target: Any?, action: Selector) {
self.init(
image: UIImage(
named: imageName,
inBundle: bundle
),
style: .plain,
target: target,
action: action
)
}
}
+
+ public extension UIBarButtonItem {
+
+ /// Initializes a blank back `UIBarButtonItem` conveniently.
+ static func makeBackBarButtonItem() -> UIBarButtonItem {
+ UIBarButtonItem(title: "", style: .plain, target: nil, action: nil)
+ }
+ }
#endif | 10 | 0.30303 | 9 | 1 |
c6dfebff50e3789b6d1b81948a9239cc565b71c8 | www/templates/default/html/EventListing.tpl.php | www/templates/default/html/EventListing.tpl.php | <table>
<thead>
<tr>
<th scope="col" class="date">Time</th>
<th scope="col" class="title">Event Title</th>
</tr>
</thead>
<tbody class="vcalendar">
<?php
$oddrow = false;
foreach ($context as $eventinstance) {
//Start building an array of row classes
$row_classes = array('vevent');
if ($oddrow) {
//Add an alt class to odd rows
$row_classes[] = 'alt';
}
//Invert oddrow
$oddrow = !$oddrow;
?>
<tr class="<?php echo implode(' ', $row_classes) ?>">
<td class="date">
<?php echo $savvy->render($eventinstance, 'EventInstance/Date.tpl.php') ?>
</td>
<td>
<?php echo $savvy->render($eventinstance, 'EventInstance/Summary.tpl.php') ?>
<?php echo $savvy->render($eventinstance, 'EventInstance/Location.tpl.php') ?>
<?php echo $savvy->render($eventinstance, 'EventInstance/Description.tpl.php') ?>
</td>
</tr>
<?php
}
?>
</tbody>
</table>
| <table>
<thead>
<tr>
<th scope="col" class="date">Time</th>
<th scope="col" class="title">Event Title</th>
</tr>
</thead>
<tbody class="vcalendar">
<?php
$oddrow = false;
foreach ($context as $eventinstance) {
//Start building an array of row classes
$row_classes = array('vevent');
if ($oddrow) {
//Add an alt class to odd rows
$row_classes[] = 'alt';
}
if ($eventinstance->isAllDay()) {
$row_classes[] = 'all-day';
}
if ($eventinstance->isInProgress()) {
$row_classes[] = 'in-progress';
}
if ($eventinstance->isOnGoing()) {
$row_classes[] = 'ongoing';
}
//Invert oddrow
$oddrow = !$oddrow;
?>
<tr class="<?php echo implode(' ', $row_classes) ?>">
<td class="date">
<?php echo $savvy->render($eventinstance, 'EventInstance/Date.tpl.php') ?>
</td>
<td>
<?php echo $savvy->render($eventinstance, 'EventInstance/Summary.tpl.php') ?>
<?php echo $savvy->render($eventinstance, 'EventInstance/Location.tpl.php') ?>
<?php echo $savvy->render($eventinstance, 'EventInstance/Description.tpl.php') ?>
</td>
</tr>
<?php
}
?>
</tbody>
</table>
| Add helper classes to the event row | Add helper classes to the event row
| PHP | bsd-3-clause | unl/UNL_UCBCN_System,unl/UNL_UCBCN_System,unl/UNL_UCBCN_System,unl/UNL_UCBCN_System,unl/UNL_UCBCN_System | php | ## Code Before:
<table>
<thead>
<tr>
<th scope="col" class="date">Time</th>
<th scope="col" class="title">Event Title</th>
</tr>
</thead>
<tbody class="vcalendar">
<?php
$oddrow = false;
foreach ($context as $eventinstance) {
//Start building an array of row classes
$row_classes = array('vevent');
if ($oddrow) {
//Add an alt class to odd rows
$row_classes[] = 'alt';
}
//Invert oddrow
$oddrow = !$oddrow;
?>
<tr class="<?php echo implode(' ', $row_classes) ?>">
<td class="date">
<?php echo $savvy->render($eventinstance, 'EventInstance/Date.tpl.php') ?>
</td>
<td>
<?php echo $savvy->render($eventinstance, 'EventInstance/Summary.tpl.php') ?>
<?php echo $savvy->render($eventinstance, 'EventInstance/Location.tpl.php') ?>
<?php echo $savvy->render($eventinstance, 'EventInstance/Description.tpl.php') ?>
</td>
</tr>
<?php
}
?>
</tbody>
</table>
## Instruction:
Add helper classes to the event row
## Code After:
<table>
<thead>
<tr>
<th scope="col" class="date">Time</th>
<th scope="col" class="title">Event Title</th>
</tr>
</thead>
<tbody class="vcalendar">
<?php
$oddrow = false;
foreach ($context as $eventinstance) {
//Start building an array of row classes
$row_classes = array('vevent');
if ($oddrow) {
//Add an alt class to odd rows
$row_classes[] = 'alt';
}
if ($eventinstance->isAllDay()) {
$row_classes[] = 'all-day';
}
if ($eventinstance->isInProgress()) {
$row_classes[] = 'in-progress';
}
if ($eventinstance->isOnGoing()) {
$row_classes[] = 'ongoing';
}
//Invert oddrow
$oddrow = !$oddrow;
?>
<tr class="<?php echo implode(' ', $row_classes) ?>">
<td class="date">
<?php echo $savvy->render($eventinstance, 'EventInstance/Date.tpl.php') ?>
</td>
<td>
<?php echo $savvy->render($eventinstance, 'EventInstance/Summary.tpl.php') ?>
<?php echo $savvy->render($eventinstance, 'EventInstance/Location.tpl.php') ?>
<?php echo $savvy->render($eventinstance, 'EventInstance/Description.tpl.php') ?>
</td>
</tr>
<?php
}
?>
</tbody>
</table>
| <table>
<thead>
<tr>
<th scope="col" class="date">Time</th>
<th scope="col" class="title">Event Title</th>
</tr>
</thead>
<tbody class="vcalendar">
<?php
$oddrow = false;
foreach ($context as $eventinstance) {
//Start building an array of row classes
$row_classes = array('vevent');
if ($oddrow) {
//Add an alt class to odd rows
$row_classes[] = 'alt';
+ }
+
+ if ($eventinstance->isAllDay()) {
+ $row_classes[] = 'all-day';
+ }
+
+ if ($eventinstance->isInProgress()) {
+ $row_classes[] = 'in-progress';
+ }
+
+ if ($eventinstance->isOnGoing()) {
+ $row_classes[] = 'ongoing';
}
//Invert oddrow
$oddrow = !$oddrow;
?>
<tr class="<?php echo implode(' ', $row_classes) ?>">
<td class="date">
<?php echo $savvy->render($eventinstance, 'EventInstance/Date.tpl.php') ?>
</td>
<td>
<?php echo $savvy->render($eventinstance, 'EventInstance/Summary.tpl.php') ?>
<?php echo $savvy->render($eventinstance, 'EventInstance/Location.tpl.php') ?>
<?php echo $savvy->render($eventinstance, 'EventInstance/Description.tpl.php') ?>
</td>
</tr>
<?php
}
?>
</tbody>
</table>
| 12 | 0.307692 | 12 | 0 |
2c371923e5e609b885edf65c91c1036faeef4c2b | README.md | README.md |
Basis is a simple and extendable WordPress theme that makes it easy to build an online store with Schema.
Schema is a cloud platform and API to build and scale ecommerce. Create a free account at https://schema.io.
## Status
This theme is currently in development. Take a look at `functions.php` to get a head start.
## License
MIT
## Support
Join the discussion at https://slack.schema.io or email support@schema.io with questions.
|
Basis is a simple WordPress theme that makes it easy to build an online store with Schema.
Schema is a cloud ecommerce platform and API. Learn more at https://schema.io.
## Status
This theme is currently in development. Take a look at `functions.php` to get a head start.
## Setup
1. Edit `functions.php` and replace `client_id` and `client_key` with your own.
2. Make sure the `cache/` folder is writable by your web server. Cache entries are created and used automatically by the Schema API client.
## License
MIT
## Support
Join the discussion at https://slack.schema.io or email support@schema.io with questions.
| Update readme with setup info | Update readme with setup info
| Markdown | mit | schemaio/schema-wp-basis | markdown | ## Code Before:
Basis is a simple and extendable WordPress theme that makes it easy to build an online store with Schema.
Schema is a cloud platform and API to build and scale ecommerce. Create a free account at https://schema.io.
## Status
This theme is currently in development. Take a look at `functions.php` to get a head start.
## License
MIT
## Support
Join the discussion at https://slack.schema.io or email support@schema.io with questions.
## Instruction:
Update readme with setup info
## Code After:
Basis is a simple WordPress theme that makes it easy to build an online store with Schema.
Schema is a cloud ecommerce platform and API. Learn more at https://schema.io.
## Status
This theme is currently in development. Take a look at `functions.php` to get a head start.
## Setup
1. Edit `functions.php` and replace `client_id` and `client_key` with your own.
2. Make sure the `cache/` folder is writable by your web server. Cache entries are created and used automatically by the Schema API client.
## License
MIT
## Support
Join the discussion at https://slack.schema.io or email support@schema.io with questions.
|
- Basis is a simple and extendable WordPress theme that makes it easy to build an online store with Schema.
? ---------------
+ Basis is a simple WordPress theme that makes it easy to build an online store with Schema.
- Schema is a cloud platform and API to build and scale ecommerce. Create a free account at https://schema.io.
+ Schema is a cloud ecommerce platform and API. Learn more at https://schema.io.
## Status
This theme is currently in development. Take a look at `functions.php` to get a head start.
+
+ ## Setup
+
+ 1. Edit `functions.php` and replace `client_id` and `client_key` with your own.
+
+ 2. Make sure the `cache/` folder is writable by your web server. Cache entries are created and used automatically by the Schema API client.
## License
MIT
## Support
Join the discussion at https://slack.schema.io or email support@schema.io with questions. | 10 | 0.625 | 8 | 2 |
6f992bda1747d8dd23dd03f1ae3679c00f2fc977 | marketpulse/geo/lookup.py | marketpulse/geo/lookup.py | from urlparse import urljoin
from django.conf import settings
from django_countries import countries
import requests
COUNTRY_CODES = {key: value for (value, key) in list(countries)}
def reverse_geocode(lat, lng):
"""Query Mapbox API to get data for lat, lng"""
query = '{0},{1}.json'.format(lng, lat)
url = urljoin(settings.MAPBOX_GEOCODE_URL, query)
params = {'access_token': settings.MAPBOX_TOKEN}
response = requests.get(url, params=params)
results = {}
if response.status_code != 200:
return results
data = response.json()
for feature in data['features']:
text = feature['text']
if feature['id'].startswith('country.'):
results['country'] = COUNTRY_CODES[text]
if feature['id'].startswith('region.'):
results['region'] = text
if feature['id'].startswith('place.'):
results['city'] = text
if feature['id'].startswith('address.'):
results['address'] = text
return results
| from urlparse import urljoin
from django.conf import settings
from django_countries import countries
import requests
COUNTRY_CODES = {key: value for (value, key) in list(countries)}
def reverse_geocode(lat, lng):
"""Query Mapbox API to get data for lat, lng"""
query = '{0},{1}.json'.format(lng, lat)
url = urljoin(settings.MAPBOX_GEOCODE_URL, query)
params = {'access_token': settings.MAPBOX_TOKEN}
response = requests.get(url, params=params)
results = {}
if response.status_code != 200:
return results
data = response.json()
for feature in data['features']:
text = feature['text']
if feature['id'].startswith('country.'):
try:
results['country'] = COUNTRY_CODES[text]
except KeyError:
results['country'] = text
if feature['id'].startswith('region.'):
results['region'] = text
if feature['id'].startswith('place.'):
results['city'] = text
if feature['id'].startswith('address.'):
results['address'] = text
return results
| Handle errors in case of a country mismatch. | Handle errors in case of a country mismatch.
| Python | mpl-2.0 | mozilla/marketpulse,akatsoulas/marketpulse,akatsoulas/marketpulse,akatsoulas/marketpulse,mozilla/marketpulse,mozilla/marketpulse,akatsoulas/marketpulse,mozilla/marketpulse | python | ## Code Before:
from urlparse import urljoin
from django.conf import settings
from django_countries import countries
import requests
COUNTRY_CODES = {key: value for (value, key) in list(countries)}
def reverse_geocode(lat, lng):
"""Query Mapbox API to get data for lat, lng"""
query = '{0},{1}.json'.format(lng, lat)
url = urljoin(settings.MAPBOX_GEOCODE_URL, query)
params = {'access_token': settings.MAPBOX_TOKEN}
response = requests.get(url, params=params)
results = {}
if response.status_code != 200:
return results
data = response.json()
for feature in data['features']:
text = feature['text']
if feature['id'].startswith('country.'):
results['country'] = COUNTRY_CODES[text]
if feature['id'].startswith('region.'):
results['region'] = text
if feature['id'].startswith('place.'):
results['city'] = text
if feature['id'].startswith('address.'):
results['address'] = text
return results
## Instruction:
Handle errors in case of a country mismatch.
## Code After:
from urlparse import urljoin
from django.conf import settings
from django_countries import countries
import requests
COUNTRY_CODES = {key: value for (value, key) in list(countries)}
def reverse_geocode(lat, lng):
"""Query Mapbox API to get data for lat, lng"""
query = '{0},{1}.json'.format(lng, lat)
url = urljoin(settings.MAPBOX_GEOCODE_URL, query)
params = {'access_token': settings.MAPBOX_TOKEN}
response = requests.get(url, params=params)
results = {}
if response.status_code != 200:
return results
data = response.json()
for feature in data['features']:
text = feature['text']
if feature['id'].startswith('country.'):
try:
results['country'] = COUNTRY_CODES[text]
except KeyError:
results['country'] = text
if feature['id'].startswith('region.'):
results['region'] = text
if feature['id'].startswith('place.'):
results['city'] = text
if feature['id'].startswith('address.'):
results['address'] = text
return results
| from urlparse import urljoin
from django.conf import settings
from django_countries import countries
import requests
COUNTRY_CODES = {key: value for (value, key) in list(countries)}
def reverse_geocode(lat, lng):
"""Query Mapbox API to get data for lat, lng"""
query = '{0},{1}.json'.format(lng, lat)
url = urljoin(settings.MAPBOX_GEOCODE_URL, query)
params = {'access_token': settings.MAPBOX_TOKEN}
response = requests.get(url, params=params)
results = {}
if response.status_code != 200:
return results
data = response.json()
for feature in data['features']:
text = feature['text']
if feature['id'].startswith('country.'):
+ try:
- results['country'] = COUNTRY_CODES[text]
+ results['country'] = COUNTRY_CODES[text]
? ++++
+ except KeyError:
+ results['country'] = text
if feature['id'].startswith('region.'):
results['region'] = text
if feature['id'].startswith('place.'):
results['city'] = text
if feature['id'].startswith('address.'):
results['address'] = text
return results | 5 | 0.142857 | 4 | 1 |
1a7ad0dfa0065b3eac2004354bc4752590d8fc1b | script.js | script.js | (function(){
colors = ['#eeeeee', '#d6e685', '#8cc665', '#44a340', '#1e6823'];
days = document.getElementsByClassName('day');
Array.prototype.forEach.call(days, function(element) {
element.addEventListener('click', function(event){
event.stopPropagation()
currentColor = element.getAttribute('fill');
nextIndex = (colors.indexOf(currentColor) + 1) % colors.length;
element.setAttribute('fill', colors[nextIndex]);
});
});
})();
| (function(){
colors = ['#eeeeee', '#d6e685', '#8cc665', '#44a340', '#1e6823'];
days = document.getElementsByClassName('day');
Array.prototype.forEach.call(days, function(element) {
element.addEventListener('click', function(event) {
event.stopPropagation();
});
element.addEventListener('mouseenter', function(event) {
currentColor = element.getAttribute('fill');
if (event.buttons & 1 && !event.shiftKey) {
nextIndex = Math.min(colors.indexOf(currentColor) + 1, colors.length - 1);
element.setAttribute('fill', colors[nextIndex]);
} else if (event.buttons & 1 && event.shiftKey) {
nextIndex = Math.max(colors.indexOf(currentColor) - 1, 0);
element.setAttribute('fill', colors[nextIndex]);
}
});
});
})();
| Make to draw by dragging | Make to draw by dragging
| JavaScript | mit | ueokande/github-pseudo-contributions,iBenza/github-pseudo-contributions | javascript | ## Code Before:
(function(){
colors = ['#eeeeee', '#d6e685', '#8cc665', '#44a340', '#1e6823'];
days = document.getElementsByClassName('day');
Array.prototype.forEach.call(days, function(element) {
element.addEventListener('click', function(event){
event.stopPropagation()
currentColor = element.getAttribute('fill');
nextIndex = (colors.indexOf(currentColor) + 1) % colors.length;
element.setAttribute('fill', colors[nextIndex]);
});
});
})();
## Instruction:
Make to draw by dragging
## Code After:
(function(){
colors = ['#eeeeee', '#d6e685', '#8cc665', '#44a340', '#1e6823'];
days = document.getElementsByClassName('day');
Array.prototype.forEach.call(days, function(element) {
element.addEventListener('click', function(event) {
event.stopPropagation();
});
element.addEventListener('mouseenter', function(event) {
currentColor = element.getAttribute('fill');
if (event.buttons & 1 && !event.shiftKey) {
nextIndex = Math.min(colors.indexOf(currentColor) + 1, colors.length - 1);
element.setAttribute('fill', colors[nextIndex]);
} else if (event.buttons & 1 && event.shiftKey) {
nextIndex = Math.max(colors.indexOf(currentColor) - 1, 0);
element.setAttribute('fill', colors[nextIndex]);
}
});
});
})();
| (function(){
colors = ['#eeeeee', '#d6e685', '#8cc665', '#44a340', '#1e6823'];
days = document.getElementsByClassName('day');
Array.prototype.forEach.call(days, function(element) {
- element.addEventListener('click', function(event){
+ element.addEventListener('click', function(event) {
? +
- event.stopPropagation()
+ event.stopPropagation();
? +
+ });
+ element.addEventListener('mouseenter', function(event) {
currentColor = element.getAttribute('fill');
+ if (event.buttons & 1 && !event.shiftKey) {
- nextIndex = (colors.indexOf(currentColor) + 1) % colors.length;
? ^^^
+ nextIndex = Math.min(colors.indexOf(currentColor) + 1, colors.length - 1);
? ++ ++++++++ ^ +++++
- element.setAttribute('fill', colors[nextIndex]);
+ element.setAttribute('fill', colors[nextIndex]);
? ++
+ } else if (event.buttons & 1 && event.shiftKey) {
+ nextIndex = Math.max(colors.indexOf(currentColor) - 1, 0);
+ element.setAttribute('fill', colors[nextIndex]);
+ }
});
});
})(); | 15 | 1.25 | 11 | 4 |
7e2481ac5ec169e628d517e2916775bf2a1ff73d | .travis.yml | .travis.yml | language: android
android:
components:
# https://github.com/travis-ci/travis-ci/issues/5036
- tools
- platform-tools
- build-tools-23.0.3
- android-23
- extra-android-m2repository
- extra-google-m2repository
# Necessary because otherwise Gradle will OOM during the compilation phase:
# https://travis-ci.org/firebase/firebase-jobdispatcher-android/builds/164194294
# But also can't be >= 3gb, otherwise Travis will kill our process:
# https://docs.travis-ci.com/user/common-build-problems/#My-build-script-is-killed-without-any-error
env:
- GRADLE_OPTS="-XX:MaxPermSize=1024m"
script:
- ./gradlew jobdispatcher:build testapp:assemble
# "avoid uploading the cache after every build"
before_cache:
- rm -f $HOME/.gradle/caches/modules-2/modules-2.lock
cache:
directories:
- $HOME/.gradle/caches/
- $HOME/.gradle/wrapper/
| language: android
jdk: oraclejdk8
# Container builds don't have enough memory to reliably finish without being
# killed, so pretend sudo is required so we don't use the container infra.
# See: https://github.com/travis-ci/travis-ci/issues/5582
sudo: required
android:
components:
# https://github.com/travis-ci/travis-ci/issues/5036
- tools
- platform-tools
- build-tools-23.0.3
- android-23
- extra-android-m2repository
- extra-google-m2repository
# "avoid uploading the cache after every build"
before_cache:
- rm -f $HOME/.gradle/caches/modules-2/modules-2.lock
- rm -rf $HOME/.gradle/caches/*/plugin-resolution/
cache:
directories:
- $HOME/.gradle/caches/
- $HOME/.gradle/wrapper/
script: ./gradlew jobdispatcher:build testapp:assemble
| Mark sudo as required to fix Travis | Mark sudo as required to fix Travis
Thanks @SUPERCILEX for the help (#39)!
| YAML | apache-2.0 | googlearchive/firebase-jobdispatcher-android | yaml | ## Code Before:
language: android
android:
components:
# https://github.com/travis-ci/travis-ci/issues/5036
- tools
- platform-tools
- build-tools-23.0.3
- android-23
- extra-android-m2repository
- extra-google-m2repository
# Necessary because otherwise Gradle will OOM during the compilation phase:
# https://travis-ci.org/firebase/firebase-jobdispatcher-android/builds/164194294
# But also can't be >= 3gb, otherwise Travis will kill our process:
# https://docs.travis-ci.com/user/common-build-problems/#My-build-script-is-killed-without-any-error
env:
- GRADLE_OPTS="-XX:MaxPermSize=1024m"
script:
- ./gradlew jobdispatcher:build testapp:assemble
# "avoid uploading the cache after every build"
before_cache:
- rm -f $HOME/.gradle/caches/modules-2/modules-2.lock
cache:
directories:
- $HOME/.gradle/caches/
- $HOME/.gradle/wrapper/
## Instruction:
Mark sudo as required to fix Travis
Thanks @SUPERCILEX for the help (#39)!
## Code After:
language: android
jdk: oraclejdk8
# Container builds don't have enough memory to reliably finish without being
# killed, so pretend sudo is required so we don't use the container infra.
# See: https://github.com/travis-ci/travis-ci/issues/5582
sudo: required
android:
components:
# https://github.com/travis-ci/travis-ci/issues/5036
- tools
- platform-tools
- build-tools-23.0.3
- android-23
- extra-android-m2repository
- extra-google-m2repository
# "avoid uploading the cache after every build"
before_cache:
- rm -f $HOME/.gradle/caches/modules-2/modules-2.lock
- rm -rf $HOME/.gradle/caches/*/plugin-resolution/
cache:
directories:
- $HOME/.gradle/caches/
- $HOME/.gradle/wrapper/
script: ./gradlew jobdispatcher:build testapp:assemble
| language: android
+ jdk: oraclejdk8
+ # Container builds don't have enough memory to reliably finish without being
+ # killed, so pretend sudo is required so we don't use the container infra.
+ # See: https://github.com/travis-ci/travis-ci/issues/5582
+ sudo: required
android:
components:
# https://github.com/travis-ci/travis-ci/issues/5036
- tools
- platform-tools
- build-tools-23.0.3
- android-23
- extra-android-m2repository
- extra-google-m2repository
- # Necessary because otherwise Gradle will OOM during the compilation phase:
- # https://travis-ci.org/firebase/firebase-jobdispatcher-android/builds/164194294
- # But also can't be >= 3gb, otherwise Travis will kill our process:
- # https://docs.travis-ci.com/user/common-build-problems/#My-build-script-is-killed-without-any-error
- env:
- - GRADLE_OPTS="-XX:MaxPermSize=1024m"
-
- script:
- - ./gradlew jobdispatcher:build testapp:assemble
-
# "avoid uploading the cache after every build"
before_cache:
- rm -f $HOME/.gradle/caches/modules-2/modules-2.lock
+ - rm -rf $HOME/.gradle/caches/*/plugin-resolution/
cache:
directories:
- $HOME/.gradle/caches/
- $HOME/.gradle/wrapper/
+
+ script: ./gradlew jobdispatcher:build testapp:assemble | 18 | 0.642857 | 8 | 10 |
b9c175059f0f2f3321ffd495fd46c6f5770afd22 | bluebottle/payouts_dorado/adapters.py | bluebottle/payouts_dorado/adapters.py | import json
import requests
from django.core.exceptions import ImproperlyConfigured
from django.db import connection
from requests.exceptions import MissingSchema
from bluebottle.clients import properties
class PayoutValidationError(Exception):
pass
class PayoutCreationError(Exception):
pass
class DoradoPayoutAdapter(object):
def __init__(self, project):
self.settings = getattr(properties, 'PAYOUT_SERVICE', None)
self.project = project
self.tenant = connection.tenant
def trigger_payout(self):
# Send the signal to Dorado
data = {
'project_id': self.project.id,
'tenant': self.tenant.schema_name
}
try:
response = requests.post(self.settings['url'], data)
response.raise_for_status()
self.project.payout_status = 'created'
self.project.save()
except requests.HTTPError:
try:
raise PayoutValidationError(json.loads(response.content))
except ValueError:
raise PayoutCreationError(response.content)
except MissingSchema:
raise ImproperlyConfigured("Incorrect Payout URL")
except IOError, e:
raise PayoutCreationError(unicode(e))
except TypeError:
raise ImproperlyConfigured("Invalid Payout settings")
| import json
import requests
from django.core.exceptions import ImproperlyConfigured
from django.db import connection
from requests.exceptions import MissingSchema
from bluebottle.clients import properties
class PayoutValidationError(Exception):
pass
class PayoutCreationError(Exception):
pass
class DoradoPayoutAdapter(object):
def __init__(self, project):
self.settings = getattr(properties, 'PAYOUT_SERVICE', None)
self.project = project
self.tenant = connection.tenant
def trigger_payout(self):
# Send the signal to Dorado
data = {
'project_id': self.project.id,
'tenant': self.tenant.schema_name
}
try:
self.project.payout_status = 'created'
self.project.save()
response = requests.post(self.settings['url'], data)
response.raise_for_status()
except requests.HTTPError:
try:
raise PayoutValidationError(json.loads(response.content))
except ValueError:
raise PayoutCreationError(response.content)
except MissingSchema:
raise ImproperlyConfigured("Incorrect Payout URL")
except IOError, e:
raise PayoutCreationError(unicode(e))
except TypeError:
raise ImproperlyConfigured("Invalid Payout settings")
| Set the payout status to created BEFORE we call out to dorado. This way we do not override that status that dorado set. | Set the payout status to created BEFORE we call out to dorado. This way
we do not override that status that dorado set.
BB-9471 #resolve
| Python | bsd-3-clause | onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle | python | ## Code Before:
import json
import requests
from django.core.exceptions import ImproperlyConfigured
from django.db import connection
from requests.exceptions import MissingSchema
from bluebottle.clients import properties
class PayoutValidationError(Exception):
pass
class PayoutCreationError(Exception):
pass
class DoradoPayoutAdapter(object):
def __init__(self, project):
self.settings = getattr(properties, 'PAYOUT_SERVICE', None)
self.project = project
self.tenant = connection.tenant
def trigger_payout(self):
# Send the signal to Dorado
data = {
'project_id': self.project.id,
'tenant': self.tenant.schema_name
}
try:
response = requests.post(self.settings['url'], data)
response.raise_for_status()
self.project.payout_status = 'created'
self.project.save()
except requests.HTTPError:
try:
raise PayoutValidationError(json.loads(response.content))
except ValueError:
raise PayoutCreationError(response.content)
except MissingSchema:
raise ImproperlyConfigured("Incorrect Payout URL")
except IOError, e:
raise PayoutCreationError(unicode(e))
except TypeError:
raise ImproperlyConfigured("Invalid Payout settings")
## Instruction:
Set the payout status to created BEFORE we call out to dorado. This way
we do not override that status that dorado set.
BB-9471 #resolve
## Code After:
import json
import requests
from django.core.exceptions import ImproperlyConfigured
from django.db import connection
from requests.exceptions import MissingSchema
from bluebottle.clients import properties
class PayoutValidationError(Exception):
pass
class PayoutCreationError(Exception):
pass
class DoradoPayoutAdapter(object):
def __init__(self, project):
self.settings = getattr(properties, 'PAYOUT_SERVICE', None)
self.project = project
self.tenant = connection.tenant
def trigger_payout(self):
# Send the signal to Dorado
data = {
'project_id': self.project.id,
'tenant': self.tenant.schema_name
}
try:
self.project.payout_status = 'created'
self.project.save()
response = requests.post(self.settings['url'], data)
response.raise_for_status()
except requests.HTTPError:
try:
raise PayoutValidationError(json.loads(response.content))
except ValueError:
raise PayoutCreationError(response.content)
except MissingSchema:
raise ImproperlyConfigured("Incorrect Payout URL")
except IOError, e:
raise PayoutCreationError(unicode(e))
except TypeError:
raise ImproperlyConfigured("Invalid Payout settings")
| import json
import requests
from django.core.exceptions import ImproperlyConfigured
from django.db import connection
from requests.exceptions import MissingSchema
from bluebottle.clients import properties
class PayoutValidationError(Exception):
pass
class PayoutCreationError(Exception):
pass
class DoradoPayoutAdapter(object):
def __init__(self, project):
self.settings = getattr(properties, 'PAYOUT_SERVICE', None)
self.project = project
self.tenant = connection.tenant
def trigger_payout(self):
# Send the signal to Dorado
data = {
'project_id': self.project.id,
'tenant': self.tenant.schema_name
}
try:
+ self.project.payout_status = 'created'
+ self.project.save()
+
response = requests.post(self.settings['url'], data)
response.raise_for_status()
-
- self.project.payout_status = 'created'
- self.project.save()
except requests.HTTPError:
try:
raise PayoutValidationError(json.loads(response.content))
except ValueError:
raise PayoutCreationError(response.content)
except MissingSchema:
raise ImproperlyConfigured("Incorrect Payout URL")
except IOError, e:
raise PayoutCreationError(unicode(e))
except TypeError:
raise ImproperlyConfigured("Invalid Payout settings") | 6 | 0.122449 | 3 | 3 |
dc5e8a8df5e68653857e6d70d73bc817cf3f7403 | _plugins/jekyll_slideshow.rb | _plugins/jekyll_slideshow.rb | require 'RMagick'
require 'nokogiri'
include Magick
module Jekyll
class ThumbGenerator < Generator
safe true
def generate(site)
site.static_files.each do |file|
if File.extname(file.path).downcase == ('.jpg' || '.png') && file.path.index("-thumb") == nil
img = Magick::Image::read(file.path).first
thumb = img.resize_to_fill(150, 150)
path = file.path.sub(File.extname(file.path), '-thumb' << File.extname(file.path))
thumb.write path
site.static_files << StaticFile.new(thumb, site.source, File.dirname(file.path).sub(site.source, ''), File.basename(file.path).sub('.JPG', '-thumb.JPG'))
end
end
end
end
module ImageThumbs
def slideshows(content)
doc = Nokogiri::HTML(content)
doc.css('ul li img').each do |img|
url = img['src']
newurl = File.dirname(url) << '/' << File.basename(url, File.extname(url)) << '-thumb' << File.extname(url)
img['src'] = newurl
img['data-fullimage'] = url
end
return doc
end
end
end
Liquid::Template.register_filter(Jekyll::ImageThumbs) | require 'RMagick'
require 'nokogiri'
include Magick
module Jekyll
class ThumbGenerator < Generator
safe true
def generate(site)
config = Jekyll.configuration({})['slideshow']
site.static_files.each do |file|
if File.extname(file.path).downcase == ('.jpg' || '.png') && file.path.index("-thumb") == nil
img = Magick::Image::read(file.path).first
thumb = img.resize_to_fill(config['width'], config['height'])
path = file.path.sub(File.extname(file.path), '-thumb' << File.extname(file.path))
thumb.write path
site.static_files << StaticFile.new(thumb, site.source, File.dirname(file.path).sub(site.source, ''), File.basename(file.path).sub('.JPG', '-thumb.JPG'))
end
end
end
end
module ImageThumbs
def slideshows(content)
doc = Nokogiri::HTML(content)
doc.css('ul li img').each do |img|
url = img['src']
newurl = File.dirname(url) << '/' << File.basename(url, File.extname(url)) << '-thumb' << File.extname(url)
img['src'] = newurl
img['data-fullimage'] = url
end
return doc
end
end
end
Liquid::Template.register_filter(Jekyll::ImageThumbs) | Set thumbnail size based on config | Set thumbnail size based on config
| Ruby | mit | matthewowen/jekyll-slideshow,matthewowen/jekyll-slideshow | ruby | ## Code Before:
require 'RMagick'
require 'nokogiri'
include Magick
module Jekyll
class ThumbGenerator < Generator
safe true
def generate(site)
site.static_files.each do |file|
if File.extname(file.path).downcase == ('.jpg' || '.png') && file.path.index("-thumb") == nil
img = Magick::Image::read(file.path).first
thumb = img.resize_to_fill(150, 150)
path = file.path.sub(File.extname(file.path), '-thumb' << File.extname(file.path))
thumb.write path
site.static_files << StaticFile.new(thumb, site.source, File.dirname(file.path).sub(site.source, ''), File.basename(file.path).sub('.JPG', '-thumb.JPG'))
end
end
end
end
module ImageThumbs
def slideshows(content)
doc = Nokogiri::HTML(content)
doc.css('ul li img').each do |img|
url = img['src']
newurl = File.dirname(url) << '/' << File.basename(url, File.extname(url)) << '-thumb' << File.extname(url)
img['src'] = newurl
img['data-fullimage'] = url
end
return doc
end
end
end
Liquid::Template.register_filter(Jekyll::ImageThumbs)
## Instruction:
Set thumbnail size based on config
## Code After:
require 'RMagick'
require 'nokogiri'
include Magick
module Jekyll
class ThumbGenerator < Generator
safe true
def generate(site)
config = Jekyll.configuration({})['slideshow']
site.static_files.each do |file|
if File.extname(file.path).downcase == ('.jpg' || '.png') && file.path.index("-thumb") == nil
img = Magick::Image::read(file.path).first
thumb = img.resize_to_fill(config['width'], config['height'])
path = file.path.sub(File.extname(file.path), '-thumb' << File.extname(file.path))
thumb.write path
site.static_files << StaticFile.new(thumb, site.source, File.dirname(file.path).sub(site.source, ''), File.basename(file.path).sub('.JPG', '-thumb.JPG'))
end
end
end
end
module ImageThumbs
def slideshows(content)
doc = Nokogiri::HTML(content)
doc.css('ul li img').each do |img|
url = img['src']
newurl = File.dirname(url) << '/' << File.basename(url, File.extname(url)) << '-thumb' << File.extname(url)
img['src'] = newurl
img['data-fullimage'] = url
end
return doc
end
end
end
Liquid::Template.register_filter(Jekyll::ImageThumbs) | require 'RMagick'
require 'nokogiri'
include Magick
module Jekyll
class ThumbGenerator < Generator
safe true
def generate(site)
+ config = Jekyll.configuration({})['slideshow']
site.static_files.each do |file|
if File.extname(file.path).downcase == ('.jpg' || '.png') && file.path.index("-thumb") == nil
img = Magick::Image::read(file.path).first
- thumb = img.resize_to_fill(150, 150)
+ thumb = img.resize_to_fill(config['width'], config['height'])
path = file.path.sub(File.extname(file.path), '-thumb' << File.extname(file.path))
thumb.write path
site.static_files << StaticFile.new(thumb, site.source, File.dirname(file.path).sub(site.source, ''), File.basename(file.path).sub('.JPG', '-thumb.JPG'))
end
end
end
end
module ImageThumbs
def slideshows(content)
doc = Nokogiri::HTML(content)
doc.css('ul li img').each do |img|
url = img['src']
newurl = File.dirname(url) << '/' << File.basename(url, File.extname(url)) << '-thumb' << File.extname(url)
img['src'] = newurl
img['data-fullimage'] = url
end
return doc
end
end
end
Liquid::Template.register_filter(Jekyll::ImageThumbs) | 3 | 0.075 | 2 | 1 |
7a172a7fe98223fd20a4bb5d497aa17653b8a13b | dev_tools/coverage_runner.py | dev_tools/coverage_runner.py |
import os
import sys
from os.path import join, realpath
# Third Party modules
import nose
import coverage
cov = coverage.coverage(branch=True)
cov.start()
result = nose.run(defaultTest=realpath(join(__file__, "..", "..", "py2c")))
cov.stop()
cov.save()
if result == 0:
# If we are in CI environment, don't write an HTML report.
if os.environ.get("CI", None) is None:
cov.html_report()
cov.report()
sys.exit(result)
|
import os
import sys
from os.path import join, realpath
# Third Party modules
import nose
import coverage
cov = coverage.coverage(branch=True)
cov.start()
success = nose.run(defaultTest=realpath(join(__file__, "..", "..", "py2c")))
cov.stop()
cov.save()
if success:
# If we are in CI environment, don't write an HTML report.
if os.environ.get("CI", None) is None:
cov.html_report()
cov.report()
sys.exit(0 if success else 1)
| Correct the usage of nose.run. | [TRAVIS] Correct the usage of nose.run.
nose.run returns whether the test run was sucessful or not.
| Python | bsd-3-clause | pradyunsg/Py2C,pradyunsg/Py2C | python | ## Code Before:
import os
import sys
from os.path import join, realpath
# Third Party modules
import nose
import coverage
cov = coverage.coverage(branch=True)
cov.start()
result = nose.run(defaultTest=realpath(join(__file__, "..", "..", "py2c")))
cov.stop()
cov.save()
if result == 0:
# If we are in CI environment, don't write an HTML report.
if os.environ.get("CI", None) is None:
cov.html_report()
cov.report()
sys.exit(result)
## Instruction:
[TRAVIS] Correct the usage of nose.run.
nose.run returns whether the test run was sucessful or not.
## Code After:
import os
import sys
from os.path import join, realpath
# Third Party modules
import nose
import coverage
cov = coverage.coverage(branch=True)
cov.start()
success = nose.run(defaultTest=realpath(join(__file__, "..", "..", "py2c")))
cov.stop()
cov.save()
if success:
# If we are in CI environment, don't write an HTML report.
if os.environ.get("CI", None) is None:
cov.html_report()
cov.report()
sys.exit(0 if success else 1)
|
import os
import sys
from os.path import join, realpath
# Third Party modules
import nose
import coverage
cov = coverage.coverage(branch=True)
cov.start()
- result = nose.run(defaultTest=realpath(join(__file__, "..", "..", "py2c")))
? ^ ^^^
+ success = nose.run(defaultTest=realpath(join(__file__, "..", "..", "py2c")))
? ^^^^ ^
cov.stop()
cov.save()
+ if success:
-
- if result == 0:
# If we are in CI environment, don't write an HTML report.
if os.environ.get("CI", None) is None:
cov.html_report()
cov.report()
- sys.exit(result)
+ sys.exit(0 if success else 1) | 7 | 0.291667 | 3 | 4 |
10f5a21b3a479adc814204f2169b3b324c9a175f | spec/chek_spec.rb | spec/chek_spec.rb | require File.expand_path(File.dirname(__FILE__) + '/spec_helper')
$LOAD_PATH.unshift(File.join(File.dirname(__FILE__)))
describe Chek do
it "normal require" do
expect { require 'foobar-can-not-require' }.to raise_error(LoadError)
expect { require 'foobar-normal' }.to_not raise_error(LoadError)
FoobarNormal.piyo.should == "piyopiyo"
end
context '☑' do
it "require" do
expect { ☑ 'foobar-can-not-require' }.to raise_error(LoadError)
expect { ☑ 'foobar-with-chek' }.to_not raise_error(LoadError)
FoobarWithChek.piyo.should == "piyopiyo"
end
end
context '☐' do
it "does not require" do
expect { ☐ 'foobar-can-not-require' }.to_not raise_error(LoadError)
expect { ☐ 'foobar-without-chek' }.to_not raise_error(LoadError)
expect { FoobarWithoutChek.piyo }.to raise_error(NameError)
end
end
end
| require File.expand_path(File.dirname(__FILE__) + '/spec_helper')
$LOAD_PATH.unshift(File.join(File.dirname(__FILE__)))
describe Chek do
it "normal require" do
expect { require 'foobar-can-not-require' }.to raise_error(LoadError)
expect { require 'foobar-normal' }.not_to raise_error(LoadError)
FoobarNormal.piyo.should == "piyopiyo"
end
context '☑' do
it "require" do
expect { ☑ 'foobar-can-not-require' }.to raise_error(LoadError)
expect { ☑ 'foobar-with-chek' }.not_to raise_error(LoadError)
FoobarWithChek.piyo.should == "piyopiyo"
end
end
context '☐' do
it "does not require" do
expect { ☐ 'foobar-can-not-require' }.not_to raise_error(LoadError)
expect { ☐ 'foobar-without-chek' }.not_to raise_error(LoadError)
expect { FoobarWithoutChek.piyo }.to raise_error(NameError)
end
end
end
| Fix for newer rspec version | Fix for newer rspec version
| Ruby | mit | togusafish/tobynet-_-chek,tobynet/chek | ruby | ## Code Before:
require File.expand_path(File.dirname(__FILE__) + '/spec_helper')
$LOAD_PATH.unshift(File.join(File.dirname(__FILE__)))
describe Chek do
it "normal require" do
expect { require 'foobar-can-not-require' }.to raise_error(LoadError)
expect { require 'foobar-normal' }.to_not raise_error(LoadError)
FoobarNormal.piyo.should == "piyopiyo"
end
context '☑' do
it "require" do
expect { ☑ 'foobar-can-not-require' }.to raise_error(LoadError)
expect { ☑ 'foobar-with-chek' }.to_not raise_error(LoadError)
FoobarWithChek.piyo.should == "piyopiyo"
end
end
context '☐' do
it "does not require" do
expect { ☐ 'foobar-can-not-require' }.to_not raise_error(LoadError)
expect { ☐ 'foobar-without-chek' }.to_not raise_error(LoadError)
expect { FoobarWithoutChek.piyo }.to raise_error(NameError)
end
end
end
## Instruction:
Fix for newer rspec version
## Code After:
require File.expand_path(File.dirname(__FILE__) + '/spec_helper')
$LOAD_PATH.unshift(File.join(File.dirname(__FILE__)))
describe Chek do
it "normal require" do
expect { require 'foobar-can-not-require' }.to raise_error(LoadError)
expect { require 'foobar-normal' }.not_to raise_error(LoadError)
FoobarNormal.piyo.should == "piyopiyo"
end
context '☑' do
it "require" do
expect { ☑ 'foobar-can-not-require' }.to raise_error(LoadError)
expect { ☑ 'foobar-with-chek' }.not_to raise_error(LoadError)
FoobarWithChek.piyo.should == "piyopiyo"
end
end
context '☐' do
it "does not require" do
expect { ☐ 'foobar-can-not-require' }.not_to raise_error(LoadError)
expect { ☐ 'foobar-without-chek' }.not_to raise_error(LoadError)
expect { FoobarWithoutChek.piyo }.to raise_error(NameError)
end
end
end
| require File.expand_path(File.dirname(__FILE__) + '/spec_helper')
$LOAD_PATH.unshift(File.join(File.dirname(__FILE__)))
describe Chek do
it "normal require" do
expect { require 'foobar-can-not-require' }.to raise_error(LoadError)
- expect { require 'foobar-normal' }.to_not raise_error(LoadError)
? ---
+ expect { require 'foobar-normal' }.not_to raise_error(LoadError)
? +++
FoobarNormal.piyo.should == "piyopiyo"
end
context '☑' do
it "require" do
expect { ☑ 'foobar-can-not-require' }.to raise_error(LoadError)
- expect { ☑ 'foobar-with-chek' }.to_not raise_error(LoadError)
? ---
+ expect { ☑ 'foobar-with-chek' }.not_to raise_error(LoadError)
? +++
FoobarWithChek.piyo.should == "piyopiyo"
end
end
context '☐' do
it "does not require" do
- expect { ☐ 'foobar-can-not-require' }.to_not raise_error(LoadError)
? ---
+ expect { ☐ 'foobar-can-not-require' }.not_to raise_error(LoadError)
? +++
- expect { ☐ 'foobar-without-chek' }.to_not raise_error(LoadError)
? ---
+ expect { ☐ 'foobar-without-chek' }.not_to raise_error(LoadError)
? +++
expect { FoobarWithoutChek.piyo }.to raise_error(NameError)
end
end
end
| 8 | 0.296296 | 4 | 4 |
80167ef3d5b62fa747f3df0edfc0b7c2c9d2d4ec | app/models/asset_file.rb | app/models/asset_file.rb | class AssetFile < ActiveRecord::Base
attr_protected :id
belongs_to :account
scope :with_query, lambda { |s| where('name LIKE ? OR file_name LIKE ?', "%#{s}%", "%#{s}%") }
def datastore
return nil unless file
@datastore ||= file.app.datastore
end
def object_url(opts = {})
expires = opts[:expires] || 1.week.from_now
datastore.storage.get_object_http_url(datastore.bucket_name, file_uid, expires, opts)
end
def to_s
name.present? ? name : file_name
end
def to_token_hash
{ :id => id, :text => to_s }
end
def self.class_for_file(filename)
case File.extname(filename).downcase
when ".mp3", ".wav", ".aif", ".aiff"
klass = AudioFile
when ".zip"
klass = ZipFile
when ".jpg", ".gif", ".png", ".jpeg"
klass = ImageFile
else
klass = MediaFile
end
end
def self.create_from_s3(data, attrs = {})
klass = class_for_file(data['name'])
attrs[:file_uid] = data['key']
attrs[:file_name] = data['name']
attrs[:file_size] = data['size']
klass.create(attrs)
end
end
| class AssetFile < ActiveRecord::Base
attr_protected :id
belongs_to :account
scope :with_query, lambda { |s| where('name LIKE ? OR file_name LIKE ?', "%#{s}%", "%#{s}%") }
def datastore
return nil unless file
@datastore ||= file.app.datastore
end
def object_url(opts = {})
expires = opts[:expires] || 1.week.from_now
datastore.storage.get_object_http_url(datastore.bucket_name, file_uid, expires, opts)
end
def safe_file_name
ActiveSupport::Inflector.transliterate(file_name)
end
def to_s
name.present? ? name : file_name
end
def to_token_hash
{ :id => id, :text => to_s }
end
def self.class_for_file(filename)
case File.extname(filename).downcase
when ".mp3", ".wav", ".aif", ".aiff"
klass = AudioFile
when ".zip"
klass = ZipFile
when ".jpg", ".gif", ".png", ".jpeg"
klass = ImageFile
else
klass = MediaFile
end
end
def self.create_from_s3(data, attrs = {})
klass = class_for_file(data['name'])
attrs[:file_uid] = data['key']
attrs[:file_name] = data['name']
attrs[:file_size] = data['size']
klass.create(attrs)
end
end
| Add a safe file name method | Add a safe file name method
| Ruby | mit | mateomurphy/app_frame_assets,mateomurphy/app_frame_assets | ruby | ## Code Before:
class AssetFile < ActiveRecord::Base
attr_protected :id
belongs_to :account
scope :with_query, lambda { |s| where('name LIKE ? OR file_name LIKE ?', "%#{s}%", "%#{s}%") }
def datastore
return nil unless file
@datastore ||= file.app.datastore
end
def object_url(opts = {})
expires = opts[:expires] || 1.week.from_now
datastore.storage.get_object_http_url(datastore.bucket_name, file_uid, expires, opts)
end
def to_s
name.present? ? name : file_name
end
def to_token_hash
{ :id => id, :text => to_s }
end
def self.class_for_file(filename)
case File.extname(filename).downcase
when ".mp3", ".wav", ".aif", ".aiff"
klass = AudioFile
when ".zip"
klass = ZipFile
when ".jpg", ".gif", ".png", ".jpeg"
klass = ImageFile
else
klass = MediaFile
end
end
def self.create_from_s3(data, attrs = {})
klass = class_for_file(data['name'])
attrs[:file_uid] = data['key']
attrs[:file_name] = data['name']
attrs[:file_size] = data['size']
klass.create(attrs)
end
end
## Instruction:
Add a safe file name method
## Code After:
class AssetFile < ActiveRecord::Base
attr_protected :id
belongs_to :account
scope :with_query, lambda { |s| where('name LIKE ? OR file_name LIKE ?', "%#{s}%", "%#{s}%") }
def datastore
return nil unless file
@datastore ||= file.app.datastore
end
def object_url(opts = {})
expires = opts[:expires] || 1.week.from_now
datastore.storage.get_object_http_url(datastore.bucket_name, file_uid, expires, opts)
end
def safe_file_name
ActiveSupport::Inflector.transliterate(file_name)
end
def to_s
name.present? ? name : file_name
end
def to_token_hash
{ :id => id, :text => to_s }
end
def self.class_for_file(filename)
case File.extname(filename).downcase
when ".mp3", ".wav", ".aif", ".aiff"
klass = AudioFile
when ".zip"
klass = ZipFile
when ".jpg", ".gif", ".png", ".jpeg"
klass = ImageFile
else
klass = MediaFile
end
end
def self.create_from_s3(data, attrs = {})
klass = class_for_file(data['name'])
attrs[:file_uid] = data['key']
attrs[:file_name] = data['name']
attrs[:file_size] = data['size']
klass.create(attrs)
end
end
| class AssetFile < ActiveRecord::Base
attr_protected :id
belongs_to :account
scope :with_query, lambda { |s| where('name LIKE ? OR file_name LIKE ?', "%#{s}%", "%#{s}%") }
def datastore
return nil unless file
@datastore ||= file.app.datastore
end
def object_url(opts = {})
expires = opts[:expires] || 1.week.from_now
datastore.storage.get_object_http_url(datastore.bucket_name, file_uid, expires, opts)
end
+
+ def safe_file_name
+ ActiveSupport::Inflector.transliterate(file_name)
+ end
def to_s
name.present? ? name : file_name
end
def to_token_hash
{ :id => id, :text => to_s }
end
def self.class_for_file(filename)
case File.extname(filename).downcase
when ".mp3", ".wav", ".aif", ".aiff"
klass = AudioFile
when ".zip"
klass = ZipFile
when ".jpg", ".gif", ".png", ".jpeg"
klass = ImageFile
else
klass = MediaFile
end
end
def self.create_from_s3(data, attrs = {})
klass = class_for_file(data['name'])
attrs[:file_uid] = data['key']
attrs[:file_name] = data['name']
attrs[:file_size] = data['size']
klass.create(attrs)
end
end | 4 | 0.081633 | 4 | 0 |
4384facfcbc5f1ea91918f520e5262a44550dd3f | lisp/editor.el | lisp/editor.el | ;; set your desired tab width
(setq-default indent-tabs-mode nil)
;; enable y/n answers
(fset 'yes-or-no-p 'y-or-n-p)
;; highlight matching parens
(show-paren-mode t)
;; highlight current line
(add-hook 'after-change-major-mode-hook 'hl-line-mode)
;; show line numbers in programming modes
(add-hook 'prog-mode-hook 'linum-mode)
(setq linum-format "%4d ")
;; spellchecker in comments for programming modes
(add-hook 'prog-mode-hook (lambda () (flyspell-prog-mode)))
(setq flyspell-issue-message-flag nil)
;; enable interactive do
(ido-mode t)
;; disable auto-save capabilities
(setq make-backup-files nil)
(setq auto-save-default nil)
;; disable line wrapping
(set-default 'truncate-lines t)
;; quick whitespace-mode shortcut
(global-set-key (kbd "C-x w") 'whitespace-mode)
| ;; set your desired tab width
(setq-default indent-tabs-mode nil)
;; enable y/n answers
(fset 'yes-or-no-p 'y-or-n-p)
;; highlight matching parens
(show-paren-mode t)
;; highlight current line
(add-hook 'after-change-major-mode-hook 'hl-line-mode)
;; show line numbers in programming modes
(add-hook 'prog-mode-hook 'linum-mode)
(setq linum-format "%4d ")
;; spellchecker in comments for programming modes
(add-hook 'prog-mode-hook 'flyspell-prog-mode)
(setq flyspell-issue-message-flag nil)
;; enable interactive do
(ido-mode t)
;; disable auto-save capabilities
(setq make-backup-files nil)
(setq auto-save-default nil)
;; disable line wrapping
(set-default 'truncate-lines t)
;; quick whitespace-mode shortcut
(global-set-key (kbd "C-x w") 'whitespace-mode)
;; enable follow-mode for multi-column editing in large screens
(add-hook 'prog-mode-hook 'follow-mode)
| Enable follow-mode for programming modes, for multi-column editing. | Enable follow-mode for programming modes, for multi-column editing.
| Emacs Lisp | bsd-2-clause | danielfm/dotfiles-emacs | emacs-lisp | ## Code Before:
;; set your desired tab width
(setq-default indent-tabs-mode nil)
;; enable y/n answers
(fset 'yes-or-no-p 'y-or-n-p)
;; highlight matching parens
(show-paren-mode t)
;; highlight current line
(add-hook 'after-change-major-mode-hook 'hl-line-mode)
;; show line numbers in programming modes
(add-hook 'prog-mode-hook 'linum-mode)
(setq linum-format "%4d ")
;; spellchecker in comments for programming modes
(add-hook 'prog-mode-hook (lambda () (flyspell-prog-mode)))
(setq flyspell-issue-message-flag nil)
;; enable interactive do
(ido-mode t)
;; disable auto-save capabilities
(setq make-backup-files nil)
(setq auto-save-default nil)
;; disable line wrapping
(set-default 'truncate-lines t)
;; quick whitespace-mode shortcut
(global-set-key (kbd "C-x w") 'whitespace-mode)
## Instruction:
Enable follow-mode for programming modes, for multi-column editing.
## Code After:
;; set your desired tab width
(setq-default indent-tabs-mode nil)
;; enable y/n answers
(fset 'yes-or-no-p 'y-or-n-p)
;; highlight matching parens
(show-paren-mode t)
;; highlight current line
(add-hook 'after-change-major-mode-hook 'hl-line-mode)
;; show line numbers in programming modes
(add-hook 'prog-mode-hook 'linum-mode)
(setq linum-format "%4d ")
;; spellchecker in comments for programming modes
(add-hook 'prog-mode-hook 'flyspell-prog-mode)
(setq flyspell-issue-message-flag nil)
;; enable interactive do
(ido-mode t)
;; disable auto-save capabilities
(setq make-backup-files nil)
(setq auto-save-default nil)
;; disable line wrapping
(set-default 'truncate-lines t)
;; quick whitespace-mode shortcut
(global-set-key (kbd "C-x w") 'whitespace-mode)
;; enable follow-mode for multi-column editing in large screens
(add-hook 'prog-mode-hook 'follow-mode)
| ;; set your desired tab width
(setq-default indent-tabs-mode nil)
;; enable y/n answers
(fset 'yes-or-no-p 'y-or-n-p)
;; highlight matching parens
(show-paren-mode t)
;; highlight current line
(add-hook 'after-change-major-mode-hook 'hl-line-mode)
;; show line numbers in programming modes
(add-hook 'prog-mode-hook 'linum-mode)
(setq linum-format "%4d ")
;; spellchecker in comments for programming modes
- (add-hook 'prog-mode-hook (lambda () (flyspell-prog-mode)))
? ^^^^^^^^^^^^ --
+ (add-hook 'prog-mode-hook 'flyspell-prog-mode)
? ^
(setq flyspell-issue-message-flag nil)
;; enable interactive do
(ido-mode t)
;; disable auto-save capabilities
(setq make-backup-files nil)
(setq auto-save-default nil)
;; disable line wrapping
(set-default 'truncate-lines t)
;; quick whitespace-mode shortcut
(global-set-key (kbd "C-x w") 'whitespace-mode)
+
+ ;; enable follow-mode for multi-column editing in large screens
+ (add-hook 'prog-mode-hook 'follow-mode) | 5 | 0.15625 | 4 | 1 |
e681b012d9746fff0ee77226054c336178ef6879 | README.md | README.md |
The introduction of Promises and Generators in ECMAScript presents an opportunity to dramatically improve the language-level model for writing asynchronous code in ECMAScript. The spec text can be found [here](https://tc39.github.io/ecmascript-asyncawait).
This proposal is implemented in a [regenerator](https://github.com/facebook/regenerator) which can compile ES5 code containing `async` and `await` down to vanilla ES5 to run in existing browsers and runtimes.
This repo contains a complete example using a large number of the features of the proposal. To run this example:
```Shell
npm install
regenerator -r server.asyncawait.js | node
```
## Debatable Syntax & Semantics
### Awaiting Non-Promise
When the value passed to `await` is a Promise, the completion of the async function is scheduled on completion of the Promise. For non-promises, behaviour aligns with Promise conversion rules according to the proposed semantic polyfill.
### Surface syntax
Instead of `async function`/`await`, the following are options:
- `function^`/`await`
- `function!`/`yield`
- `function!`/`await`
- `function^`/`yield`
|
The introduction of Promises and Generators in ECMAScript presents an opportunity to dramatically improve the language-level model for writing asynchronous code in ECMAScript. The spec text can be found [here](https://tc39.github.io/ecmascript-asyncawait).
This proposal is implemented in a [regenerator](https://github.com/facebook/regenerator) which can compile ES5 code containing `async` and `await` down to vanilla ES5 to run in existing browsers and runtimes.
This repo contains a complete example using a large number of the features of the proposal. To run this example:
```Shell
npm install
regenerator -r server.asyncawait.js | node
```
| Remove debatable syntax and semantics | Remove debatable syntax and semantics
This proposal is done and baked. People should not come to this repo and be under the impression there are open questions. | Markdown | apache-2.0 | tc39/ecmascript-asyncawait,lukehoban/ecmascript-asyncawait,tc39/ecmascript-asyncawait,lukehoban/ecmascript-asyncawait | markdown | ## Code Before:
The introduction of Promises and Generators in ECMAScript presents an opportunity to dramatically improve the language-level model for writing asynchronous code in ECMAScript. The spec text can be found [here](https://tc39.github.io/ecmascript-asyncawait).
This proposal is implemented in a [regenerator](https://github.com/facebook/regenerator) which can compile ES5 code containing `async` and `await` down to vanilla ES5 to run in existing browsers and runtimes.
This repo contains a complete example using a large number of the features of the proposal. To run this example:
```Shell
npm install
regenerator -r server.asyncawait.js | node
```
## Debatable Syntax & Semantics
### Awaiting Non-Promise
When the value passed to `await` is a Promise, the completion of the async function is scheduled on completion of the Promise. For non-promises, behaviour aligns with Promise conversion rules according to the proposed semantic polyfill.
### Surface syntax
Instead of `async function`/`await`, the following are options:
- `function^`/`await`
- `function!`/`yield`
- `function!`/`await`
- `function^`/`yield`
## Instruction:
Remove debatable syntax and semantics
This proposal is done and baked. People should not come to this repo and be under the impression there are open questions.
## Code After:
The introduction of Promises and Generators in ECMAScript presents an opportunity to dramatically improve the language-level model for writing asynchronous code in ECMAScript. The spec text can be found [here](https://tc39.github.io/ecmascript-asyncawait).
This proposal is implemented in a [regenerator](https://github.com/facebook/regenerator) which can compile ES5 code containing `async` and `await` down to vanilla ES5 to run in existing browsers and runtimes.
This repo contains a complete example using a large number of the features of the proposal. To run this example:
```Shell
npm install
regenerator -r server.asyncawait.js | node
```
|
The introduction of Promises and Generators in ECMAScript presents an opportunity to dramatically improve the language-level model for writing asynchronous code in ECMAScript. The spec text can be found [here](https://tc39.github.io/ecmascript-asyncawait).
This proposal is implemented in a [regenerator](https://github.com/facebook/regenerator) which can compile ES5 code containing `async` and `await` down to vanilla ES5 to run in existing browsers and runtimes.
This repo contains a complete example using a large number of the features of the proposal. To run this example:
```Shell
npm install
regenerator -r server.asyncawait.js | node
```
-
- ## Debatable Syntax & Semantics
-
- ### Awaiting Non-Promise
- When the value passed to `await` is a Promise, the completion of the async function is scheduled on completion of the Promise. For non-promises, behaviour aligns with Promise conversion rules according to the proposed semantic polyfill.
-
- ### Surface syntax
- Instead of `async function`/`await`, the following are options:
- - `function^`/`await`
- - `function!`/`yield`
- - `function!`/`await`
- - `function^`/`yield` | 12 | 0.5 | 0 | 12 |
bc19f46d193037a1f44bf322e492d117ae158b30 | app/partials/include/tumor-extent.jade | app/partials/include/tumor-extent.jade | .qi-subtable-title Tumor Extent
.qi-panel.qi-nested(ng-controller='TumorExtentCtrl')
.qi-ver-space
table.table.qi-table
tr
th Length
td {{ extent.length }} mm
tr
th Width
td {{ extent.width }} mm
tr
th Depth
td {{ extent.depth }} mm
| .qi-subtable-title Tumor Extent
.qi-panel.qi-nested(ng-controller='TumorExtentCtrl')
.qi-ver-space
table.table.qi-table
tr(ng-show="extent.length != null")
th Length
td {{ extent.length }} mm
tr(ng-show="extent.width != null")
th Width
td {{ extent.width }} mm
tr(ng-show="extent.depth != null")
th Depth
td {{ extent.depth }} mm
| Hide rows with null values in tumor extent panel. | Hide rows with null values in tumor extent panel.
| Jade | bsd-2-clause | ohsu-qin/qiprofile,ohsu-qin/qiprofile,ohsu-qin/qiprofile,ohsu-qin/qiprofile | jade | ## Code Before:
.qi-subtable-title Tumor Extent
.qi-panel.qi-nested(ng-controller='TumorExtentCtrl')
.qi-ver-space
table.table.qi-table
tr
th Length
td {{ extent.length }} mm
tr
th Width
td {{ extent.width }} mm
tr
th Depth
td {{ extent.depth }} mm
## Instruction:
Hide rows with null values in tumor extent panel.
## Code After:
.qi-subtable-title Tumor Extent
.qi-panel.qi-nested(ng-controller='TumorExtentCtrl')
.qi-ver-space
table.table.qi-table
tr(ng-show="extent.length != null")
th Length
td {{ extent.length }} mm
tr(ng-show="extent.width != null")
th Width
td {{ extent.width }} mm
tr(ng-show="extent.depth != null")
th Depth
td {{ extent.depth }} mm
| .qi-subtable-title Tumor Extent
.qi-panel.qi-nested(ng-controller='TumorExtentCtrl')
.qi-ver-space
table.table.qi-table
- tr
+ tr(ng-show="extent.length != null")
th Length
td {{ extent.length }} mm
- tr
+ tr(ng-show="extent.width != null")
th Width
td {{ extent.width }} mm
- tr
+ tr(ng-show="extent.depth != null")
th Depth
td {{ extent.depth }} mm | 6 | 0.428571 | 3 | 3 |
c16422676ee1d4b7abe4fa41ff357f0bbe263b47 | src/Artsy/Router/Utils/findCurrentRoute.tsx | src/Artsy/Router/Utils/findCurrentRoute.tsx | import { Match, RouteConfig } from "found"
export const findCurrentRoute = ({
routes,
routeIndices,
}: Match & { route?: RouteConfig }) => {
let remainingRouteIndicies = [...routeIndices]
let route: RouteConfig = routes[remainingRouteIndicies.shift()]
while (remainingRouteIndicies.length > 0) {
route = route.children[remainingRouteIndicies.shift()]
}
return route
}
| import { Match, RouteConfig } from "found"
export const findCurrentRoute = ({
route: baseRoute,
routes,
routeIndices,
}: Match & { route?: RouteConfig }) => {
if (!routeIndices || routeIndices.length === 0) {
return baseRoute
}
let remainingRouteIndicies = [...routeIndices]
let route: RouteConfig = routes[remainingRouteIndicies.shift()]
while (remainingRouteIndicies.length > 0) {
route = route.children[remainingRouteIndicies.shift()]
}
return route
}
| Add fallback logic if routeIndicies aren't present | Add fallback logic if routeIndicies aren't present
| TypeScript | mit | artsy/reaction-force,artsy/reaction-force,artsy/reaction,artsy/reaction,artsy/reaction | typescript | ## Code Before:
import { Match, RouteConfig } from "found"
export const findCurrentRoute = ({
routes,
routeIndices,
}: Match & { route?: RouteConfig }) => {
let remainingRouteIndicies = [...routeIndices]
let route: RouteConfig = routes[remainingRouteIndicies.shift()]
while (remainingRouteIndicies.length > 0) {
route = route.children[remainingRouteIndicies.shift()]
}
return route
}
## Instruction:
Add fallback logic if routeIndicies aren't present
## Code After:
import { Match, RouteConfig } from "found"
export const findCurrentRoute = ({
route: baseRoute,
routes,
routeIndices,
}: Match & { route?: RouteConfig }) => {
if (!routeIndices || routeIndices.length === 0) {
return baseRoute
}
let remainingRouteIndicies = [...routeIndices]
let route: RouteConfig = routes[remainingRouteIndicies.shift()]
while (remainingRouteIndicies.length > 0) {
route = route.children[remainingRouteIndicies.shift()]
}
return route
}
| import { Match, RouteConfig } from "found"
export const findCurrentRoute = ({
+ route: baseRoute,
routes,
routeIndices,
}: Match & { route?: RouteConfig }) => {
+ if (!routeIndices || routeIndices.length === 0) {
+ return baseRoute
+ }
let remainingRouteIndicies = [...routeIndices]
let route: RouteConfig = routes[remainingRouteIndicies.shift()]
while (remainingRouteIndicies.length > 0) {
route = route.children[remainingRouteIndicies.shift()]
}
return route
} | 4 | 0.266667 | 4 | 0 |
d6881528efe425ad059f513701f8891a940f6d3c | README.rst | README.rst | This is an initial prototype for redesigning Peer Grading and general Open Ended
Submission Evaluation. This project is in the early stages of development and is
not ready for general use.
Installation
============
The intent of this project is to be installed as Django apps that will be
included in `edx-platform <https://github.com/edx/edx-platform>`_. To install
for development purposes, run::
pip install -r requirements/dev.txt
pip install -e .
The second line is necessary to register edx-tim's XBlock so that it will show
up in the XBlock workbench.
License
=======
The code in this repository is licensed under version 3 of the AGPL unless
otherwise noted.
Please see ``LICENSE.txt`` for details.
How to Contribute
=================
Due to the very early stage of development we're at, we are not accepting
contributions at this time. Large portions of the API can change with little
notice.
Reporting Security Issues
=========================
Please do not report security issues in public. Please email security@edx.org
Mailing List and IRC Channel
============================
You can discuss this code on the
`edx-code Google Group <https://groups.google.com/forum/#!forum/edx-code>`_ or
in the `edx-code` IRC channel on Freenode.
| .. image:: https://travis-ci.org/edx/edx-tim.png?branch=master
:alt: Travis build status
This is an initial prototype for redesigning Peer Grading and general Open Ended
Submission Evaluation. This project is in the early stages of development and is
not ready for general use.
Installation
============
The intent of this project is to be installed as Django apps that will be
included in `edx-platform <https://github.com/edx/edx-platform>`_. To install
for development purposes, run::
pip install -r requirements/dev.txt
pip install -e .
The second line is necessary to register edx-tim's XBlock so that it will show
up in the XBlock workbench.
License
=======
The code in this repository is licensed under version 3 of the AGPL unless
otherwise noted.
Please see ``LICENSE.txt`` for details.
How to Contribute
=================
Due to the very early stage of development we're at, we are not accepting
contributions at this time. Large portions of the API can change with little
notice.
Reporting Security Issues
=========================
Please do not report security issues in public. Please email security@edx.org
Mailing List and IRC Channel
============================
You can discuss this code on the
`edx-code Google Group <https://groups.google.com/forum/#!forum/edx-code>`_ or
in the `edx-code` IRC channel on Freenode.
| Add Travis build status badge | Add Travis build status badge
| reStructuredText | agpl-3.0 | eestay/edx-ora2,vasyarv/edx-ora2,singingwolfboy/edx-ora2,EDUlib/edx-ora2,vasyarv/edx-ora2,ubc/edx-ora2,eestay/edx-ora2,nttks/edx-ora2,singingwolfboy/edx-ora2,Edraak/edx-ora2,Stanford-Online/edx-ora2,edx/edx-ora2,miptliot/edx-ora2,singingwolfboy/edx-ora2,ubc/edx-ora2,eestay/edx-ora2,eestay/edx-ora2,Edraak/edx-ora2,Lektorium-LLC/edx-ora2,ubc/edx-ora2,edx/edx-ora2,vasyarv/edx-ora2,nttks/edx-ora2,Stanford-Online/edx-ora2,edx/edx-ora2,Edraak/edx-ora2,kursitet/edx-ora2,nttks/edx-ora2,singingwolfboy/edx-ora2,edx/edx-ora2,nttks/edx-ora2,Edraak/edx-ora2,kursitet/edx-ora2,miptliot/edx-ora2,EDUlib/edx-ora2,EDUlib/edx-ora2,Lektorium-LLC/edx-ora2,Stanford-Online/edx-ora2,kursitet/edx-ora2,Lektorium-LLC/edx-ora2,EDUlib/edx-ora2,vasyarv/edx-ora2,miptliot/edx-ora2,ubc/edx-ora2,Stanford-Online/edx-ora2,miptliot/edx-ora2,kursitet/edx-ora2,Lektorium-LLC/edx-ora2 | restructuredtext | ## Code Before:
This is an initial prototype for redesigning Peer Grading and general Open Ended
Submission Evaluation. This project is in the early stages of development and is
not ready for general use.
Installation
============
The intent of this project is to be installed as Django apps that will be
included in `edx-platform <https://github.com/edx/edx-platform>`_. To install
for development purposes, run::
pip install -r requirements/dev.txt
pip install -e .
The second line is necessary to register edx-tim's XBlock so that it will show
up in the XBlock workbench.
License
=======
The code in this repository is licensed under version 3 of the AGPL unless
otherwise noted.
Please see ``LICENSE.txt`` for details.
How to Contribute
=================
Due to the very early stage of development we're at, we are not accepting
contributions at this time. Large portions of the API can change with little
notice.
Reporting Security Issues
=========================
Please do not report security issues in public. Please email security@edx.org
Mailing List and IRC Channel
============================
You can discuss this code on the
`edx-code Google Group <https://groups.google.com/forum/#!forum/edx-code>`_ or
in the `edx-code` IRC channel on Freenode.
## Instruction:
Add Travis build status badge
## Code After:
.. image:: https://travis-ci.org/edx/edx-tim.png?branch=master
:alt: Travis build status
This is an initial prototype for redesigning Peer Grading and general Open Ended
Submission Evaluation. This project is in the early stages of development and is
not ready for general use.
Installation
============
The intent of this project is to be installed as Django apps that will be
included in `edx-platform <https://github.com/edx/edx-platform>`_. To install
for development purposes, run::
pip install -r requirements/dev.txt
pip install -e .
The second line is necessary to register edx-tim's XBlock so that it will show
up in the XBlock workbench.
License
=======
The code in this repository is licensed under version 3 of the AGPL unless
otherwise noted.
Please see ``LICENSE.txt`` for details.
How to Contribute
=================
Due to the very early stage of development we're at, we are not accepting
contributions at this time. Large portions of the API can change with little
notice.
Reporting Security Issues
=========================
Please do not report security issues in public. Please email security@edx.org
Mailing List and IRC Channel
============================
You can discuss this code on the
`edx-code Google Group <https://groups.google.com/forum/#!forum/edx-code>`_ or
in the `edx-code` IRC channel on Freenode.
| + .. image:: https://travis-ci.org/edx/edx-tim.png?branch=master
+ :alt: Travis build status
+
This is an initial prototype for redesigning Peer Grading and general Open Ended
Submission Evaluation. This project is in the early stages of development and is
not ready for general use.
Installation
============
The intent of this project is to be installed as Django apps that will be
included in `edx-platform <https://github.com/edx/edx-platform>`_. To install
for development purposes, run::
pip install -r requirements/dev.txt
pip install -e .
The second line is necessary to register edx-tim's XBlock so that it will show
up in the XBlock workbench.
License
=======
The code in this repository is licensed under version 3 of the AGPL unless
otherwise noted.
Please see ``LICENSE.txt`` for details.
How to Contribute
=================
Due to the very early stage of development we're at, we are not accepting
contributions at this time. Large portions of the API can change with little
notice.
Reporting Security Issues
=========================
Please do not report security issues in public. Please email security@edx.org
Mailing List and IRC Channel
============================
You can discuss this code on the
`edx-code Google Group <https://groups.google.com/forum/#!forum/edx-code>`_ or
in the `edx-code` IRC channel on Freenode. | 3 | 0.069767 | 3 | 0 |
443fed801c814c05cafa2f07c0067619630b49c9 | src/main/scala/com/ftchinese/jobs/database/AnalyticDataSource.scala | src/main/scala/com/ftchinese/jobs/database/AnalyticDataSource.scala | package com.ftchinese.jobs.database
import java.sql.Connection
import com.zaxxer.hikari.HikariDataSource
/**
* Analytic database source
* Created by GWB on 2014/12/11.
*/
class AnalyticDataSource {
private val ds = new HikariDataSource()
ds.setMaximumPoolSize(10)
def setUrl(url: String): Unit ={
ds.setJdbcUrl(url)
}
def setUsername(username: String): Unit ={
ds.setUsername(username)
}
def setPassword(password: String): Unit ={
ds.setPassword(password)
}
def getConnection: Connection = ds.getConnection
}
| package com.ftchinese.jobs.database
import java.sql.Connection
import com.zaxxer.hikari.HikariDataSource
/**
* Analytic database source
* Created by GWB on 2014/12/11.
*/
class AnalyticDataSource {
private val ds = new HikariDataSource()
ds.setMinimumIdle(1)
ds.setMaximumPoolSize(10)
def setUrl(url: String): Unit ={
ds.setJdbcUrl(url)
}
def setUsername(username: String): Unit ={
ds.setUsername(username)
}
def setPassword(password: String): Unit ={
ds.setPassword(password)
}
def getConnection: Connection = ds.getConnection
}
| Switch to new connection pool. | Switch to new connection pool.
| Scala | mit | FTChinese/push,FTChinese/push,FTChinese/push | scala | ## Code Before:
package com.ftchinese.jobs.database
import java.sql.Connection
import com.zaxxer.hikari.HikariDataSource
/**
* Analytic database source
* Created by GWB on 2014/12/11.
*/
class AnalyticDataSource {
private val ds = new HikariDataSource()
ds.setMaximumPoolSize(10)
def setUrl(url: String): Unit ={
ds.setJdbcUrl(url)
}
def setUsername(username: String): Unit ={
ds.setUsername(username)
}
def setPassword(password: String): Unit ={
ds.setPassword(password)
}
def getConnection: Connection = ds.getConnection
}
## Instruction:
Switch to new connection pool.
## Code After:
package com.ftchinese.jobs.database
import java.sql.Connection
import com.zaxxer.hikari.HikariDataSource
/**
* Analytic database source
* Created by GWB on 2014/12/11.
*/
class AnalyticDataSource {
private val ds = new HikariDataSource()
ds.setMinimumIdle(1)
ds.setMaximumPoolSize(10)
def setUrl(url: String): Unit ={
ds.setJdbcUrl(url)
}
def setUsername(username: String): Unit ={
ds.setUsername(username)
}
def setPassword(password: String): Unit ={
ds.setPassword(password)
}
def getConnection: Connection = ds.getConnection
}
| package com.ftchinese.jobs.database
import java.sql.Connection
import com.zaxxer.hikari.HikariDataSource
/**
* Analytic database source
* Created by GWB on 2014/12/11.
*/
class AnalyticDataSource {
private val ds = new HikariDataSource()
+ ds.setMinimumIdle(1)
ds.setMaximumPoolSize(10)
def setUrl(url: String): Unit ={
ds.setJdbcUrl(url)
}
def setUsername(username: String): Unit ={
ds.setUsername(username)
}
def setPassword(password: String): Unit ={
ds.setPassword(password)
}
def getConnection: Connection = ds.getConnection
} | 1 | 0.034483 | 1 | 0 |
308cbf1f62e254643a0ad47db8ad55eb63e1c888 | argonauts/testutils.py | argonauts/testutils.py | import json
import functools
from django.conf import settings
from django.test import Client, TestCase
__all__ = ['JsonTestClient', 'JsonTestCase']
class JsonTestClient(Client):
def _json_request(self, method, url, data=None, *args, **kwargs):
method_func = getattr(super(JsonTestClient, self), method)
if method == 'get':
encode = lambda x: x
else:
encode = json.dumps
if data is not None:
resp = method_func(url, encode(data), content_type='application/json', *args, **kwargs)
else:
resp = method_func(url, content_type='application/json', *args, **kwargs)
if resp['Content-Type'].startswith('application/json') and resp.content:
charset = resp.charset or settings.DEFAULT_CHARSET
resp.json = json.loads(resp.content.decode(charset))
return resp
def __getattribute__(self, attr):
if attr in ('get', 'post', 'put', 'delete', 'trace', 'head', 'patch', 'options'):
return functools.partial(self._json_request, attr)
else:
return super(JsonTestClient, self).__getattribute__(attr)
class JsonTestCase(TestCase):
client_class = JsonTestClient
| import json
import functools
from django.conf import settings
from django.test import Client, TestCase
__all__ = ['JsonTestClient', 'JsonTestMixin', 'JsonTestCase']
class JsonTestClient(Client):
def _json_request(self, method, url, data=None, *args, **kwargs):
method_func = getattr(super(JsonTestClient, self), method)
if method == 'get':
encode = lambda x: x
else:
encode = json.dumps
if data is not None:
resp = method_func(url, encode(data), content_type='application/json', *args, **kwargs)
else:
resp = method_func(url, content_type='application/json', *args, **kwargs)
if resp['Content-Type'].startswith('application/json') and resp.content:
charset = resp.charset or settings.DEFAULT_CHARSET
resp.json = json.loads(resp.content.decode(charset))
return resp
def __getattribute__(self, attr):
if attr in ('get', 'post', 'put', 'delete', 'trace', 'head', 'patch', 'options'):
return functools.partial(self._json_request, attr)
else:
return super(JsonTestClient, self).__getattribute__(attr)
class JsonTestMixin(object):
client_class = JsonTestClient
class JsonTestCase(JsonTestMixin, TestCase):
pass
| Make the TestCase a mixin | Make the TestCase a mixin
| Python | bsd-2-clause | fusionbox/django-argonauts | python | ## Code Before:
import json
import functools
from django.conf import settings
from django.test import Client, TestCase
__all__ = ['JsonTestClient', 'JsonTestCase']
class JsonTestClient(Client):
def _json_request(self, method, url, data=None, *args, **kwargs):
method_func = getattr(super(JsonTestClient, self), method)
if method == 'get':
encode = lambda x: x
else:
encode = json.dumps
if data is not None:
resp = method_func(url, encode(data), content_type='application/json', *args, **kwargs)
else:
resp = method_func(url, content_type='application/json', *args, **kwargs)
if resp['Content-Type'].startswith('application/json') and resp.content:
charset = resp.charset or settings.DEFAULT_CHARSET
resp.json = json.loads(resp.content.decode(charset))
return resp
def __getattribute__(self, attr):
if attr in ('get', 'post', 'put', 'delete', 'trace', 'head', 'patch', 'options'):
return functools.partial(self._json_request, attr)
else:
return super(JsonTestClient, self).__getattribute__(attr)
class JsonTestCase(TestCase):
client_class = JsonTestClient
## Instruction:
Make the TestCase a mixin
## Code After:
import json
import functools
from django.conf import settings
from django.test import Client, TestCase
__all__ = ['JsonTestClient', 'JsonTestMixin', 'JsonTestCase']
class JsonTestClient(Client):
def _json_request(self, method, url, data=None, *args, **kwargs):
method_func = getattr(super(JsonTestClient, self), method)
if method == 'get':
encode = lambda x: x
else:
encode = json.dumps
if data is not None:
resp = method_func(url, encode(data), content_type='application/json', *args, **kwargs)
else:
resp = method_func(url, content_type='application/json', *args, **kwargs)
if resp['Content-Type'].startswith('application/json') and resp.content:
charset = resp.charset or settings.DEFAULT_CHARSET
resp.json = json.loads(resp.content.decode(charset))
return resp
def __getattribute__(self, attr):
if attr in ('get', 'post', 'put', 'delete', 'trace', 'head', 'patch', 'options'):
return functools.partial(self._json_request, attr)
else:
return super(JsonTestClient, self).__getattribute__(attr)
class JsonTestMixin(object):
client_class = JsonTestClient
class JsonTestCase(JsonTestMixin, TestCase):
pass
| import json
import functools
from django.conf import settings
from django.test import Client, TestCase
- __all__ = ['JsonTestClient', 'JsonTestCase']
+ __all__ = ['JsonTestClient', 'JsonTestMixin', 'JsonTestCase']
? +++++++++++++++++
class JsonTestClient(Client):
def _json_request(self, method, url, data=None, *args, **kwargs):
method_func = getattr(super(JsonTestClient, self), method)
if method == 'get':
encode = lambda x: x
else:
encode = json.dumps
if data is not None:
resp = method_func(url, encode(data), content_type='application/json', *args, **kwargs)
else:
resp = method_func(url, content_type='application/json', *args, **kwargs)
if resp['Content-Type'].startswith('application/json') and resp.content:
charset = resp.charset or settings.DEFAULT_CHARSET
resp.json = json.loads(resp.content.decode(charset))
return resp
def __getattribute__(self, attr):
if attr in ('get', 'post', 'put', 'delete', 'trace', 'head', 'patch', 'options'):
return functools.partial(self._json_request, attr)
else:
return super(JsonTestClient, self).__getattribute__(attr)
- class JsonTestCase(TestCase):
+ class JsonTestMixin(object):
client_class = JsonTestClient
+
+ class JsonTestCase(JsonTestMixin, TestCase):
+ pass | 7 | 0.184211 | 5 | 2 |
2bd449678d34187efdf3e4ca92daa28ea1d9fa48 | imagemodal/mixins/fragment.py | imagemodal/mixins/fragment.py | from __future__ import absolute_import
from django.template.context import Context
from xblock.fragment import Fragment
class XBlockFragmentBuilderMixin(object):
"""
Create a default XBlock fragment builder
"""
def build_fragment(
self,
template='',
context=None,
css=None,
js=None,
js_init=None,
):
"""
Creates a fragment for display.
"""
template = 'templates/' + template
context = context or {}
css = css or []
js = js or []
rendered_template = ''
if template:
rendered_template = self.loader.render_django_template(
template,
context=Context(context),
i18n_service=self.runtime.service(self, 'i18n'),
)
fragment = Fragment(rendered_template)
for item in css:
if item.startswith('/'):
url = item
else:
item = 'public/' + item
url = self.runtime.local_resource_url(self, item)
fragment.add_css_url(url)
for item in js:
item = 'public/' + item
url = self.runtime.local_resource_url(self, item)
fragment.add_javascript_url(url)
if js_init:
fragment.initialize_js(js_init)
return fragment
| from __future__ import absolute_import
from django.template.context import Context
from xblock.fragment import Fragment
class XBlockFragmentBuilderMixin(object):
"""
Create a default XBlock fragment builder
"""
def build_fragment(
self,
template='',
context=None,
css=None,
js=None,
js_init=None,
):
"""
Creates a fragment for display.
"""
context = context or {}
css = css or []
js = js or []
rendered_template = ''
if template:
template = 'templates/' + template
rendered_template = self.loader.render_django_template(
template,
context=Context(context),
i18n_service=self.runtime.service(self, 'i18n'),
)
fragment = Fragment(rendered_template)
for item in css:
if item.startswith('/'):
url = item
else:
item = 'public/' + item
url = self.runtime.local_resource_url(self, item)
fragment.add_css_url(url)
for item in js:
item = 'public/' + item
url = self.runtime.local_resource_url(self, item)
fragment.add_javascript_url(url)
if js_init:
fragment.initialize_js(js_init)
return fragment
| Fix bug to make template optional | Fix bug to make template optional
| Python | agpl-3.0 | Stanford-Online/xblock-image-modal,Stanford-Online/xblock-image-modal,Stanford-Online/xblock-image-modal | python | ## Code Before:
from __future__ import absolute_import
from django.template.context import Context
from xblock.fragment import Fragment
class XBlockFragmentBuilderMixin(object):
"""
Create a default XBlock fragment builder
"""
def build_fragment(
self,
template='',
context=None,
css=None,
js=None,
js_init=None,
):
"""
Creates a fragment for display.
"""
template = 'templates/' + template
context = context or {}
css = css or []
js = js or []
rendered_template = ''
if template:
rendered_template = self.loader.render_django_template(
template,
context=Context(context),
i18n_service=self.runtime.service(self, 'i18n'),
)
fragment = Fragment(rendered_template)
for item in css:
if item.startswith('/'):
url = item
else:
item = 'public/' + item
url = self.runtime.local_resource_url(self, item)
fragment.add_css_url(url)
for item in js:
item = 'public/' + item
url = self.runtime.local_resource_url(self, item)
fragment.add_javascript_url(url)
if js_init:
fragment.initialize_js(js_init)
return fragment
## Instruction:
Fix bug to make template optional
## Code After:
from __future__ import absolute_import
from django.template.context import Context
from xblock.fragment import Fragment
class XBlockFragmentBuilderMixin(object):
"""
Create a default XBlock fragment builder
"""
def build_fragment(
self,
template='',
context=None,
css=None,
js=None,
js_init=None,
):
"""
Creates a fragment for display.
"""
context = context or {}
css = css or []
js = js or []
rendered_template = ''
if template:
template = 'templates/' + template
rendered_template = self.loader.render_django_template(
template,
context=Context(context),
i18n_service=self.runtime.service(self, 'i18n'),
)
fragment = Fragment(rendered_template)
for item in css:
if item.startswith('/'):
url = item
else:
item = 'public/' + item
url = self.runtime.local_resource_url(self, item)
fragment.add_css_url(url)
for item in js:
item = 'public/' + item
url = self.runtime.local_resource_url(self, item)
fragment.add_javascript_url(url)
if js_init:
fragment.initialize_js(js_init)
return fragment
| from __future__ import absolute_import
from django.template.context import Context
from xblock.fragment import Fragment
class XBlockFragmentBuilderMixin(object):
"""
Create a default XBlock fragment builder
"""
def build_fragment(
self,
template='',
context=None,
css=None,
js=None,
js_init=None,
):
"""
Creates a fragment for display.
"""
- template = 'templates/' + template
context = context or {}
css = css or []
js = js or []
rendered_template = ''
if template:
+ template = 'templates/' + template
rendered_template = self.loader.render_django_template(
template,
context=Context(context),
i18n_service=self.runtime.service(self, 'i18n'),
)
fragment = Fragment(rendered_template)
for item in css:
if item.startswith('/'):
url = item
else:
item = 'public/' + item
url = self.runtime.local_resource_url(self, item)
fragment.add_css_url(url)
for item in js:
item = 'public/' + item
url = self.runtime.local_resource_url(self, item)
fragment.add_javascript_url(url)
if js_init:
fragment.initialize_js(js_init)
return fragment | 2 | 0.042553 | 1 | 1 |
aaf8d27c7b0d42d60356de6816f64958025ac360 | docker-compose.yml | docker-compose.yml | version: '2'
services:
django:
extends:
file: ./docker/compose-common.yml
service: django
links:
- db
environment:
- DJANGO_SETTINGS_MODULE=pyconcz_2016.settings_dev
webpack:
build:
context: .
dockerfile: ./docker/webpack.docker
ports:
- "8001:8001"
volumes:
- .:/app
db:
image: postgres
environment:
- POSTGRES_USER=pyconcz
| version: '2'
services:
django:
extends:
file: ./docker/compose-common.yml
service: django
links:
- db
environment:
- DJANGO_SETTINGS_MODULE=pyconcz_2016.settings_dev
webpack:
build:
context: .
dockerfile: ./docker/webpack.docker
ports:
- "8001:8001"
volumes:
- /app/node_modules
- .:/app
db:
image: postgres
environment:
- POSTGRES_USER=pyconcz
| Fix webpack volume mounting clearing node_modules inside container | Fix webpack volume mounting clearing node_modules inside container
Mounting . to /app inside the container will replace everything under /app
in there, including /app/node_modules. By adding a separate volume for
/app/node_modules, Docker will keep everything under /app/node_modules
intact (even if we don't mount anything there.)
| YAML | mit | benabraham/cz.pycon.org-2017,pyvec/cz.pycon.org-2016,pyvec/cz.pycon.org-2017,benabraham/cz.pycon.org-2017,pyvec/cz.pycon.org-2017,pyvec/cz.pycon.org-2016,pyvec/cz.pycon.org-2017,pyvec/cz.pycon.org-2016,benabraham/cz.pycon.org-2017 | yaml | ## Code Before:
version: '2'
services:
django:
extends:
file: ./docker/compose-common.yml
service: django
links:
- db
environment:
- DJANGO_SETTINGS_MODULE=pyconcz_2016.settings_dev
webpack:
build:
context: .
dockerfile: ./docker/webpack.docker
ports:
- "8001:8001"
volumes:
- .:/app
db:
image: postgres
environment:
- POSTGRES_USER=pyconcz
## Instruction:
Fix webpack volume mounting clearing node_modules inside container
Mounting . to /app inside the container will replace everything under /app
in there, including /app/node_modules. By adding a separate volume for
/app/node_modules, Docker will keep everything under /app/node_modules
intact (even if we don't mount anything there.)
## Code After:
version: '2'
services:
django:
extends:
file: ./docker/compose-common.yml
service: django
links:
- db
environment:
- DJANGO_SETTINGS_MODULE=pyconcz_2016.settings_dev
webpack:
build:
context: .
dockerfile: ./docker/webpack.docker
ports:
- "8001:8001"
volumes:
- /app/node_modules
- .:/app
db:
image: postgres
environment:
- POSTGRES_USER=pyconcz
| version: '2'
services:
django:
extends:
file: ./docker/compose-common.yml
service: django
links:
- db
environment:
- DJANGO_SETTINGS_MODULE=pyconcz_2016.settings_dev
webpack:
build:
context: .
dockerfile: ./docker/webpack.docker
ports:
- "8001:8001"
volumes:
+ - /app/node_modules
- .:/app
db:
image: postgres
environment:
- POSTGRES_USER=pyconcz | 1 | 0.041667 | 1 | 0 |
c1a6937c6084c3f64b1199785015f212ae59f6a8 | static/js/microblogging.js | static/js/microblogging.js | jQuery(document).ready(function($) {
function update_chars_left() {
var max_len = 140;
var textarea = $('#new_tweet')[0];
var tweet_len = textarea.value.length;
if (tweet_len >= max_len) {
textarea.value = textarea.value.substring(0, max_len); // truncate
$('#chars_left').html("0");
} else {
$('#chars_left').html(max_len - tweet_len);
}
}
$('#new_tweet').keyup(function() {
update_chars_left();
});
update_chars_left();
$('#new_tweet').focus();
});
| jQuery(document).ready(function($) {
if ($('#new_tweet').length) {
function update_chars_left() {
var max_len = 140;
var textarea = $('#new_tweet')[0];
var tweet_len = textarea.value.length;
if (tweet_len >= max_len) {
textarea.value = textarea.value.substring(0, max_len); // truncate
$('#chars_left').html("0");
} else {
$('#chars_left').html(max_len - tweet_len);
}
}
$('#new_tweet').keyup(function() {
update_chars_left();
});
update_chars_left();
$('#new_tweet').focus();
}
});
| Fix js error on touite input when not authenticated | Fix js error on touite input when not authenticated
| JavaScript | mit | amarandon/smeuhsocial,amarandon/smeuhsocial,amarandon/smeuhsocial,fgirault/smeuhsocial,fgirault/smeuhsocial,fgirault/smeuhsocial | javascript | ## Code Before:
jQuery(document).ready(function($) {
function update_chars_left() {
var max_len = 140;
var textarea = $('#new_tweet')[0];
var tweet_len = textarea.value.length;
if (tweet_len >= max_len) {
textarea.value = textarea.value.substring(0, max_len); // truncate
$('#chars_left').html("0");
} else {
$('#chars_left').html(max_len - tweet_len);
}
}
$('#new_tweet').keyup(function() {
update_chars_left();
});
update_chars_left();
$('#new_tweet').focus();
});
## Instruction:
Fix js error on touite input when not authenticated
## Code After:
jQuery(document).ready(function($) {
if ($('#new_tweet').length) {
function update_chars_left() {
var max_len = 140;
var textarea = $('#new_tweet')[0];
var tweet_len = textarea.value.length;
if (tweet_len >= max_len) {
textarea.value = textarea.value.substring(0, max_len); // truncate
$('#chars_left').html("0");
} else {
$('#chars_left').html(max_len - tweet_len);
}
}
$('#new_tweet').keyup(function() {
update_chars_left();
});
update_chars_left();
$('#new_tweet').focus();
}
});
| jQuery(document).ready(function($) {
+ if ($('#new_tweet').length) {
- function update_chars_left() {
+ function update_chars_left() {
? ++
- var max_len = 140;
+ var max_len = 140;
? ++
- var textarea = $('#new_tweet')[0];
+ var textarea = $('#new_tweet')[0];
? ++
- var tweet_len = textarea.value.length;
+ var tweet_len = textarea.value.length;
? ++
- if (tweet_len >= max_len) {
+ if (tweet_len >= max_len) {
? ++
- textarea.value = textarea.value.substring(0, max_len); // truncate
+ textarea.value = textarea.value.substring(0, max_len); // truncate
? ++
- $('#chars_left').html("0");
+ $('#chars_left').html("0");
? ++
- } else {
+ } else {
? ++
- $('#chars_left').html(max_len - tweet_len);
+ $('#chars_left').html(max_len - tweet_len);
? ++
- }
+ }
? ++
+ }
+ $('#new_tweet').keyup(function() {
+ update_chars_left();
+ });
+ update_chars_left();
+ $('#new_tweet').focus();
}
- $('#new_tweet').keyup(function() {
- update_chars_left();
- });
- update_chars_left();
- $('#new_tweet').focus();
});
| 32 | 1.684211 | 17 | 15 |
9b41f311f99c53221c57efeb4afb1aa742375b83 | README.markdown | README.markdown | Paradox
=======
Paradox is a markdown documentation tool for software projects.
**Paradox is NOT supported under the Lightbend subscription.**
See [Paradox docs](http://developer.lightbend.com/docs/paradox/latest/) for details.
| Paradox [![scaladex-badge][]][scaladex] [![travis-badge][]][travis] [![gitter-badge][]][gitter]
=======
[scaladex]: https://index.scala-lang.org/lightbend/paradox
[scaladex-badge]: https://index.scala-lang.org/lightbend/paradox/paradox/latest.svg
[travis]: https://travis-ci.org/lightbend/paradox
[travis-badge]: https://travis-ci.org/lightbend/paradox.svg?branch=master
[gitter]: https://gitter.im/lightbend/paradox
[gitter-badge]: https://badges.gitter.im/lightbend/paradox.svg
Paradox is a markdown documentation tool for software projects.
**Paradox is NOT supported under the Lightbend subscription.**
See [Paradox docs](http://developer.lightbend.com/docs/paradox/latest/) for details.
| Add a badge to gitter community and others | Add a badge to gitter community and others
Fixes #155
| Markdown | apache-2.0 | lightbend/paradox,lightbend/paradox,lightbend/paradox | markdown | ## Code Before:
Paradox
=======
Paradox is a markdown documentation tool for software projects.
**Paradox is NOT supported under the Lightbend subscription.**
See [Paradox docs](http://developer.lightbend.com/docs/paradox/latest/) for details.
## Instruction:
Add a badge to gitter community and others
Fixes #155
## Code After:
Paradox [![scaladex-badge][]][scaladex] [![travis-badge][]][travis] [![gitter-badge][]][gitter]
=======
[scaladex]: https://index.scala-lang.org/lightbend/paradox
[scaladex-badge]: https://index.scala-lang.org/lightbend/paradox/paradox/latest.svg
[travis]: https://travis-ci.org/lightbend/paradox
[travis-badge]: https://travis-ci.org/lightbend/paradox.svg?branch=master
[gitter]: https://gitter.im/lightbend/paradox
[gitter-badge]: https://badges.gitter.im/lightbend/paradox.svg
Paradox is a markdown documentation tool for software projects.
**Paradox is NOT supported under the Lightbend subscription.**
See [Paradox docs](http://developer.lightbend.com/docs/paradox/latest/) for details.
| - Paradox
+ Paradox [![scaladex-badge][]][scaladex] [![travis-badge][]][travis] [![gitter-badge][]][gitter]
=======
+
+ [scaladex]: https://index.scala-lang.org/lightbend/paradox
+ [scaladex-badge]: https://index.scala-lang.org/lightbend/paradox/paradox/latest.svg
+ [travis]: https://travis-ci.org/lightbend/paradox
+ [travis-badge]: https://travis-ci.org/lightbend/paradox.svg?branch=master
+ [gitter]: https://gitter.im/lightbend/paradox
+ [gitter-badge]: https://badges.gitter.im/lightbend/paradox.svg
Paradox is a markdown documentation tool for software projects.
**Paradox is NOT supported under the Lightbend subscription.**
See [Paradox docs](http://developer.lightbend.com/docs/paradox/latest/) for details. | 9 | 1.125 | 8 | 1 |
cf6c4796764e1b0a652e2962a962263fc8beabe2 | app/views/users/index.html.erb | app/views/users/index.html.erb | <h1>Users</h1>
<table class="table table-condensed">
<thead>
<tr>
<th>ID</th>
<th>Email</th>
<th>Role</th>
<th>School</th>
<th>Sign in count</th>
<th>Access locked</th>
<th></th>
</tr>
</thead>
<tbody>
<% @users.each do |user| %>
<tr>
<td><%= user.id %></td>
<td><%= user.email %></td>
<td><%= user.role.titleize %></td>
<td><%= link_to user.school.name, school_path(user.school) if user.school %></td>
<td><%= user.sign_in_count %></td>
<td><%= user.access_locked? ? 'Y' : '' %></td>
<td>
<div class="btn-group">
<%= link_to 'Edit', edit_user_path(user), class: 'btn btn-primary btn-sm' %>
<%= link_to 'Delete', user_path(user), method: :delete, data: { confirm: 'Are you sure?' }, class: 'btn btn-danger btn-sm' %>
</div>
</td>
</tr>
<% end %>
</tbody>
</table>
<%= link_to 'New User', new_user_path, class: 'btn btn-primary' %>
| <h1>Users</h1>
<table class="table table-condensed">
<thead>
<tr>
<th>ID</th>
<th>Name</th>
<th>Email</th>
<th>Role</th>
<th>School</th>
<th>Sign in count</th>
<th>Access locked</th>
<th></th>
</tr>
</thead>
<tbody>
<% @users.each do |user| %>
<tr>
<td><%= user.id %></td>
<td><%= user.name %></td>
<td><%= user.email %></td>
<td><%= user.role.titleize %></td>
<td><%= link_to user.school.name, school_path(user.school) if user.school %></td>
<td><%= user.sign_in_count %></td>
<td><%= user.access_locked? ? 'Y' : '' %></td>
<td>
<div class="btn-group">
<%= link_to 'Edit', edit_user_path(user), class: 'btn btn-primary btn-sm' %>
<%= link_to 'Delete', user_path(user), method: :delete, data: { confirm: 'Are you sure?' }, class: 'btn btn-danger btn-sm' %>
</div>
</td>
</tr>
<% end %>
</tbody>
</table>
<%= link_to 'New User', new_user_path, class: 'btn btn-primary' %>
| Add names to user list | Add names to user list
| HTML+ERB | mit | BathHacked/energy-sparks,BathHacked/energy-sparks,BathHacked/energy-sparks,BathHacked/energy-sparks | html+erb | ## Code Before:
<h1>Users</h1>
<table class="table table-condensed">
<thead>
<tr>
<th>ID</th>
<th>Email</th>
<th>Role</th>
<th>School</th>
<th>Sign in count</th>
<th>Access locked</th>
<th></th>
</tr>
</thead>
<tbody>
<% @users.each do |user| %>
<tr>
<td><%= user.id %></td>
<td><%= user.email %></td>
<td><%= user.role.titleize %></td>
<td><%= link_to user.school.name, school_path(user.school) if user.school %></td>
<td><%= user.sign_in_count %></td>
<td><%= user.access_locked? ? 'Y' : '' %></td>
<td>
<div class="btn-group">
<%= link_to 'Edit', edit_user_path(user), class: 'btn btn-primary btn-sm' %>
<%= link_to 'Delete', user_path(user), method: :delete, data: { confirm: 'Are you sure?' }, class: 'btn btn-danger btn-sm' %>
</div>
</td>
</tr>
<% end %>
</tbody>
</table>
<%= link_to 'New User', new_user_path, class: 'btn btn-primary' %>
## Instruction:
Add names to user list
## Code After:
<h1>Users</h1>
<table class="table table-condensed">
<thead>
<tr>
<th>ID</th>
<th>Name</th>
<th>Email</th>
<th>Role</th>
<th>School</th>
<th>Sign in count</th>
<th>Access locked</th>
<th></th>
</tr>
</thead>
<tbody>
<% @users.each do |user| %>
<tr>
<td><%= user.id %></td>
<td><%= user.name %></td>
<td><%= user.email %></td>
<td><%= user.role.titleize %></td>
<td><%= link_to user.school.name, school_path(user.school) if user.school %></td>
<td><%= user.sign_in_count %></td>
<td><%= user.access_locked? ? 'Y' : '' %></td>
<td>
<div class="btn-group">
<%= link_to 'Edit', edit_user_path(user), class: 'btn btn-primary btn-sm' %>
<%= link_to 'Delete', user_path(user), method: :delete, data: { confirm: 'Are you sure?' }, class: 'btn btn-danger btn-sm' %>
</div>
</td>
</tr>
<% end %>
</tbody>
</table>
<%= link_to 'New User', new_user_path, class: 'btn btn-primary' %>
| <h1>Users</h1>
<table class="table table-condensed">
<thead>
<tr>
<th>ID</th>
+ <th>Name</th>
<th>Email</th>
<th>Role</th>
<th>School</th>
<th>Sign in count</th>
<th>Access locked</th>
<th></th>
</tr>
</thead>
<tbody>
<% @users.each do |user| %>
<tr>
<td><%= user.id %></td>
+ <td><%= user.name %></td>
<td><%= user.email %></td>
<td><%= user.role.titleize %></td>
<td><%= link_to user.school.name, school_path(user.school) if user.school %></td>
<td><%= user.sign_in_count %></td>
<td><%= user.access_locked? ? 'Y' : '' %></td>
<td>
<div class="btn-group">
<%= link_to 'Edit', edit_user_path(user), class: 'btn btn-primary btn-sm' %>
<%= link_to 'Delete', user_path(user), method: :delete, data: { confirm: 'Are you sure?' }, class: 'btn btn-danger btn-sm' %>
</div>
</td>
</tr>
<% end %>
</tbody>
</table>
<%= link_to 'New User', new_user_path, class: 'btn btn-primary' %> | 2 | 0.057143 | 2 | 0 |
51f912033b9d2eb7b667cea855bc71c123730883 | spec/models/spree/payment_method_spec.rb | spec/models/spree/payment_method_spec.rb | require 'spec_helper'
module Spree
describe PaymentMethod do
it "orders payment methods by name" do
pm1 = create(:payment_method, name: 'ZZ')
pm2 = create(:payment_method, name: 'AA')
pm3 = create(:payment_method, name: 'BB')
PaymentMethod.by_name.should == [pm2, pm3, pm1]
end
it "raises errors when required fields are missing" do
pm = PaymentMethod.new()
pm.save
pm.errors.to_a.should == ["Name can't be blank", "At least one hub must be selected"]
end
it "generates a clean name for known Payment Method types" do
Spree::PaymentMethod::Check.clean_name.should == "Cash/EFT/etc. (payments for which automatic validation is not required)"
Spree::Gateway::Migs.clean_name.should == "MasterCard Internet Gateway Service (MIGS)"
Spree::BillingIntegration::PaypalExpressUk.clean_name.should == "PayPal Express (UK)"
Spree::BillingIntegration::PaypalExpress.clean_name.should == "PayPal Express"
# Testing else condition
Spree::Gateway::BogusSimple.clean_name.should == "BogusSimple"
end
end
end
| require 'spec_helper'
module Spree
describe PaymentMethod do
it "orders payment methods by name" do
pm1 = create(:payment_method, name: 'ZZ')
pm2 = create(:payment_method, name: 'AA')
pm3 = create(:payment_method, name: 'BB')
PaymentMethod.by_name.should == [pm2, pm3, pm1]
end
it "raises errors when required fields are missing" do
pm = PaymentMethod.new()
pm.save
pm.errors.to_a.should == ["Name can't be blank", "At least one hub must be selected"]
end
it "generates a clean name for known Payment Method types" do
Spree::PaymentMethod::Check.clean_name.should == "Cash/EFT/etc. (payments for which automatic validation is not required)"
Spree::Gateway::Migs.clean_name.should == "MasterCard Internet Gateway Service (MIGS)"
Spree::BillingIntegration::PaypalExpress.clean_name.should == "PayPal Express"
# Testing else condition
Spree::Gateway::BogusSimple.clean_name.should == "BogusSimple"
end
end
end
| Remove test for PayPalExpress UK which does not exist anymore | Remove test for PayPalExpress UK which does not exist anymore
| Ruby | agpl-3.0 | lin-d-hop/openfoodnetwork,stveep/openfoodnetwork,ecocitycore/openfoodnetwork,MikeiLL/openfoodnetwork,openfoodfoundation/openfoodnetwork,MikeiLL/openfoodnetwork,levent/openfoodnetwork,mkllnk/openfoodnetwork,lin-d-hop/openfoodnetwork,ltrls/openfoodnetwork,RohanM/openfoodnetwork,stveep/openfoodnetwork,folklabs/openfoodnetwork,Em-AK/openfoodnetwork,lin-d-hop/openfoodnetwork,levent/openfoodnetwork,levent/openfoodnetwork,KosenkoDmitriy/openfoodnetwork,Matt-Yorkley/openfoodnetwork,openfoodfoundation/openfoodnetwork,RohanM/openfoodnetwork,folklabs/openfoodnetwork,ecocitycore/openfoodnetwork,Matt-Yorkley/openfoodnetwork,oeoeaio/openfoodnetwork,oeoeaio/openfoodnetwork,lin-d-hop/openfoodnetwork,mkllnk/openfoodnetwork,openfoodfoundation/openfoodnetwork,KateDavis/openfoodnetwork,Matt-Yorkley/openfoodnetwork,ltrls/openfoodnetwork,levent/openfoodnetwork,mkllnk/openfoodnetwork,MikeiLL/openfoodnetwork,ecocitycore/openfoodnetwork,ecocitycore/openfoodnetwork,ltrls/openfoodnetwork,Em-AK/openfoodnetwork,folklabs/openfoodnetwork,KosenkoDmitriy/openfoodnetwork,openfoodfoundation/openfoodnetwork,KosenkoDmitriy/openfoodnetwork,RohanM/openfoodnetwork,oeoeaio/openfoodnetwork,Matt-Yorkley/openfoodnetwork,KateDavis/openfoodnetwork,KateDavis/openfoodnetwork,folklabs/openfoodnetwork,stveep/openfoodnetwork,stveep/openfoodnetwork,ltrls/openfoodnetwork,RohanM/openfoodnetwork,KosenkoDmitriy/openfoodnetwork,Em-AK/openfoodnetwork,MikeiLL/openfoodnetwork,mkllnk/openfoodnetwork,Em-AK/openfoodnetwork,oeoeaio/openfoodnetwork,KateDavis/openfoodnetwork | ruby | ## Code Before:
require 'spec_helper'
module Spree
describe PaymentMethod do
it "orders payment methods by name" do
pm1 = create(:payment_method, name: 'ZZ')
pm2 = create(:payment_method, name: 'AA')
pm3 = create(:payment_method, name: 'BB')
PaymentMethod.by_name.should == [pm2, pm3, pm1]
end
it "raises errors when required fields are missing" do
pm = PaymentMethod.new()
pm.save
pm.errors.to_a.should == ["Name can't be blank", "At least one hub must be selected"]
end
it "generates a clean name for known Payment Method types" do
Spree::PaymentMethod::Check.clean_name.should == "Cash/EFT/etc. (payments for which automatic validation is not required)"
Spree::Gateway::Migs.clean_name.should == "MasterCard Internet Gateway Service (MIGS)"
Spree::BillingIntegration::PaypalExpressUk.clean_name.should == "PayPal Express (UK)"
Spree::BillingIntegration::PaypalExpress.clean_name.should == "PayPal Express"
# Testing else condition
Spree::Gateway::BogusSimple.clean_name.should == "BogusSimple"
end
end
end
## Instruction:
Remove test for PayPalExpress UK which does not exist anymore
## Code After:
require 'spec_helper'
module Spree
describe PaymentMethod do
it "orders payment methods by name" do
pm1 = create(:payment_method, name: 'ZZ')
pm2 = create(:payment_method, name: 'AA')
pm3 = create(:payment_method, name: 'BB')
PaymentMethod.by_name.should == [pm2, pm3, pm1]
end
it "raises errors when required fields are missing" do
pm = PaymentMethod.new()
pm.save
pm.errors.to_a.should == ["Name can't be blank", "At least one hub must be selected"]
end
it "generates a clean name for known Payment Method types" do
Spree::PaymentMethod::Check.clean_name.should == "Cash/EFT/etc. (payments for which automatic validation is not required)"
Spree::Gateway::Migs.clean_name.should == "MasterCard Internet Gateway Service (MIGS)"
Spree::BillingIntegration::PaypalExpress.clean_name.should == "PayPal Express"
# Testing else condition
Spree::Gateway::BogusSimple.clean_name.should == "BogusSimple"
end
end
end
| require 'spec_helper'
module Spree
describe PaymentMethod do
it "orders payment methods by name" do
pm1 = create(:payment_method, name: 'ZZ')
pm2 = create(:payment_method, name: 'AA')
pm3 = create(:payment_method, name: 'BB')
PaymentMethod.by_name.should == [pm2, pm3, pm1]
end
it "raises errors when required fields are missing" do
pm = PaymentMethod.new()
pm.save
pm.errors.to_a.should == ["Name can't be blank", "At least one hub must be selected"]
end
it "generates a clean name for known Payment Method types" do
Spree::PaymentMethod::Check.clean_name.should == "Cash/EFT/etc. (payments for which automatic validation is not required)"
Spree::Gateway::Migs.clean_name.should == "MasterCard Internet Gateway Service (MIGS)"
- Spree::BillingIntegration::PaypalExpressUk.clean_name.should == "PayPal Express (UK)"
Spree::BillingIntegration::PaypalExpress.clean_name.should == "PayPal Express"
# Testing else condition
Spree::Gateway::BogusSimple.clean_name.should == "BogusSimple"
end
end
end | 1 | 0.034483 | 0 | 1 |
d9fc2cfdcfaf13f2e8491ace60680f3c94ad5c83 | tests/test_async.py | tests/test_async.py | try:
import asyncio
loop = asyncio.get_event_loop()
except ImportError:
asyncio = None
import pexpect
import unittest
@unittest.skipIf(asyncio is None, "Requires asyncio")
class AsyncTests(unittest.TestCase):
def test_simple_expect(self):
p = pexpect.spawn('cat')
p.sendline('Hello asyncio')
coro = p.expect('Hello', async=True)
task = asyncio.Task(coro)
results = []
def complete(task):
results.append(task.result())
task.add_done_callback(complete)
loop.run_until_complete(task)
assert results == [0] | try:
import asyncio
except ImportError:
asyncio = None
import pexpect
import unittest
def run(coro):
return asyncio.get_event_loop().run_until_complete(coro)
@unittest.skipIf(asyncio is None, "Requires asyncio")
class AsyncTests(unittest.TestCase):
def test_simple_expect(self):
p = pexpect.spawn('cat')
p.sendline('Hello asyncio')
coro = p.expect(['Hello', pexpect.EOF] , async=True)
assert run(coro) == 0
print('Done')
def test_timeout(self):
p = pexpect.spawn('cat')
coro = p.expect('foo', timeout=1, async=True)
with self.assertRaises(pexpect.TIMEOUT):
run(coro)
p = pexpect.spawn('cat')
coro = p.expect(['foo', pexpect.TIMEOUT], timeout=1, async=True)
assert run(coro) == 1
def test_eof(self):
p = pexpect.spawn('cat')
p.sendline('Hi')
coro = p.expect(pexpect.EOF, async=True)
p.sendeof()
assert run(coro) == 0
p = pexpect.spawn('cat')
p.sendeof()
coro = p.expect('Blah', async=True)
with self.assertRaises(pexpect.EOF):
run(coro) | Expand tests for async expect | Expand tests for async expect
| Python | isc | dongguangming/pexpect,quatanium/pexpect,crdoconnor/pexpect,Depado/pexpect,crdoconnor/pexpect,dongguangming/pexpect,crdoconnor/pexpect,Wakeupbuddy/pexpect,nodish/pexpect,Wakeupbuddy/pexpect,blink1073/pexpect,nodish/pexpect,quatanium/pexpect,bangi123/pexpect,dongguangming/pexpect,bangi123/pexpect,quatanium/pexpect,Depado/pexpect,Wakeupbuddy/pexpect,blink1073/pexpect,dongguangming/pexpect,Wakeupbuddy/pexpect,nodish/pexpect,Depado/pexpect,blink1073/pexpect,bangi123/pexpect,Depado/pexpect,bangi123/pexpect | python | ## Code Before:
try:
import asyncio
loop = asyncio.get_event_loop()
except ImportError:
asyncio = None
import pexpect
import unittest
@unittest.skipIf(asyncio is None, "Requires asyncio")
class AsyncTests(unittest.TestCase):
def test_simple_expect(self):
p = pexpect.spawn('cat')
p.sendline('Hello asyncio')
coro = p.expect('Hello', async=True)
task = asyncio.Task(coro)
results = []
def complete(task):
results.append(task.result())
task.add_done_callback(complete)
loop.run_until_complete(task)
assert results == [0]
## Instruction:
Expand tests for async expect
## Code After:
try:
import asyncio
except ImportError:
asyncio = None
import pexpect
import unittest
def run(coro):
return asyncio.get_event_loop().run_until_complete(coro)
@unittest.skipIf(asyncio is None, "Requires asyncio")
class AsyncTests(unittest.TestCase):
def test_simple_expect(self):
p = pexpect.spawn('cat')
p.sendline('Hello asyncio')
coro = p.expect(['Hello', pexpect.EOF] , async=True)
assert run(coro) == 0
print('Done')
def test_timeout(self):
p = pexpect.spawn('cat')
coro = p.expect('foo', timeout=1, async=True)
with self.assertRaises(pexpect.TIMEOUT):
run(coro)
p = pexpect.spawn('cat')
coro = p.expect(['foo', pexpect.TIMEOUT], timeout=1, async=True)
assert run(coro) == 1
def test_eof(self):
p = pexpect.spawn('cat')
p.sendline('Hi')
coro = p.expect(pexpect.EOF, async=True)
p.sendeof()
assert run(coro) == 0
p = pexpect.spawn('cat')
p.sendeof()
coro = p.expect('Blah', async=True)
with self.assertRaises(pexpect.EOF):
run(coro) | try:
import asyncio
- loop = asyncio.get_event_loop()
except ImportError:
asyncio = None
import pexpect
import unittest
+ def run(coro):
+ return asyncio.get_event_loop().run_until_complete(coro)
+
@unittest.skipIf(asyncio is None, "Requires asyncio")
class AsyncTests(unittest.TestCase):
def test_simple_expect(self):
p = pexpect.spawn('cat')
p.sendline('Hello asyncio')
- coro = p.expect('Hello', async=True)
+ coro = p.expect(['Hello', pexpect.EOF] , async=True)
? + +++++++++++++++
- task = asyncio.Task(coro)
- results = []
- def complete(task):
- results.append(task.result())
- task.add_done_callback(complete)
- loop.run_until_complete(task)
+ assert run(coro) == 0
+ print('Done')
+
+ def test_timeout(self):
+ p = pexpect.spawn('cat')
+ coro = p.expect('foo', timeout=1, async=True)
+ with self.assertRaises(pexpect.TIMEOUT):
+ run(coro)
+ p = pexpect.spawn('cat')
+ coro = p.expect(['foo', pexpect.TIMEOUT], timeout=1, async=True)
+ assert run(coro) == 1
+
+ def test_eof(self):
+ p = pexpect.spawn('cat')
+ p.sendline('Hi')
+ coro = p.expect(pexpect.EOF, async=True)
+ p.sendeof()
- assert results == [0]
? -- ^^^ - -
+ assert run(coro) == 0
? ^^^^^^^
+
+ p = pexpect.spawn('cat')
+ p.sendeof()
+ coro = p.expect('Blah', async=True)
+ with self.assertRaises(pexpect.EOF):
+ run(coro) | 37 | 1.608696 | 28 | 9 |
1264a8926c889a1d0f8a3dc5c1710d25d0a2fc92 | app/views/shared/_video.html.erb | app/views/shared/_video.html.erb | <%= f.input :video_url,
:label => "Video URL",
:hint => 'Put the YouTube URL in here if it exists',
:input_html => { :class => 'col-md-7' , :disabled => @resource.locked_for_edits? } %>
<%= f.input :video_summary,
:as => :text,
:label => "Video Summary",
:input_html => { :class => 'col-md-7' , :rows => 2, :disabled => @resource.locked_for_edits? } %>
| <div class="row">
<div class="col-md-7">
<%= f.input :video_url,
:label => "Video URL",
:hint => 'Put the YouTube URL in here if it exists',
:input_html => { :disabled => @resource.locked_for_edits? } %>
<%= f.input :video_summary,
:as => :text,
:label => "Video Summary",
:input_html => { :rows => 2, :disabled => @resource.locked_for_edits? } %>
</div>
</div>
| Add bootstrap 3 grid wrappers to video form | Add bootstrap 3 grid wrappers to video form
* Use grid wrappers instead of input classes for video form
| HTML+ERB | mit | telekomatrix/publisher,telekomatrix/publisher,telekomatrix/publisher,telekomatrix/publisher,leftees/publisher,leftees/publisher,leftees/publisher,alphagov/publisher,leftees/publisher,alphagov/publisher,alphagov/publisher | html+erb | ## Code Before:
<%= f.input :video_url,
:label => "Video URL",
:hint => 'Put the YouTube URL in here if it exists',
:input_html => { :class => 'col-md-7' , :disabled => @resource.locked_for_edits? } %>
<%= f.input :video_summary,
:as => :text,
:label => "Video Summary",
:input_html => { :class => 'col-md-7' , :rows => 2, :disabled => @resource.locked_for_edits? } %>
## Instruction:
Add bootstrap 3 grid wrappers to video form
* Use grid wrappers instead of input classes for video form
## Code After:
<div class="row">
<div class="col-md-7">
<%= f.input :video_url,
:label => "Video URL",
:hint => 'Put the YouTube URL in here if it exists',
:input_html => { :disabled => @resource.locked_for_edits? } %>
<%= f.input :video_summary,
:as => :text,
:label => "Video Summary",
:input_html => { :rows => 2, :disabled => @resource.locked_for_edits? } %>
</div>
</div>
| + <div class="row">
+ <div class="col-md-7">
<%= f.input :video_url,
:label => "Video URL",
:hint => 'Put the YouTube URL in here if it exists',
- :input_html => { :class => 'col-md-7' , :disabled => @resource.locked_for_edits? } %>
? -----------------------
+ :input_html => { :disabled => @resource.locked_for_edits? } %>
<%= f.input :video_summary,
:as => :text,
:label => "Video Summary",
- :input_html => { :class => 'col-md-7' , :rows => 2, :disabled => @resource.locked_for_edits? } %>
? -----------------------
+ :input_html => { :rows => 2, :disabled => @resource.locked_for_edits? } %>
+ </div>
+ </div> | 8 | 0.888889 | 6 | 2 |
ce6ce4822426e87dbca0c64791c0a789e7a44cec | lib/troo.rb | lib/troo.rb | $LOAD_PATH << File.expand_path(File.dirname(__FILE__) + '/../lib')
require_relative 'troo/troo'
require_relative 'troo/version'
module Troo
InvalidAccessToken = Class.new(StandardError)
EndpointNotFound = Class.new(StandardError)
def self.configuration(file = Dir.home + '/.trooconf', env = :default)
@configuration ||= Troo::Configuration.load(file, env)
end
configuration
def self.endpoints(version = :version_1)
@endpoints ||= Troo::API::Endpoints
.load(File.dirname(__FILE__) + '/../config/trello_api.yml', version)
end
endpoints
def self.logger
@logger ||= Logger.new('logs/troo.log')
end
# RestClient.log = File.dirname(__FILE__) + '/../logs/restclient.log'
class Launcher
def initialize(argv, stdin = STDIN,
stdout = STDOUT,
stderr = STDERR,
kernel = Kernel)
@argv = argv
@stdin = stdin
@stdout = stdout
@stderr = stderr
@kernel = kernel
end
def execute!
$stdin, $stdout, $stderr = @stdin, @stdout, @stderr
puts
Troo::CLI::Main.start(@argv)
puts
@kernel.exit(0)
rescue Troo::InvalidAccessToken
@stderr.puts 'Your Trello access credentials have expired, ' \
' please renew and try again.'
@kernel.exit(1)
rescue SocketError
@stderr.puts 'Cannot continue, no network connection.'
@kernel.exit(1)
end
end
end
| $LOAD_PATH << File.expand_path(File.dirname(__FILE__) + '/../lib')
require_relative 'troo/troo'
require_relative 'troo/version'
module Troo
InvalidAccessToken = Class.new(StandardError)
EndpointNotFound = Class.new(StandardError)
def self.configuration(file = Dir.home + '/.trooconf', env = :default)
@configuration ||= Troo::Configuration.load(file, env)
end
configuration
def self.endpoints(version = :version_1)
@endpoints ||= Troo::API::Endpoints
.load(File.dirname(__FILE__) + '/../config/trello_api.yml', version)
end
endpoints
def self.logger
@logger ||= Logger.new('logs/troo.log')
end
# RestClient.log = File.dirname(__FILE__) + '/../logs/restclient.log'
class Launcher
def initialize(argv, stdin = STDIN,
stdout = STDOUT,
stderr = STDERR,
kernel = Kernel)
@argv = argv
@stdin = stdin
@stdout = stdout
@stderr = stderr
@kernel = kernel
end
def execute!
$stdin, $stdout, $stderr = @stdin, @stdout, @stderr
puts
Troo::CLI::Main.start(@argv)
puts
@kernel.exit(0)
rescue Troo::InvalidAccessToken
@stderr.puts 'Your Trello access credentials have expired ' \
'or are invalid, please renew and try again.'
@kernel.exit(1)
ensure
$stdin, $stdout, $stderr = STDIN, STDOUT, STDERR
end
end
end
| Reset stdin, stdout and stderr before exiting. | Reset stdin, stdout and stderr before exiting. | Ruby | mit | gavinlaking/troo,gavinlaking/troo | ruby | ## Code Before:
$LOAD_PATH << File.expand_path(File.dirname(__FILE__) + '/../lib')
require_relative 'troo/troo'
require_relative 'troo/version'
module Troo
InvalidAccessToken = Class.new(StandardError)
EndpointNotFound = Class.new(StandardError)
def self.configuration(file = Dir.home + '/.trooconf', env = :default)
@configuration ||= Troo::Configuration.load(file, env)
end
configuration
def self.endpoints(version = :version_1)
@endpoints ||= Troo::API::Endpoints
.load(File.dirname(__FILE__) + '/../config/trello_api.yml', version)
end
endpoints
def self.logger
@logger ||= Logger.new('logs/troo.log')
end
# RestClient.log = File.dirname(__FILE__) + '/../logs/restclient.log'
class Launcher
def initialize(argv, stdin = STDIN,
stdout = STDOUT,
stderr = STDERR,
kernel = Kernel)
@argv = argv
@stdin = stdin
@stdout = stdout
@stderr = stderr
@kernel = kernel
end
def execute!
$stdin, $stdout, $stderr = @stdin, @stdout, @stderr
puts
Troo::CLI::Main.start(@argv)
puts
@kernel.exit(0)
rescue Troo::InvalidAccessToken
@stderr.puts 'Your Trello access credentials have expired, ' \
' please renew and try again.'
@kernel.exit(1)
rescue SocketError
@stderr.puts 'Cannot continue, no network connection.'
@kernel.exit(1)
end
end
end
## Instruction:
Reset stdin, stdout and stderr before exiting.
## Code After:
$LOAD_PATH << File.expand_path(File.dirname(__FILE__) + '/../lib')
require_relative 'troo/troo'
require_relative 'troo/version'
module Troo
InvalidAccessToken = Class.new(StandardError)
EndpointNotFound = Class.new(StandardError)
def self.configuration(file = Dir.home + '/.trooconf', env = :default)
@configuration ||= Troo::Configuration.load(file, env)
end
configuration
def self.endpoints(version = :version_1)
@endpoints ||= Troo::API::Endpoints
.load(File.dirname(__FILE__) + '/../config/trello_api.yml', version)
end
endpoints
def self.logger
@logger ||= Logger.new('logs/troo.log')
end
# RestClient.log = File.dirname(__FILE__) + '/../logs/restclient.log'
class Launcher
def initialize(argv, stdin = STDIN,
stdout = STDOUT,
stderr = STDERR,
kernel = Kernel)
@argv = argv
@stdin = stdin
@stdout = stdout
@stderr = stderr
@kernel = kernel
end
def execute!
$stdin, $stdout, $stderr = @stdin, @stdout, @stderr
puts
Troo::CLI::Main.start(@argv)
puts
@kernel.exit(0)
rescue Troo::InvalidAccessToken
@stderr.puts 'Your Trello access credentials have expired ' \
'or are invalid, please renew and try again.'
@kernel.exit(1)
ensure
$stdin, $stdout, $stderr = STDIN, STDOUT, STDERR
end
end
end
| $LOAD_PATH << File.expand_path(File.dirname(__FILE__) + '/../lib')
require_relative 'troo/troo'
require_relative 'troo/version'
module Troo
InvalidAccessToken = Class.new(StandardError)
EndpointNotFound = Class.new(StandardError)
def self.configuration(file = Dir.home + '/.trooconf', env = :default)
@configuration ||= Troo::Configuration.load(file, env)
end
configuration
def self.endpoints(version = :version_1)
@endpoints ||= Troo::API::Endpoints
.load(File.dirname(__FILE__) + '/../config/trello_api.yml', version)
end
endpoints
def self.logger
@logger ||= Logger.new('logs/troo.log')
end
# RestClient.log = File.dirname(__FILE__) + '/../logs/restclient.log'
class Launcher
def initialize(argv, stdin = STDIN,
stdout = STDOUT,
stderr = STDERR,
kernel = Kernel)
@argv = argv
@stdin = stdin
@stdout = stdout
@stderr = stderr
@kernel = kernel
end
def execute!
$stdin, $stdout, $stderr = @stdin, @stdout, @stderr
puts
Troo::CLI::Main.start(@argv)
puts
@kernel.exit(0)
rescue Troo::InvalidAccessToken
- @stderr.puts 'Your Trello access credentials have expired, ' \
? -
+ @stderr.puts 'Your Trello access credentials have expired ' \
- ' please renew and try again.'
+ 'or are invalid, please renew and try again.'
? +++++++++++++++
@kernel.exit(1)
+ ensure
+ $stdin, $stdout, $stderr = STDIN, STDOUT, STDERR
- rescue SocketError
- @stderr.puts 'Cannot continue, no network connection.'
- @kernel.exit(1)
end
end
end | 9 | 0.160714 | 4 | 5 |
e93c4aedf2e8b4e72452cfe17568a3c094fd8d3f | source/MasterDevs.CoolWhip/MasterDevs.CoolWhip/NugetPackage/tools/templates/nuspecTemplate.txt | source/MasterDevs.CoolWhip/MasterDevs.CoolWhip/NugetPackage/tools/templates/nuspecTemplate.txt | <?xml version="1.0"?>
<package >
<metadata>
<id>$id$</id>
<version>$version$</version>
<title>$title$</title>
<authors>$author$</authors>
<owners>$author$</owners>
<licenseUrl></licenseUrl>
<projectUrl></projectUrl>
<iconUrl></iconUrl>
<requireLicenseAcceptance>false</requireLicenseAcceptance>
<description>$description$</description>
<releaseNotes></releaseNotes>
<copyright>Copyright 2015</copyright>
</metadata>
</package> | <?xml version="1.0"?>
<package >
<metadata>
<id>$id$</id>
<version>$version$</version>
<title>$title$</title>
<authors>$author$</authors>
<owners>$author$</owners>
<!--
<licenseUrl></licenseUrl>
<projectUrl></projectUrl>
<iconUrl></iconUrl>
-->
<requireLicenseAcceptance>false</requireLicenseAcceptance>
<description>$description$</description>
<releaseNotes></releaseNotes>
<copyright>Copyright 2015</copyright>
</metadata>
</package> | Comment out fields w/out logical defaults | Comment out fields w/out logical defaults
| Text | mit | MasterDevs/CoolWhip | text | ## Code Before:
<?xml version="1.0"?>
<package >
<metadata>
<id>$id$</id>
<version>$version$</version>
<title>$title$</title>
<authors>$author$</authors>
<owners>$author$</owners>
<licenseUrl></licenseUrl>
<projectUrl></projectUrl>
<iconUrl></iconUrl>
<requireLicenseAcceptance>false</requireLicenseAcceptance>
<description>$description$</description>
<releaseNotes></releaseNotes>
<copyright>Copyright 2015</copyright>
</metadata>
</package>
## Instruction:
Comment out fields w/out logical defaults
## Code After:
<?xml version="1.0"?>
<package >
<metadata>
<id>$id$</id>
<version>$version$</version>
<title>$title$</title>
<authors>$author$</authors>
<owners>$author$</owners>
<!--
<licenseUrl></licenseUrl>
<projectUrl></projectUrl>
<iconUrl></iconUrl>
-->
<requireLicenseAcceptance>false</requireLicenseAcceptance>
<description>$description$</description>
<releaseNotes></releaseNotes>
<copyright>Copyright 2015</copyright>
</metadata>
</package> | <?xml version="1.0"?>
<package >
<metadata>
<id>$id$</id>
<version>$version$</version>
<title>$title$</title>
<authors>$author$</authors>
<owners>$author$</owners>
+ <!--
<licenseUrl></licenseUrl>
<projectUrl></projectUrl>
<iconUrl></iconUrl>
+ -->
<requireLicenseAcceptance>false</requireLicenseAcceptance>
<description>$description$</description>
<releaseNotes></releaseNotes>
<copyright>Copyright 2015</copyright>
</metadata>
</package> | 2 | 0.117647 | 2 | 0 |
e0cac3a840adc0e36b2246b26d6aec8b0eebaa09 | 2.7/Dockerfile | 2.7/Dockerfile | FROM python:2.7-slim
RUN apt-get update && apt-get install -y \
gcc \
gettext \
mysql-client libmysqlclient-dev \
postgresql-client libpq-dev \
sqlite3 \
--no-install-recommends && rm -rf /var/lib/apt/lists/*
ENV DJANGO_VERSION 1.9.4
RUN pip install mysqlclient psycopg2 django=="$DJANGO_VERSION"
| FROM alpine:3.3
RUN apk add --update \
gcc musl-dev python-dev py-pip \
mariadb-dev \
postgresql-dev \
sqlite && \
rm -rf /var/cache/apk/*
ENV DJANGO_VERSION 1.9.4
RUN pip install mysqlclient psycopg2 django=="$DJANGO_VERSION"
| Use Alpine 3.3 as base for 2.7 image. | Use Alpine 3.3 as base for 2.7 image.
| unknown | mit | rvernica/Dockerfile,rvernica/docker-library,rvernica/docker-library | unknown | ## Code Before:
FROM python:2.7-slim
RUN apt-get update && apt-get install -y \
gcc \
gettext \
mysql-client libmysqlclient-dev \
postgresql-client libpq-dev \
sqlite3 \
--no-install-recommends && rm -rf /var/lib/apt/lists/*
ENV DJANGO_VERSION 1.9.4
RUN pip install mysqlclient psycopg2 django=="$DJANGO_VERSION"
## Instruction:
Use Alpine 3.3 as base for 2.7 image.
## Code After:
FROM alpine:3.3
RUN apk add --update \
gcc musl-dev python-dev py-pip \
mariadb-dev \
postgresql-dev \
sqlite && \
rm -rf /var/cache/apk/*
ENV DJANGO_VERSION 1.9.4
RUN pip install mysqlclient psycopg2 django=="$DJANGO_VERSION"
| - FROM python:2.7-slim
+ FROM alpine:3.3
+ RUN apk add --update \
+ gcc musl-dev python-dev py-pip \
+ mariadb-dev \
+ postgresql-dev \
+ sqlite && \
+ rm -rf /var/cache/apk/*
- RUN apt-get update && apt-get install -y \
- gcc \
- gettext \
- mysql-client libmysqlclient-dev \
- postgresql-client libpq-dev \
- sqlite3 \
- --no-install-recommends && rm -rf /var/lib/apt/lists/*
ENV DJANGO_VERSION 1.9.4
RUN pip install mysqlclient psycopg2 django=="$DJANGO_VERSION" | 15 | 1.153846 | 7 | 8 |
ca9019bb42f532a47272c8e8f00485afe7f787fa | test/smoke/source/source_test.rb | test/smoke/source/source_test.rb | describe processes('exabgp') do
it { should exist }
end
| describe directory('/usr/src/exabgp') do
it { should exist }
end
describe file('/etc/exabgp-default/exabgp.conf') do
it { should exist }
end
| Change from process to file/dir check | Change from process to file/dir check
| Ruby | apache-2.0 | aetrion/exabgp-cookbook,aetrion/exabgp-cookbook,aetrion/exabgp-cookbook | ruby | ## Code Before:
describe processes('exabgp') do
it { should exist }
end
## Instruction:
Change from process to file/dir check
## Code After:
describe directory('/usr/src/exabgp') do
it { should exist }
end
describe file('/etc/exabgp-default/exabgp.conf') do
it { should exist }
end
| - describe processes('exabgp') do
+ describe directory('/usr/src/exabgp') do
it { should exist }
end
+
+ describe file('/etc/exabgp-default/exabgp.conf') do
+ it { should exist }
+ end | 6 | 2 | 5 | 1 |
4ca4a8caae287b389c0905c1bcbe52b4011c5fcc | src/middleware/error-responder.js | src/middleware/error-responder.js | import http from 'http';
import _ from 'lodash';
// This reponder is assuming that all <500 errors are safe to be responded
// with their .message attribute.
// DO NOT write sensitive data into error messages.
function createErrorResponder(opts) {
opts = _.merge({
isErrorSafeToRespond: function(status) {
return status < 500;
}
}, opts);
return function errorResponder(err, req, res, next) {
var message;
var status = err.status ? err.status : 500;
var httpMessage = http.STATUS_CODES[status];
if (opts.isErrorSafeToRespond(status)) {
message = httpMessage + ': ' + err.message;
}
else {
message = httpMessage;
}
res.status(status);
res.send({
error: message
});
};
}
module.exports = createErrorResponder;
| import http from 'http';
import _ from 'lodash';
// This reponder is assuming that all <500 errors are safe to be responded
// with their .message attribute.
// DO NOT write sensitive data into error messages.
function createErrorResponder(opts) {
opts = _.merge({
isErrorSafeToRespond: function(status) {
return status < 500;
}
}, opts);
return function errorResponder(err, req, res, next) {
var message;
var status = err.status ? err.status : 500;
var httpMessage = http.STATUS_CODES[status];
if (opts.isErrorSafeToRespond(status)) {
message = httpMessage + ': ' + err.message;
}
else {
message = httpMessage;
}
let body = {error: message};
if (err.userMessage) {
body.showUser = true;
body.message = err.userMessage;
}
res.status(status);
res.send(body);
};
}
module.exports = createErrorResponder;
| Add user messages to errors | Add user messages to errors
| JavaScript | mit | futurice/wappuapp-backend,kaupunki-apina/prahapp-backend,futurice/wappuapp-backend,kaupunki-apina/prahapp-backend | javascript | ## Code Before:
import http from 'http';
import _ from 'lodash';
// This reponder is assuming that all <500 errors are safe to be responded
// with their .message attribute.
// DO NOT write sensitive data into error messages.
function createErrorResponder(opts) {
opts = _.merge({
isErrorSafeToRespond: function(status) {
return status < 500;
}
}, opts);
return function errorResponder(err, req, res, next) {
var message;
var status = err.status ? err.status : 500;
var httpMessage = http.STATUS_CODES[status];
if (opts.isErrorSafeToRespond(status)) {
message = httpMessage + ': ' + err.message;
}
else {
message = httpMessage;
}
res.status(status);
res.send({
error: message
});
};
}
module.exports = createErrorResponder;
## Instruction:
Add user messages to errors
## Code After:
import http from 'http';
import _ from 'lodash';
// This reponder is assuming that all <500 errors are safe to be responded
// with their .message attribute.
// DO NOT write sensitive data into error messages.
function createErrorResponder(opts) {
opts = _.merge({
isErrorSafeToRespond: function(status) {
return status < 500;
}
}, opts);
return function errorResponder(err, req, res, next) {
var message;
var status = err.status ? err.status : 500;
var httpMessage = http.STATUS_CODES[status];
if (opts.isErrorSafeToRespond(status)) {
message = httpMessage + ': ' + err.message;
}
else {
message = httpMessage;
}
let body = {error: message};
if (err.userMessage) {
body.showUser = true;
body.message = err.userMessage;
}
res.status(status);
res.send(body);
};
}
module.exports = createErrorResponder;
| import http from 'http';
import _ from 'lodash';
// This reponder is assuming that all <500 errors are safe to be responded
// with their .message attribute.
// DO NOT write sensitive data into error messages.
function createErrorResponder(opts) {
opts = _.merge({
isErrorSafeToRespond: function(status) {
return status < 500;
}
}, opts);
return function errorResponder(err, req, res, next) {
var message;
var status = err.status ? err.status : 500;
var httpMessage = http.STATUS_CODES[status];
if (opts.isErrorSafeToRespond(status)) {
message = httpMessage + ': ' + err.message;
}
else {
message = httpMessage;
}
+ let body = {error: message};
+ if (err.userMessage) {
+ body.showUser = true;
+ body.message = err.userMessage;
+ }
+
res.status(status);
- res.send({
? ^
+ res.send(body);
? ^^^^^^
- error: message
- });
};
}
module.exports = createErrorResponder; | 10 | 0.30303 | 7 | 3 |
81dd127b05af6d179bf3afe58268a9fb4d0df5ad | .travis.yml | .travis.yml | language: java
jdk: openjdk7
script:
- mvn verify
- mvn clean compile assembly:single
cache:
directories:
- $HOME/.m2 | language: java
jdk: openjdk7
script:
- mvn verify
- mvn clean compile assembly:single
- ls -l target/
cache:
directories:
- $HOME/.m2 | Add ls to Travis output | Add ls to Travis output
| YAML | mit | krishnakanthpps/lightning,automatictester/lightning,automatictester/lightning,krishnakanthpps/lightning | yaml | ## Code Before:
language: java
jdk: openjdk7
script:
- mvn verify
- mvn clean compile assembly:single
cache:
directories:
- $HOME/.m2
## Instruction:
Add ls to Travis output
## Code After:
language: java
jdk: openjdk7
script:
- mvn verify
- mvn clean compile assembly:single
- ls -l target/
cache:
directories:
- $HOME/.m2 | language: java
jdk: openjdk7
script:
- mvn verify
- mvn clean compile assembly:single
+ - ls -l target/
cache:
directories:
- $HOME/.m2 | 1 | 0.125 | 1 | 0 |
f3aee123ec7fb77091b8df4e3bea4f4da11d7d52 | lib/arel/nodes/extract.rb | lib/arel/nodes/extract.rb | module Arel
module Nodes
class Extract < Arel::Nodes::Unary
include Arel::AliasPredication
include Arel::Predications
attr_accessor :field
def initialize expr, field
super(expr)
@field = field
end
def hash
super ^ [@field, @alias].hash
end
def eql? other
super &&
self.field == other.field
end
alias :== :eql?
end
end
end
| module Arel
module Nodes
class Extract < Arel::Nodes::Unary
include Arel::AliasPredication
include Arel::Predications
attr_accessor :field
def initialize expr, field
super(expr)
@field = field
end
def hash
super ^ @field.hash
end
def eql? other
super &&
self.field == other.field
end
alias :== :eql?
end
end
end
| Remove unused @alias, being referenced in hashing. | Remove unused @alias, being referenced in hashing.
| Ruby | mit | tacid/arel,Envek/rails,yawboakye/rails,printercu/rails,yawboakye/rails,baerjam/rails,georgeclaghorn/rails,fabianoleittes/rails,assain/rails,utilum/rails,EmmaB/rails-1,assain/rails,yawboakye/rails,MSP-Greg/rails,kmcphillips/rails,notapatch/rails,betesh/rails,Edouard-chin/rails,deraru/rails,zeckalpha/arel,maclover7/arel,starknx/rails,mechanicles/rails,illacceptanything/illacceptanything,shioyama/rails,yahonda/rails,prathamesh-sonpatki/rails,printercu/rails,schuetzm/rails,assain/rails,tgxworld/rails,eileencodes/rails,illacceptanything/illacceptanything,vipulnsward/rails,jasnow/arel,printercu/rails,iainbeeston/rails,utilum/rails,Vasfed/rails,yui-knk/arel,yahonda/rails,bogdanvlviv/rails,ngpestelos/arel,georgeclaghorn/rails,pvalena/rails,shioyama/rails,flanger001/rails,tgxworld/rails,mechanicles/rails,vipulnsward/rails,jacobmoe/arel,ngpestelos/arel,georgeclaghorn/rails,bogdanvlviv/rails,jeremy/rails,kmcphillips/rails,yahonda/rails,tjschuck/rails,kddeisz/rails,fabianoleittes/rails,illacceptanything/illacceptanything,modernmsg/arel,BlakeWilliams/rails,rails/rails,rails/rails,eileencodes/rails,Envek/rails,repinel/rails,illacceptanything/illacceptanything,rails/rails,yalab/rails,slobodankovacevic/arel,jacobmoe/arel,illacceptanything/illacceptanything,arunagw/rails,untidy-hair/rails,illacceptanything/illacceptanything,kmcphillips/rails,mohitnatoo/rails,esparta/rails,maclover7/arel,fabianoleittes/rails,lcreid/rails,repinel/rails,assain/rails,yui-knk/arel,amitsuroliya/arel,printercu/rails,MSP-Greg/rails,betesh/rails,modernmsg/arel,mohitnatoo/rails,universsky/arel,shioyama/rails,untidy-hair/rails,prathamesh-sonpatki/rails,joonyou/rails,untidy-hair/rails,vipulnsward/rails,deraru/rails,aditya-kapoor/rails,palkan/rails,prathamesh-sonpatki/rails,illacceptanything/illacceptanything,baerjam/rails,repinel/rails,Edouard-chin/rails,Envek/rails,baerjam/rails,yahonda/rails,EmmaB/rails-1,joonyou/rails,zeckalpha/arel,notapatch/rails,gauravtiwari/rails,starknx/rails,illacceptany
thing/illacceptanything,tjschuck/rails,illacceptanything/illacceptanything,tjschuck/rails,rails/arel,kddeisz/rails,baerjam/rails,jetthoughts/arel,yalab/rails,Edouard-chin/rails,EmmaB/rails-1,MSP-Greg/rails,rafaelfranca/omg-rails,kbrock/arel,MSP-Greg/rails,Envek/rails,BlakeWilliams/rails,arunagw/rails,deraru/rails,esparta/rails,mohitnatoo/rails,eileencodes/rails,eileencodes/rails,flanger001/rails,schuetzm/rails,Stellenticket/rails,kmcphillips/rails,vipulnsward/rails,rafaelfranca/omg-rails,esparta/rails,notapatch/rails,Vasfed/rails,Erol/rails,rails/rails,mohitnatoo/rails,BlakeWilliams/rails,Stellenticket/rails,jeremy/rails,aditya-kapoor/rails,mechanicles/rails,tgxworld/rails,lcreid/rails,tgxworld/rails,schuetzm/rails,universsky/arel,arunagw/rails,jemc/arel,pvalena/rails,shioyama/rails,palkan/rails,yalab/rails,illacceptanything/illacceptanything,kddeisz/rails,yawboakye/rails,jemc/arel,lcreid/rails,Erol/rails,deraru/rails,Eric-Guo/arel,Vasfed/rails,jeremy/rails,flanger001/rails,yalab/rails,utilum/rails,yahonda/arel,yhirano55/rails,palkan/rails,yhirano55/rails,arunagw/rails,aditya-kapoor/rails,amitsuroliya/arel,pvalena/rails,yahonda/arel,illacceptanything/illacceptanything,flanger001/rails,BlakeWilliams/rails,notapatch/rails,bogdanvlviv/rails,repinel/rails,jeremy/rails,palkan/rails,starknx/rails,gauravtiwari/rails,yhirano55/rails,Eric-Guo/arel,Erol/rails,kddeisz/rails,pvalena/rails,Erol/rails,iainbeeston/rails,joonyou/rails,Stellenticket/rails,betesh/rails,bogdanvlviv/rails,iainbeeston/rails,prathamesh-sonpatki/rails,jasnow/arel,illacceptanything/illacceptanything,georgeclaghorn/rails,betesh/rails,mechanicles/rails,tacid/arel,aditya-kapoor/rails,untidy-hair/rails,gauravtiwari/rails,esparta/rails,kbrock/arel,utilum/rails,iainbeeston/rails,rafaelfranca/omg-rails,illacceptanything/illacceptanything,tjschuck/rails,Vasfed/rails,illacceptanything/illacceptanything,joonyou/rails,Stellenticket/rails,yhirano55/rails,schuetzm/rails,fabianoleittes/rails,lcreid/rails,Edouard-chin/ra
ils,illacceptanything/illacceptanything | ruby | ## Code Before:
module Arel
module Nodes
class Extract < Arel::Nodes::Unary
include Arel::AliasPredication
include Arel::Predications
attr_accessor :field
def initialize expr, field
super(expr)
@field = field
end
def hash
super ^ [@field, @alias].hash
end
def eql? other
super &&
self.field == other.field
end
alias :== :eql?
end
end
end
## Instruction:
Remove unused @alias, being referenced in hashing.
## Code After:
module Arel
module Nodes
class Extract < Arel::Nodes::Unary
include Arel::AliasPredication
include Arel::Predications
attr_accessor :field
def initialize expr, field
super(expr)
@field = field
end
def hash
super ^ @field.hash
end
def eql? other
super &&
self.field == other.field
end
alias :== :eql?
end
end
end
| module Arel
module Nodes
class Extract < Arel::Nodes::Unary
include Arel::AliasPredication
include Arel::Predications
attr_accessor :field
def initialize expr, field
super(expr)
@field = field
end
def hash
- super ^ [@field, @alias].hash
? - ---------
+ super ^ @field.hash
end
def eql? other
super &&
self.field == other.field
end
alias :== :eql?
end
end
end | 2 | 0.08 | 1 | 1 |
1ddc5a460f17ae56e3b6b24763f888f1573fd446 | shuffle.rb | shuffle.rb | require 'sinatra'
require 'httparty'
require 'yaml'
data = YAML.load_file(File.expand_path("./data.yml"))
post('/') do
team = data['team_members'].shuffle
HTTParty.post(data['url'], body: { text: team, channel: "\##{params['channel_name']}" }.to_json )
end
| require 'sinatra'
require 'httparty'
require 'yaml'
require 'pry'
data = YAML.load_file(File.expand_path("./data.yml"))
post('/') do
HTTParty.post(data['url'], body: { text: data['team_members'].shuffle.join(', '), channel: "\##{params['channel_name']}" }.to_json )
end
| Remove unnecessary variable and return list joined | Remove unnecessary variable and return list joined
| Ruby | mit | romulomachado/slack-shuffle | ruby | ## Code Before:
require 'sinatra'
require 'httparty'
require 'yaml'
data = YAML.load_file(File.expand_path("./data.yml"))
post('/') do
team = data['team_members'].shuffle
HTTParty.post(data['url'], body: { text: team, channel: "\##{params['channel_name']}" }.to_json )
end
## Instruction:
Remove unnecessary variable and return list joined
## Code After:
require 'sinatra'
require 'httparty'
require 'yaml'
require 'pry'
data = YAML.load_file(File.expand_path("./data.yml"))
post('/') do
HTTParty.post(data['url'], body: { text: data['team_members'].shuffle.join(', '), channel: "\##{params['channel_name']}" }.to_json )
end
| require 'sinatra'
require 'httparty'
require 'yaml'
+ require 'pry'
data = YAML.load_file(File.expand_path("./data.yml"))
post('/') do
- team = data['team_members'].shuffle
-
- HTTParty.post(data['url'], body: { text: team, channel: "\##{params['channel_name']}" }.to_json )
+ HTTParty.post(data['url'], body: { text: data['team_members'].shuffle.join(', '), channel: "\##{params['channel_name']}" }.to_json )
? ++++++ +++++++++++++++++++++++++++++
end | 5 | 0.454545 | 2 | 3 |
c140c1a6d32c2caaf9f0e5a87efd219b9573608a | shub/tool.py | shub/tool.py | import click, importlib
from shub.utils import missing_modules
def missingmod_cmd(modules):
modlist = ", ".join(modules)
@click.command(help="*DISABLED* - requires %s" % modlist)
@click.pass_context
def cmd(ctx):
click.echo("Error: '%s' command requires %s" % (ctx.info_name, modlist))
ctx.exit(1)
return cmd
@click.group(help="Scrapinghub command-line client")
def cli():
pass
module_deps = {
"deploy": ["scrapy", "setuptools"],
"login": [],
}
for command, modules in module_deps.iteritems():
m = missing_modules(*modules)
if m:
cli.add_command(missingmod_cmd(m), command)
else:
module_path = "shub." + command
command_module = importlib.import_module(module_path)
cli.add_command(command_module.cli, command)
| import click, importlib
from shub.utils import missing_modules
def missingmod_cmd(modules):
modlist = ", ".join(modules)
@click.command(help="*DISABLED* - requires %s" % modlist)
@click.pass_context
def cmd(ctx):
click.echo("Error: '%s' command requires %s" % (ctx.info_name, modlist))
ctx.exit(1)
return cmd
@click.group(help="Scrapinghub command-line client")
def cli():
pass
module_deps = {
"deploy": ["scrapy", "setuptools"],
"login": [],
}
for command, modules in module_deps.iteritems():
m = missing_modules(*modules)
if m:
cli.add_command(missingmod_cmd(m), command)
else:
module_path = "shub." + command
command_module = importlib.import_module(module_path)
command_name = command.replace('_', '-') # easier to type
cli.add_command(command_module.cli, command)
| Use hifens instead of underscore for command names | Use hifens instead of underscore for command names
| Python | bsd-3-clause | scrapinghub/shub | python | ## Code Before:
import click, importlib
from shub.utils import missing_modules
def missingmod_cmd(modules):
modlist = ", ".join(modules)
@click.command(help="*DISABLED* - requires %s" % modlist)
@click.pass_context
def cmd(ctx):
click.echo("Error: '%s' command requires %s" % (ctx.info_name, modlist))
ctx.exit(1)
return cmd
@click.group(help="Scrapinghub command-line client")
def cli():
pass
module_deps = {
"deploy": ["scrapy", "setuptools"],
"login": [],
}
for command, modules in module_deps.iteritems():
m = missing_modules(*modules)
if m:
cli.add_command(missingmod_cmd(m), command)
else:
module_path = "shub." + command
command_module = importlib.import_module(module_path)
cli.add_command(command_module.cli, command)
## Instruction:
Use hifens instead of underscore for command names
## Code After:
import click, importlib
from shub.utils import missing_modules
def missingmod_cmd(modules):
modlist = ", ".join(modules)
@click.command(help="*DISABLED* - requires %s" % modlist)
@click.pass_context
def cmd(ctx):
click.echo("Error: '%s' command requires %s" % (ctx.info_name, modlist))
ctx.exit(1)
return cmd
@click.group(help="Scrapinghub command-line client")
def cli():
pass
module_deps = {
"deploy": ["scrapy", "setuptools"],
"login": [],
}
for command, modules in module_deps.iteritems():
m = missing_modules(*modules)
if m:
cli.add_command(missingmod_cmd(m), command)
else:
module_path = "shub." + command
command_module = importlib.import_module(module_path)
command_name = command.replace('_', '-') # easier to type
cli.add_command(command_module.cli, command)
| import click, importlib
from shub.utils import missing_modules
def missingmod_cmd(modules):
modlist = ", ".join(modules)
@click.command(help="*DISABLED* - requires %s" % modlist)
@click.pass_context
def cmd(ctx):
click.echo("Error: '%s' command requires %s" % (ctx.info_name, modlist))
ctx.exit(1)
return cmd
@click.group(help="Scrapinghub command-line client")
def cli():
pass
module_deps = {
"deploy": ["scrapy", "setuptools"],
"login": [],
}
for command, modules in module_deps.iteritems():
m = missing_modules(*modules)
if m:
cli.add_command(missingmod_cmd(m), command)
else:
module_path = "shub." + command
command_module = importlib.import_module(module_path)
+ command_name = command.replace('_', '-') # easier to type
cli.add_command(command_module.cli, command) | 1 | 0.034483 | 1 | 0 |
7e0a0f27dd76bdca5114a8f6f71f62faef8e7a56 | app/Console/Commands/MarkPastEventsDeleted.php | app/Console/Commands/MarkPastEventsDeleted.php | <?php
namespace App\Console\Commands;
use Exception;
use Illuminate\Console\Command;
use Illuminate\Support\Carbon;
use Illuminate\Support\Facades\DB;
class MarkPastEventsDeleted extends Command
{
/**
* The name and signature of the console command.
*
* @var string
*/
protected $signature = 'harvester:mark-past-events-deleted';
/**
* The console command description.
*
* @var string
*/
protected $description = 'Finds any events and performances that are in the past and soft-deletes them. This is intended to be run at 12:01AM.';
/**
* Create a new command instance.
*
* @return void
*/
public function __construct()
{
parent::__construct();
}
/**
* Execute the console command.
*
* @return mixed
* @throws Exception
*/
public function handle()
{
$now = new Carbon();
$eventCounts = DB::table('events')->where('occurs_at', '<', $now)->whereNull('deleted_at')->update(['deleted_at' => $now]);
$this->info($eventCounts . ' Events were soft-deleted.');
$performancesCounts = DB::table('performances')->where('occurs_at', '<', $now)->whereNull('deleted_at')->update(['deleted_at' => $now]);
$this->info($performancesCounts . ' Performances were soft-deleted.');
}
}
| <?php
namespace App\Console\Commands;
use Exception;
use Illuminate\Console\Command;
use Illuminate\Support\Carbon;
use Illuminate\Support\Facades\DB;
class MarkPastEventsDeleted extends Command
{
/**
* The name and signature of the console command.
*
* @var string
*/
protected $signature = 'harvester:mark-past-events-deleted';
/**
* The console command description.
*
* @var string
*/
protected $description = 'Finds any events and performances that are in the past and soft-deletes them. This is intended to be run at 12:01AM.';
/**
* Create a new command instance.
*
* @return void
*/
public function __construct()
{
parent::__construct();
}
/**
* Execute the console command.
*
* @return mixed
* @throws Exception
*/
public function handle()
{
$now = new Carbon();
$endOfYesterday = new Carbon('midnight today');
$endOfYesterday->subSecond();
$eventCounts = DB::table('events')->where('occurs_at', '<=', $endOfYesterday)->whereNull('deleted_at')->update(['deleted_at' => $now]);
$this->info($eventCounts . ' Events were soft-deleted.');
$performancesCounts = DB::table('performances')->where('occurs_at', '<=', $endOfYesterday)->whereNull('deleted_at')->update(['deleted_at' => $now]);
$this->info($performancesCounts . ' Performances were soft-deleted.');
}
}
| Improve soft-deleting of past events | Improve soft-deleting of past events
| PHP | mit | jwcobb/tevo-harvester,jwcobb/tevo-harvester | php | ## Code Before:
<?php
namespace App\Console\Commands;
use Exception;
use Illuminate\Console\Command;
use Illuminate\Support\Carbon;
use Illuminate\Support\Facades\DB;
class MarkPastEventsDeleted extends Command
{
/**
* The name and signature of the console command.
*
* @var string
*/
protected $signature = 'harvester:mark-past-events-deleted';
/**
* The console command description.
*
* @var string
*/
protected $description = 'Finds any events and performances that are in the past and soft-deletes them. This is intended to be run at 12:01AM.';
/**
* Create a new command instance.
*
* @return void
*/
public function __construct()
{
parent::__construct();
}
/**
* Execute the console command.
*
* @return mixed
* @throws Exception
*/
public function handle()
{
$now = new Carbon();
$eventCounts = DB::table('events')->where('occurs_at', '<', $now)->whereNull('deleted_at')->update(['deleted_at' => $now]);
$this->info($eventCounts . ' Events were soft-deleted.');
$performancesCounts = DB::table('performances')->where('occurs_at', '<', $now)->whereNull('deleted_at')->update(['deleted_at' => $now]);
$this->info($performancesCounts . ' Performances were soft-deleted.');
}
}
## Instruction:
Improve soft-deleting of past events
## Code After:
<?php
namespace App\Console\Commands;
use Exception;
use Illuminate\Console\Command;
use Illuminate\Support\Carbon;
use Illuminate\Support\Facades\DB;
class MarkPastEventsDeleted extends Command
{
/**
* The name and signature of the console command.
*
* @var string
*/
protected $signature = 'harvester:mark-past-events-deleted';
/**
* The console command description.
*
* @var string
*/
protected $description = 'Finds any events and performances that are in the past and soft-deletes them. This is intended to be run at 12:01AM.';
/**
* Create a new command instance.
*
* @return void
*/
public function __construct()
{
parent::__construct();
}
/**
* Execute the console command.
*
* @return mixed
* @throws Exception
*/
public function handle()
{
$now = new Carbon();
$endOfYesterday = new Carbon('midnight today');
$endOfYesterday->subSecond();
$eventCounts = DB::table('events')->where('occurs_at', '<=', $endOfYesterday)->whereNull('deleted_at')->update(['deleted_at' => $now]);
$this->info($eventCounts . ' Events were soft-deleted.');
$performancesCounts = DB::table('performances')->where('occurs_at', '<=', $endOfYesterday)->whereNull('deleted_at')->update(['deleted_at' => $now]);
$this->info($performancesCounts . ' Performances were soft-deleted.');
}
}
| <?php
namespace App\Console\Commands;
use Exception;
use Illuminate\Console\Command;
use Illuminate\Support\Carbon;
use Illuminate\Support\Facades\DB;
class MarkPastEventsDeleted extends Command
{
/**
* The name and signature of the console command.
*
* @var string
*/
protected $signature = 'harvester:mark-past-events-deleted';
/**
* The console command description.
*
* @var string
*/
protected $description = 'Finds any events and performances that are in the past and soft-deletes them. This is intended to be run at 12:01AM.';
/**
* Create a new command instance.
*
* @return void
*/
public function __construct()
{
parent::__construct();
}
/**
* Execute the console command.
*
* @return mixed
* @throws Exception
*/
public function handle()
{
$now = new Carbon();
+ $endOfYesterday = new Carbon('midnight today');
+ $endOfYesterday->subSecond();
+
- $eventCounts = DB::table('events')->where('occurs_at', '<', $now)->whereNull('deleted_at')->update(['deleted_at' => $now]);
? ^^
+ $eventCounts = DB::table('events')->where('occurs_at', '<=', $endOfYesterday)->whereNull('deleted_at')->update(['deleted_at' => $now]);
? + + ^^^^^^^^^^^^
$this->info($eventCounts . ' Events were soft-deleted.');
- $performancesCounts = DB::table('performances')->where('occurs_at', '<', $now)->whereNull('deleted_at')->update(['deleted_at' => $now]);
? ^^
+ $performancesCounts = DB::table('performances')->where('occurs_at', '<=', $endOfYesterday)->whereNull('deleted_at')->update(['deleted_at' => $now]);
? + + ^^^^^^^^^^^^
$this->info($performancesCounts . ' Performances were soft-deleted.');
}
} | 7 | 0.132075 | 5 | 2 |
eb61e5c989cda3f5e021150f91561a88ba6db73e | setuptools/tests/py26compat.py | setuptools/tests/py26compat.py | import sys
import unittest
import tarfile
try:
# provide skipIf for Python 2.4-2.6
skipIf = unittest.skipIf
except AttributeError:
def skipIf(condition, reason):
def skipper(func):
def skip(*args, **kwargs):
return
if condition:
return skip
return func
return skipper
def _tarfile_open_ex(*args, **kwargs):
"""
Extend result as a context manager.
"""
res = tarfile.open(*args, **kwargs)
res.__exit__ = lambda exc_type, exc_value, traceback: res.close()
res.__enter__ = lambda: res
return res
tarfile_open = _tarfile_open_ex if sys.version_info < (2,7) else tarfile.open
| import sys
import unittest
import tarfile
import contextlib
try:
# provide skipIf for Python 2.4-2.6
skipIf = unittest.skipIf
except AttributeError:
def skipIf(condition, reason):
def skipper(func):
def skip(*args, **kwargs):
return
if condition:
return skip
return func
return skipper
def _tarfile_open_ex(*args, **kwargs):
"""
Extend result as a context manager.
"""
return contextlib.closing(tarfile.open(*args, **kwargs))
tarfile_open = _tarfile_open_ex if sys.version_info < (2,7) else tarfile.open
| Use contextlib.closing on tarfile compat shim | Use contextlib.closing on tarfile compat shim
| Python | mit | pypa/setuptools,pypa/setuptools,pypa/setuptools | python | ## Code Before:
import sys
import unittest
import tarfile
try:
# provide skipIf for Python 2.4-2.6
skipIf = unittest.skipIf
except AttributeError:
def skipIf(condition, reason):
def skipper(func):
def skip(*args, **kwargs):
return
if condition:
return skip
return func
return skipper
def _tarfile_open_ex(*args, **kwargs):
"""
Extend result as a context manager.
"""
res = tarfile.open(*args, **kwargs)
res.__exit__ = lambda exc_type, exc_value, traceback: res.close()
res.__enter__ = lambda: res
return res
tarfile_open = _tarfile_open_ex if sys.version_info < (2,7) else tarfile.open
## Instruction:
Use contextlib.closing on tarfile compat shim
## Code After:
import sys
import unittest
import tarfile
import contextlib
try:
# provide skipIf for Python 2.4-2.6
skipIf = unittest.skipIf
except AttributeError:
def skipIf(condition, reason):
def skipper(func):
def skip(*args, **kwargs):
return
if condition:
return skip
return func
return skipper
def _tarfile_open_ex(*args, **kwargs):
"""
Extend result as a context manager.
"""
return contextlib.closing(tarfile.open(*args, **kwargs))
tarfile_open = _tarfile_open_ex if sys.version_info < (2,7) else tarfile.open
| import sys
import unittest
import tarfile
+ import contextlib
try:
# provide skipIf for Python 2.4-2.6
skipIf = unittest.skipIf
except AttributeError:
def skipIf(condition, reason):
def skipper(func):
def skip(*args, **kwargs):
return
if condition:
return skip
return func
return skipper
def _tarfile_open_ex(*args, **kwargs):
"""
Extend result as a context manager.
"""
+ return contextlib.closing(tarfile.open(*args, **kwargs))
- res = tarfile.open(*args, **kwargs)
- res.__exit__ = lambda exc_type, exc_value, traceback: res.close()
- res.__enter__ = lambda: res
- return res
tarfile_open = _tarfile_open_ex if sys.version_info < (2,7) else tarfile.open | 6 | 0.222222 | 2 | 4 |
367d5d6196c3c21e2d1353b258801e6d5e14e602 | xos/core/models/node.py | xos/core/models/node.py | import os
from django.db import models
from core.models import PlCoreBase
from core.models.plcorebase import StrippedCharField
from core.models import Site, SiteDeployment, SitePrivilege
from core.models import Tag
from django.contrib.contenttypes import generic
# Create your models here.
class Node(PlCoreBase):
name = StrippedCharField(max_length=200, unique=True, help_text="Name of the Node")
site_deployment = models.ForeignKey(SiteDeployment, related_name='nodes')
site = models.ForeignKey(Site, null=True, blank=True, related_name='nodes')
tags = generic.GenericRelation(Tag)
def __unicode__(self): return u'%s' % (self.name)
def save(self, *args, **kwds):
if self.site is None and self.site_deployment is not None:
self.site = self.site_deployment.site
super(Node, self).save(*args, **kwds)
def can_update(self, user):
return user.can_update_site(self.site, allow=['tech'])
| import os
from django.db import models
from core.models import PlCoreBase
from core.models.plcorebase import StrippedCharField
from core.models import Site, SiteDeployment, SitePrivilege
from core.models import Tag
from django.contrib.contenttypes import generic
# Create your models here.
class Node(PlCoreBase):
name = StrippedCharField(max_length=200, unique=True, help_text="Name of the Node")
site_deployment = models.ForeignKey(SiteDeployment, related_name='nodes')
site = models.ForeignKey(Site, null=True, blank=True, related_name='nodes')
tags = generic.GenericRelation(Tag)
def __unicode__(self): return u'%s' % (self.name)
def __init__(self, *args, **kwargs):
super(Node, self).__init__(*args, **kwargs)
self.no_sync=True
def save(self, *args, **kwds):
if self.site is None and self.site_deployment is not None:
self.site = self.site_deployment.site
super(Node, self).save(*args, **kwds)
def can_update(self, user):
return user.can_update_site(self.site, allow=['tech'])
| Make Node syncs a noop | Make Node syncs a noop
| Python | apache-2.0 | jermowery/xos,cboling/xos,cboling/xos,cboling/xos,jermowery/xos,cboling/xos,xmaruto/mcord,jermowery/xos,xmaruto/mcord,xmaruto/mcord,cboling/xos,jermowery/xos,xmaruto/mcord | python | ## Code Before:
import os
from django.db import models
from core.models import PlCoreBase
from core.models.plcorebase import StrippedCharField
from core.models import Site, SiteDeployment, SitePrivilege
from core.models import Tag
from django.contrib.contenttypes import generic
# Create your models here.
class Node(PlCoreBase):
name = StrippedCharField(max_length=200, unique=True, help_text="Name of the Node")
site_deployment = models.ForeignKey(SiteDeployment, related_name='nodes')
site = models.ForeignKey(Site, null=True, blank=True, related_name='nodes')
tags = generic.GenericRelation(Tag)
def __unicode__(self): return u'%s' % (self.name)
def save(self, *args, **kwds):
if self.site is None and self.site_deployment is not None:
self.site = self.site_deployment.site
super(Node, self).save(*args, **kwds)
def can_update(self, user):
return user.can_update_site(self.site, allow=['tech'])
## Instruction:
Make Node syncs a noop
## Code After:
import os
from django.db import models
from core.models import PlCoreBase
from core.models.plcorebase import StrippedCharField
from core.models import Site, SiteDeployment, SitePrivilege
from core.models import Tag
from django.contrib.contenttypes import generic
# Create your models here.
class Node(PlCoreBase):
name = StrippedCharField(max_length=200, unique=True, help_text="Name of the Node")
site_deployment = models.ForeignKey(SiteDeployment, related_name='nodes')
site = models.ForeignKey(Site, null=True, blank=True, related_name='nodes')
tags = generic.GenericRelation(Tag)
def __unicode__(self): return u'%s' % (self.name)
def __init__(self, *args, **kwargs):
super(Node, self).__init__(*args, **kwargs)
self.no_sync=True
def save(self, *args, **kwds):
if self.site is None and self.site_deployment is not None:
self.site = self.site_deployment.site
super(Node, self).save(*args, **kwds)
def can_update(self, user):
return user.can_update_site(self.site, allow=['tech'])
| import os
from django.db import models
from core.models import PlCoreBase
from core.models.plcorebase import StrippedCharField
from core.models import Site, SiteDeployment, SitePrivilege
from core.models import Tag
from django.contrib.contenttypes import generic
# Create your models here.
class Node(PlCoreBase):
name = StrippedCharField(max_length=200, unique=True, help_text="Name of the Node")
site_deployment = models.ForeignKey(SiteDeployment, related_name='nodes')
site = models.ForeignKey(Site, null=True, blank=True, related_name='nodes')
tags = generic.GenericRelation(Tag)
def __unicode__(self): return u'%s' % (self.name)
+ def __init__(self, *args, **kwargs):
+ super(Node, self).__init__(*args, **kwargs)
+ self.no_sync=True
+
def save(self, *args, **kwds):
if self.site is None and self.site_deployment is not None:
self.site = self.site_deployment.site
super(Node, self).save(*args, **kwds)
def can_update(self, user):
return user.can_update_site(self.site, allow=['tech']) | 4 | 0.153846 | 4 | 0 |
4ce98c2768ccc3674f5c51df6e9674a076aa18ab | spec/stack_master/template_compilers/yaml_spec.rb | spec/stack_master/template_compilers/yaml_spec.rb | RSpec.describe StackMaster::TemplateCompilers::Yaml do
describe '.compile' do
def compile
described_class.compile(template_file_path)
end
context 'valid YAML template' do
let(:template_file_path) { 'spec/fixtures/templates/yml/valid_myapp_vpc.yml' }
it 'produces valid JSON' do
valid_myapp_vpc_as_json = File.read('spec/fixtures/templates/json/valid_myapp_vpc.json')
expect(compile).to eq(valid_myapp_vpc_as_json)
end
end
context 'invalid YAML template' do
let(:template_file_path) { 'spec/fixtures/templates/yml/invalid_myapp_vpc.yml' }
it 'returns an error' do
expect(compile).to raise_error(StackMaster::TemplateCompilers::Yaml::CompileError)
end
end
end
end | RSpec.describe StackMaster::TemplateCompilers::Yaml do
describe '.compile' do
def compile
described_class.compile(template_file_path)
end
context 'valid YAML template' do
let(:template_file_path) { 'spec/fixtures/templates/yml/valid_myapp_vpc.yml' }
it 'produces valid JSON' do
valid_myapp_vpc_as_json = File.read('spec/fixtures/templates/json/valid_myapp_vpc.json')
valid_myapp_vpc_as_hash = JSON.parse(valid_myapp_vpc_as_json)
expect(JSON.parse(compile)).to eq(valid_myapp_vpc_as_hash)
end
end
context 'invalid YAML template' do
let(:template_file_path) { 'spec/fixtures/templates/yml/invalid_myapp_vpc.yml' }
it 'returns an error' do
expect(compile).to raise_error(StackMaster::TemplateCompilers::Yaml::CompileError)
end
end
end
end | Make yml_template spec less fragile | Make yml_template spec less fragile
When we compare generated JSON strings, the comparison can fail because
of whitespace differences. But this doesn’t necessarily indicate
incorrect JSON. So compare the parsed Hashs.
Also this allows us to have pretty human readable JSON in our fixtures
while generating plain JSON in our code.
| Ruby | mit | bulletproofnetworks/stack_master,envato/stack_master,envato/stack_master,bulletproofnetworks/stack_master,bulletproofnetworks/stack_master | ruby | ## Code Before:
RSpec.describe StackMaster::TemplateCompilers::Yaml do
describe '.compile' do
def compile
described_class.compile(template_file_path)
end
context 'valid YAML template' do
let(:template_file_path) { 'spec/fixtures/templates/yml/valid_myapp_vpc.yml' }
it 'produces valid JSON' do
valid_myapp_vpc_as_json = File.read('spec/fixtures/templates/json/valid_myapp_vpc.json')
expect(compile).to eq(valid_myapp_vpc_as_json)
end
end
context 'invalid YAML template' do
let(:template_file_path) { 'spec/fixtures/templates/yml/invalid_myapp_vpc.yml' }
it 'returns an error' do
expect(compile).to raise_error(StackMaster::TemplateCompilers::Yaml::CompileError)
end
end
end
end
## Instruction:
Make yml_template spec less fragile
When we compare generated JSON strings, the comparison can fail because
of whitespace differences. But this doesn’t necessarily indicate
incorrect JSON. So compare the parsed Hashs.
Also this allows us to have pretty human readable JSON in our fixtures
while generating plain JSON in our code.
## Code After:
RSpec.describe StackMaster::TemplateCompilers::Yaml do
describe '.compile' do
def compile
described_class.compile(template_file_path)
end
context 'valid YAML template' do
let(:template_file_path) { 'spec/fixtures/templates/yml/valid_myapp_vpc.yml' }
it 'produces valid JSON' do
valid_myapp_vpc_as_json = File.read('spec/fixtures/templates/json/valid_myapp_vpc.json')
valid_myapp_vpc_as_hash = JSON.parse(valid_myapp_vpc_as_json)
expect(JSON.parse(compile)).to eq(valid_myapp_vpc_as_hash)
end
end
context 'invalid YAML template' do
let(:template_file_path) { 'spec/fixtures/templates/yml/invalid_myapp_vpc.yml' }
it 'returns an error' do
expect(compile).to raise_error(StackMaster::TemplateCompilers::Yaml::CompileError)
end
end
end
end | RSpec.describe StackMaster::TemplateCompilers::Yaml do
describe '.compile' do
def compile
described_class.compile(template_file_path)
end
context 'valid YAML template' do
let(:template_file_path) { 'spec/fixtures/templates/yml/valid_myapp_vpc.yml' }
it 'produces valid JSON' do
valid_myapp_vpc_as_json = File.read('spec/fixtures/templates/json/valid_myapp_vpc.json')
+ valid_myapp_vpc_as_hash = JSON.parse(valid_myapp_vpc_as_json)
+
- expect(compile).to eq(valid_myapp_vpc_as_json)
? ^ ^^
+ expect(JSON.parse(compile)).to eq(valid_myapp_vpc_as_hash)
? +++++++++++ + ^^ ^
end
end
context 'invalid YAML template' do
let(:template_file_path) { 'spec/fixtures/templates/yml/invalid_myapp_vpc.yml' }
it 'returns an error' do
expect(compile).to raise_error(StackMaster::TemplateCompilers::Yaml::CompileError)
end
end
end
end | 4 | 0.166667 | 3 | 1 |
418357ead146a98f2318af6c76323e2705b79cec | cvloop/__init__.py | cvloop/__init__.py | """Provides cvloop, a ready to use OpenCV VideoCapture mapper, designed for jupyter notebooks."""
import sys
OPENCV_FOUND = False
OPENCV_VERSION_COMPATIBLE = False
try:
import cv2
OPENCV_FOUND = True
except Exception as e:
# print ("Error:", e)
print('OpenCV is not found (tried importing cv2).', file=sys.stderr)
print('''
Is OpenCV installed and properly added to your path?
you are using a virtual environment, make sure to add the path
to the OpenCV bindings to the environment\'s site-packages.
For example (MacOSX with brew):
echo /usr/local/Cellar/opencv3/HEAD-8d662a1_4/lib/python3.6/site-packages > ./.venv/lib/python3.6/site-packages/opencv.pth
Make sure that the first path contains your cv2.so!
See https://docs.python.org/3/library/site.html
''')
if OPENCV_FOUND:
MAJOR, MINOR, PATCH = cv2.__version__.split('.')
OPENCV_VERSION_COMPATIBLE = int(MAJOR) >= 3 and int(MINOR) >= 1
if not OPENCV_VERSION_COMPATIBLE:
print('OpenCV version {} is lower than 3.1!'.format(cv2.__version__), file=sys.stderr)
if OPENCV_FOUND and OPENCV_VERSION_COMPATIBLE:
from .cvloop import cvloop
from .functions import *
| """Provides cvloop, a ready to use OpenCV VideoCapture mapper, designed for jupyter notebooks."""
import sys
OPENCV_FOUND = False
OPENCV_VERSION_COMPATIBLE = False
try:
import cv2
OPENCV_FOUND = True
except ModuleNotFoundError:
print('OpenCV is not found (tried importing cv2).', file=sys.stderr)
print('''
Is OpenCV installed and properly added to your path?
you are using a virtual environment, make sure to add the path
to the OpenCV bindings to the environment\'s site-packages.
For example (MacOSX with brew):
echo /usr/local/Cellar/opencv3/HEAD-8d662a1_4/lib/python3.6/site-packages > ./.venv/lib/python3.6/site-packages/opencv.pth
Make sure that the first path contains your cv2.so!
See https://docs.python.org/3/library/site.html
''')
if OPENCV_FOUND:
MAJOR, MINOR, PATCH = cv2.__version__.split('.')
OPENCV_VERSION_COMPATIBLE = int(MAJOR) >= 3 and int(MINOR) >= 1
if not OPENCV_VERSION_COMPATIBLE:
print('OpenCV version {} is lower than 3.1!'.format(cv2.__version__), file=sys.stderr)
if OPENCV_FOUND and OPENCV_VERSION_COMPATIBLE:
from .cvloop import cvloop
from .functions import *
| Revert unnecessary change to original | Revert unnecessary change to original
| Python | mit | shoeffner/cvloop | python | ## Code Before:
"""Provides cvloop, a ready to use OpenCV VideoCapture mapper, designed for jupyter notebooks."""
import sys
OPENCV_FOUND = False
OPENCV_VERSION_COMPATIBLE = False
try:
import cv2
OPENCV_FOUND = True
except Exception as e:
# print ("Error:", e)
print('OpenCV is not found (tried importing cv2).', file=sys.stderr)
print('''
Is OpenCV installed and properly added to your path?
you are using a virtual environment, make sure to add the path
to the OpenCV bindings to the environment\'s site-packages.
For example (MacOSX with brew):
echo /usr/local/Cellar/opencv3/HEAD-8d662a1_4/lib/python3.6/site-packages > ./.venv/lib/python3.6/site-packages/opencv.pth
Make sure that the first path contains your cv2.so!
See https://docs.python.org/3/library/site.html
''')
if OPENCV_FOUND:
MAJOR, MINOR, PATCH = cv2.__version__.split('.')
OPENCV_VERSION_COMPATIBLE = int(MAJOR) >= 3 and int(MINOR) >= 1
if not OPENCV_VERSION_COMPATIBLE:
print('OpenCV version {} is lower than 3.1!'.format(cv2.__version__), file=sys.stderr)
if OPENCV_FOUND and OPENCV_VERSION_COMPATIBLE:
from .cvloop import cvloop
from .functions import *
## Instruction:
Revert unnecessary change to original
## Code After:
"""Provides cvloop, a ready to use OpenCV VideoCapture mapper, designed for jupyter notebooks."""
import sys
OPENCV_FOUND = False
OPENCV_VERSION_COMPATIBLE = False
try:
import cv2
OPENCV_FOUND = True
except ModuleNotFoundError:
print('OpenCV is not found (tried importing cv2).', file=sys.stderr)
print('''
Is OpenCV installed and properly added to your path?
you are using a virtual environment, make sure to add the path
to the OpenCV bindings to the environment\'s site-packages.
For example (MacOSX with brew):
echo /usr/local/Cellar/opencv3/HEAD-8d662a1_4/lib/python3.6/site-packages > ./.venv/lib/python3.6/site-packages/opencv.pth
Make sure that the first path contains your cv2.so!
See https://docs.python.org/3/library/site.html
''')
if OPENCV_FOUND:
MAJOR, MINOR, PATCH = cv2.__version__.split('.')
OPENCV_VERSION_COMPATIBLE = int(MAJOR) >= 3 and int(MINOR) >= 1
if not OPENCV_VERSION_COMPATIBLE:
print('OpenCV version {} is lower than 3.1!'.format(cv2.__version__), file=sys.stderr)
if OPENCV_FOUND and OPENCV_VERSION_COMPATIBLE:
from .cvloop import cvloop
from .functions import *
| """Provides cvloop, a ready to use OpenCV VideoCapture mapper, designed for jupyter notebooks."""
import sys
OPENCV_FOUND = False
OPENCV_VERSION_COMPATIBLE = False
try:
import cv2
OPENCV_FOUND = True
+ except ModuleNotFoundError:
- except Exception as e:
- # print ("Error:", e)
print('OpenCV is not found (tried importing cv2).', file=sys.stderr)
print('''
Is OpenCV installed and properly added to your path?
you are using a virtual environment, make sure to add the path
to the OpenCV bindings to the environment\'s site-packages.
For example (MacOSX with brew):
echo /usr/local/Cellar/opencv3/HEAD-8d662a1_4/lib/python3.6/site-packages > ./.venv/lib/python3.6/site-packages/opencv.pth
Make sure that the first path contains your cv2.so!
See https://docs.python.org/3/library/site.html
''')
if OPENCV_FOUND:
MAJOR, MINOR, PATCH = cv2.__version__.split('.')
OPENCV_VERSION_COMPATIBLE = int(MAJOR) >= 3 and int(MINOR) >= 1
if not OPENCV_VERSION_COMPATIBLE:
print('OpenCV version {} is lower than 3.1!'.format(cv2.__version__), file=sys.stderr)
if OPENCV_FOUND and OPENCV_VERSION_COMPATIBLE:
from .cvloop import cvloop
from .functions import *
| 3 | 0.088235 | 1 | 2 |
a78a54ac8c31f926e80d0c6724eaef97362f1315 | README.md | README.md | argg
====
A poor man's test runner for [tap](https://github.com/isaacs/node-tap), [tape](https://github.com/substack/tape), or similar, that also can be used with [istanbul](https://github.com/gotwarlost/istanbul). It's just three lines of code to `require` pathnames from the command line. Shell globbing test runner, if you will, which you can do when your tests are simple scripts.
usage
-----
Install like:
npm i argg --save-dev
…then in your package.json add:
"scripts": {
"test": "node node_modules/argg tests/*.js",
"cover": "istanbul cover node_modules/argg tests/*.js"
},
…so from the command line, you can run tests and get code coverage with istanbul like:
npm test
npm run cover
why
---
Hey ok, tap has a [nice test runner](https://github.com/isaacs/node-tap/blob/master/lib/tap-runner.js), but I would need something [like](https://github.com/substack/tape/pull/19) this for tape by itself, or for either tap or tape with istanbul. I could make a file and explicitly require the files, but that takes typing, and I know I'd miss something.
license
-------
MIT licensed by permission from my employer. See LICENSE.txt.
| argg
====
A poor man's test runner for [tap](https://github.com/isaacs/node-tap), [tape](https://github.com/substack/tape), or similar, that also can be used with [istanbul](https://github.com/gotwarlost/istanbul). It's just three lines of code to `require` pathnames from the command line. Shell globbing test runner, if you will, which you can do when your tests are simple scripts.
note! `argg`'s functionality [now ships with `tape`](https://github.com/substack/tape/blob/master/bin/tape) as of version 0.3.0.
usage
-----
Install like:
npm i argg --save-dev
…then in your package.json add:
"scripts": {
"test": "node node_modules/argg tests/*.js",
"cover": "istanbul cover node_modules/argg tests/*.js"
},
…so from the command line, you can run tests and get code coverage with istanbul like:
npm test
npm run cover
why
---
Hey ok, tap has a [nice test runner](https://github.com/isaacs/node-tap/blob/master/lib/tap-runner.js), but I would need something [like](https://github.com/substack/tape/pull/19) this for tape by itself, or for either tap or tape with istanbul. I could make a file and explicitly require the files, but that takes typing, and I know I'd miss something.
license
-------
MIT licensed by permission from my employer. See LICENSE.txt.
| Add note that argg is now obselete | Add note that argg is now obselete | Markdown | mit | isao/argg | markdown | ## Code Before:
argg
====
A poor man's test runner for [tap](https://github.com/isaacs/node-tap), [tape](https://github.com/substack/tape), or similar, that also can be used with [istanbul](https://github.com/gotwarlost/istanbul). It's just three lines of code to `require` pathnames from the command line. Shell globbing test runner, if you will, which you can do when your tests are simple scripts.
usage
-----
Install like:
npm i argg --save-dev
…then in your package.json add:
"scripts": {
"test": "node node_modules/argg tests/*.js",
"cover": "istanbul cover node_modules/argg tests/*.js"
},
…so from the command line, you can run tests and get code coverage with istanbul like:
npm test
npm run cover
why
---
Hey ok, tap has a [nice test runner](https://github.com/isaacs/node-tap/blob/master/lib/tap-runner.js), but I would need something [like](https://github.com/substack/tape/pull/19) this for tape by itself, or for either tap or tape with istanbul. I could make a file and explicitly require the files, but that takes typing, and I know I'd miss something.
license
-------
MIT licensed by permission from my employer. See LICENSE.txt.
## Instruction:
Add note that argg is now obselete
## Code After:
argg
====
A poor man's test runner for [tap](https://github.com/isaacs/node-tap), [tape](https://github.com/substack/tape), or similar, that also can be used with [istanbul](https://github.com/gotwarlost/istanbul). It's just three lines of code to `require` pathnames from the command line. Shell globbing test runner, if you will, which you can do when your tests are simple scripts.
note! `argg`'s functionality [now ships with `tape`](https://github.com/substack/tape/blob/master/bin/tape) as of version 0.3.0.
usage
-----
Install like:
npm i argg --save-dev
…then in your package.json add:
"scripts": {
"test": "node node_modules/argg tests/*.js",
"cover": "istanbul cover node_modules/argg tests/*.js"
},
…so from the command line, you can run tests and get code coverage with istanbul like:
npm test
npm run cover
why
---
Hey ok, tap has a [nice test runner](https://github.com/isaacs/node-tap/blob/master/lib/tap-runner.js), but I would need something [like](https://github.com/substack/tape/pull/19) this for tape by itself, or for either tap or tape with istanbul. I could make a file and explicitly require the files, but that takes typing, and I know I'd miss something.
license
-------
MIT licensed by permission from my employer. See LICENSE.txt.
| argg
====
A poor man's test runner for [tap](https://github.com/isaacs/node-tap), [tape](https://github.com/substack/tape), or similar, that also can be used with [istanbul](https://github.com/gotwarlost/istanbul). It's just three lines of code to `require` pathnames from the command line. Shell globbing test runner, if you will, which you can do when your tests are simple scripts.
+
+ note! `argg`'s functionality [now ships with `tape`](https://github.com/substack/tape/blob/master/bin/tape) as of version 0.3.0.
usage
-----
Install like:
npm i argg --save-dev
…then in your package.json add:
"scripts": {
"test": "node node_modules/argg tests/*.js",
"cover": "istanbul cover node_modules/argg tests/*.js"
},
…so from the command line, you can run tests and get code coverage with istanbul like:
npm test
npm run cover
why
---
Hey ok, tap has a [nice test runner](https://github.com/isaacs/node-tap/blob/master/lib/tap-runner.js), but I would need something [like](https://github.com/substack/tape/pull/19) this for tape by itself, or for either tap or tape with istanbul. I could make a file and explicitly require the files, but that takes typing, and I know I'd miss something.
license
-------
MIT licensed by permission from my employer. See LICENSE.txt. | 2 | 0.068966 | 2 | 0 |
31dbae6478740c7651f6be44a5c921ea53baa877 | app/views/layouts/application.html.erb | app/views/layouts/application.html.erb | <!DOCTYPE html>
<html>
<head><%= render 'layouts/head' %></head>
<body>
<script type="text/javascript">
// a function to use in .html.erb view to run jquery query when it was loaded
function defer(method) {
if (window.jQuery)
method();
else
setTimeout(function() { defer(method) }, 50);
}
</script>
<%= render 'layouts/navigation' %>
<div id="main-container" class="container">
<%= render 'layouts/flash' %>
<div class="row">
<%= yield %>
</div>
<%= render 'layouts/footer' %>
</div>
<%= javascript_include_tag 'application' %>
<script type="text/javascript" src="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.7/js/bootstrap.min.js"></script>
<noscript id="deferred-styles">
<%= stylesheet_link_tag 'application' , :media => "all" %>
</noscript>
<% if @jsonld %>
<script type="application/ld+json">
<%= @jsonld.to_json.html_safe %>
</script>
<% end %>
</body>
</html> | <!DOCTYPE html>
<html>
<head><%= render 'layouts/head' %></head>
<body>
<script type="text/javascript">
// a function to use in .html.erb view to run jquery query when it was loaded
function defer(method) {
if (window.jQuery)
method();
else
setTimeout(function() { defer(method) }, 50);
}
</script>
<%= render 'layouts/navigation' %>
<div id="main-container" class="container">
<%= render 'layouts/flash' %>
<div class="row">
<%= yield %>
</div>
<%= render 'layouts/footer' %>
</div>
<noscript id="deferred-styles">
<%= stylesheet_link_tag 'application' , :media => "all" %>
</noscript>
<%= javascript_include_tag 'application' %>
<script type="text/javascript" src="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.7/js/bootstrap.min.js"></script>
<% if @jsonld %>
<script type="application/ld+json">
<%= @jsonld.to_json.html_safe %>
</script>
<% end %>
</body>
</html> | Correct first css error loading | Correct first css error loading
| HTML+ERB | mit | madeindjs/raspberry_cook,RaspberryCook/website,madeindjs/raspberry_cook,RaspberryCook/website,RaspberryCook/website,madeindjs/raspberry_cook | html+erb | ## Code Before:
<!DOCTYPE html>
<html>
<head><%= render 'layouts/head' %></head>
<body>
<script type="text/javascript">
// a function to use in .html.erb view to run jquery query when it was loaded
function defer(method) {
if (window.jQuery)
method();
else
setTimeout(function() { defer(method) }, 50);
}
</script>
<%= render 'layouts/navigation' %>
<div id="main-container" class="container">
<%= render 'layouts/flash' %>
<div class="row">
<%= yield %>
</div>
<%= render 'layouts/footer' %>
</div>
<%= javascript_include_tag 'application' %>
<script type="text/javascript" src="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.7/js/bootstrap.min.js"></script>
<noscript id="deferred-styles">
<%= stylesheet_link_tag 'application' , :media => "all" %>
</noscript>
<% if @jsonld %>
<script type="application/ld+json">
<%= @jsonld.to_json.html_safe %>
</script>
<% end %>
</body>
</html>
## Instruction:
Correct first css error loading
## Code After:
<!DOCTYPE html>
<html>
<head><%= render 'layouts/head' %></head>
<body>
<script type="text/javascript">
// a function to use in .html.erb view to run jquery query when it was loaded
function defer(method) {
if (window.jQuery)
method();
else
setTimeout(function() { defer(method) }, 50);
}
</script>
<%= render 'layouts/navigation' %>
<div id="main-container" class="container">
<%= render 'layouts/flash' %>
<div class="row">
<%= yield %>
</div>
<%= render 'layouts/footer' %>
</div>
<noscript id="deferred-styles">
<%= stylesheet_link_tag 'application' , :media => "all" %>
</noscript>
<%= javascript_include_tag 'application' %>
<script type="text/javascript" src="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.7/js/bootstrap.min.js"></script>
<% if @jsonld %>
<script type="application/ld+json">
<%= @jsonld.to_json.html_safe %>
</script>
<% end %>
</body>
</html> | <!DOCTYPE html>
<html>
<head><%= render 'layouts/head' %></head>
<body>
<script type="text/javascript">
// a function to use in .html.erb view to run jquery query when it was loaded
function defer(method) {
if (window.jQuery)
method();
else
setTimeout(function() { defer(method) }, 50);
}
</script>
<%= render 'layouts/navigation' %>
<div id="main-container" class="container">
<%= render 'layouts/flash' %>
<div class="row">
<%= yield %>
</div>
<%= render 'layouts/footer' %>
</div>
- <%= javascript_include_tag 'application' %>
- <script type="text/javascript" src="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.7/js/bootstrap.min.js"></script>
<noscript id="deferred-styles">
<%= stylesheet_link_tag 'application' , :media => "all" %>
</noscript>
+ <%= javascript_include_tag 'application' %>
+ <script type="text/javascript" src="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.7/js/bootstrap.min.js"></script>
<% if @jsonld %>
<script type="application/ld+json">
<%= @jsonld.to_json.html_safe %>
</script>
<% end %>
</body>
</html> | 4 | 0.121212 | 2 | 2 |
38844f28ec6b27f3507a46fe44ce9a045a0f5d79 | .travis.yml | .travis.yml | sudo: required
language: bash
services:
- docker
env:
- PHP_RUNTIME='php:7.1-cli'
- PHP_RUNTIME='php:7.2-cli' PHPUNIT_OPTS='--coverage-text --coverage-clover=coverage.clover'
- PHP_RUNTIME='hhvm/hhvm:nightly'
matrix:
allow_failures:
- env: PHP_RUNTIME='hhvm/hhvm:nightly'
fast_finish: true
install:
- ./dockerfile.sh | tee /dev/tty | docker build -t msgpack -
script:
- docker run --rm -v $(pwd):/msgpack -w /msgpack -e PHPUNIT_OPTS="$PHPUNIT_OPTS" msgpack
after_script:
- if [[ -f coverage.clover ]]; then
curl -sSOL https://scrutinizer-ci.com/ocular.phar &&
docker run --rm -v $(pwd):/coverage -w /coverage msgpack php ocular.phar code-coverage:upload --format=php-clover coverage.clover;
fi
| sudo: required
language: bash
services:
- docker
env:
- PHP_RUNTIME='php:7.1-cli'
- PHP_RUNTIME='php:7.2-cli' PHPUNIT_OPTS='--coverage-text --coverage-clover=coverage.clover'
- PHP_RUNTIME='hhvm/hhvm:nightly'
matrix:
allow_failures:
- env: PHP_RUNTIME='hhvm/hhvm:nightly'
fast_finish: true
install:
- ./dockerfile.sh | tee /dev/tty | docker build -t msgpack -
script:
- docker run --rm -v $(pwd):/msgpack -w /msgpack -e PHPUNIT_OPTS="$PHPUNIT_OPTS" msgpack
after_script:
- if [[ -f coverage.clover ]]; then
curl -sSOL https://scrutinizer-ci.com/ocular.phar &&
docker run --rm -v $(pwd):/msgpack -w /msgpack msgpack php ocular.phar code-coverage:upload --format=php-clover coverage.clover;
fi
| Fix scrutinizer fails parsing a coverage file | Fix scrutinizer fails parsing a coverage file
| YAML | mit | rybakit/msgpack.php,rybakit/msgpack.php | yaml | ## Code Before:
sudo: required
language: bash
services:
- docker
env:
- PHP_RUNTIME='php:7.1-cli'
- PHP_RUNTIME='php:7.2-cli' PHPUNIT_OPTS='--coverage-text --coverage-clover=coverage.clover'
- PHP_RUNTIME='hhvm/hhvm:nightly'
matrix:
allow_failures:
- env: PHP_RUNTIME='hhvm/hhvm:nightly'
fast_finish: true
install:
- ./dockerfile.sh | tee /dev/tty | docker build -t msgpack -
script:
- docker run --rm -v $(pwd):/msgpack -w /msgpack -e PHPUNIT_OPTS="$PHPUNIT_OPTS" msgpack
after_script:
- if [[ -f coverage.clover ]]; then
curl -sSOL https://scrutinizer-ci.com/ocular.phar &&
docker run --rm -v $(pwd):/coverage -w /coverage msgpack php ocular.phar code-coverage:upload --format=php-clover coverage.clover;
fi
## Instruction:
Fix scrutinizer fails parsing a coverage file
## Code After:
sudo: required
language: bash
services:
- docker
env:
- PHP_RUNTIME='php:7.1-cli'
- PHP_RUNTIME='php:7.2-cli' PHPUNIT_OPTS='--coverage-text --coverage-clover=coverage.clover'
- PHP_RUNTIME='hhvm/hhvm:nightly'
matrix:
allow_failures:
- env: PHP_RUNTIME='hhvm/hhvm:nightly'
fast_finish: true
install:
- ./dockerfile.sh | tee /dev/tty | docker build -t msgpack -
script:
- docker run --rm -v $(pwd):/msgpack -w /msgpack -e PHPUNIT_OPTS="$PHPUNIT_OPTS" msgpack
after_script:
- if [[ -f coverage.clover ]]; then
curl -sSOL https://scrutinizer-ci.com/ocular.phar &&
docker run --rm -v $(pwd):/msgpack -w /msgpack msgpack php ocular.phar code-coverage:upload --format=php-clover coverage.clover;
fi
| sudo: required
language: bash
services:
- docker
env:
- PHP_RUNTIME='php:7.1-cli'
- PHP_RUNTIME='php:7.2-cli' PHPUNIT_OPTS='--coverage-text --coverage-clover=coverage.clover'
- PHP_RUNTIME='hhvm/hhvm:nightly'
matrix:
allow_failures:
- env: PHP_RUNTIME='hhvm/hhvm:nightly'
fast_finish: true
install:
- ./dockerfile.sh | tee /dev/tty | docker build -t msgpack -
script:
- docker run --rm -v $(pwd):/msgpack -w /msgpack -e PHPUNIT_OPTS="$PHPUNIT_OPTS" msgpack
after_script:
- if [[ -f coverage.clover ]]; then
curl -sSOL https://scrutinizer-ci.com/ocular.phar &&
- docker run --rm -v $(pwd):/coverage -w /coverage msgpack php ocular.phar code-coverage:upload --format=php-clover coverage.clover;
? ^^^^^^^ ^^^^^^^
+ docker run --rm -v $(pwd):/msgpack -w /msgpack msgpack php ocular.phar code-coverage:upload --format=php-clover coverage.clover;
? +++++ ^ +++++ ^
fi | 2 | 0.071429 | 1 | 1 |
6ea8681b9224beb02aed1b7d0b61ad00880517c5 | index.js | index.js | 'use strict';
var objectToString = Object.prototype.toString;
var ERROR_TYPE = '[object Error]';
module.exports = isError;
function isError(err) {
return objectToString.call(err) === ERROR_TYPE;
}
| 'use strict';
var objectToString = Object.prototype.toString;
var getPrototypeOf = Object.getPrototypeOf;
var ERROR_TYPE = '[object Error]';
module.exports = isError;
function isError(err) {
while (err) {
if (objectToString.call(err) === ERROR_TYPE) {
return true;
}
err = getPrototypeOf(err);
}
return false;
}
| Fix for all combinations of foreign and inherited | Fix for all combinations of foreign and inherited
| JavaScript | mit | Raynos/is-error | javascript | ## Code Before:
'use strict';
var objectToString = Object.prototype.toString;
var ERROR_TYPE = '[object Error]';
module.exports = isError;
function isError(err) {
return objectToString.call(err) === ERROR_TYPE;
}
## Instruction:
Fix for all combinations of foreign and inherited
## Code After:
'use strict';
var objectToString = Object.prototype.toString;
var getPrototypeOf = Object.getPrototypeOf;
var ERROR_TYPE = '[object Error]';
module.exports = isError;
function isError(err) {
while (err) {
if (objectToString.call(err) === ERROR_TYPE) {
return true;
}
err = getPrototypeOf(err);
}
return false;
}
| 'use strict';
var objectToString = Object.prototype.toString;
+ var getPrototypeOf = Object.getPrototypeOf;
var ERROR_TYPE = '[object Error]';
module.exports = isError;
function isError(err) {
+ while (err) {
- return objectToString.call(err) === ERROR_TYPE;
? ------ ^
+ if (objectToString.call(err) === ERROR_TYPE) {
? +++++++ ^^^
+ return true;
+ }
+ err = getPrototypeOf(err);
+ }
+ return false;
} | 9 | 0.9 | 8 | 1 |
e289089052968b838b895e0d9f1df984772970b5 | orm/model_map.go | orm/model_map.go | package orm
import (
"github.com/go-pg/pg/v10/types"
)
type mapModel struct {
hookStubs
ptr *map[string]interface{}
m map[string]interface{}
}
var _ Model = (*mapModel)(nil)
func newMapModel(ptr *map[string]interface{}) *mapModel {
return &mapModel{
ptr: ptr,
}
}
func (mapModel) Init() error {
return nil
}
func (m mapModel) NextColumnScanner() ColumnScanner {
return m
}
func (m mapModel) AddColumnScanner(ColumnScanner) error {
return nil
}
func (m mapModel) ScanColumn(col types.ColumnInfo, rd types.Reader, n int) error {
val, err := types.ReadColumnValue(col, rd, n)
if err != nil {
return err
}
if m.m == nil {
m.m = make(map[string]interface{})
*m.ptr = m.m
}
m.m[col.Name] = val
return nil
}
func (mapModel) useQueryOne() bool {
return true
}
| package orm
import (
"github.com/go-pg/pg/v10/types"
)
type mapModel struct {
hookStubs
ptr *map[string]interface{}
m map[string]interface{}
}
var _ Model = (*mapModel)(nil)
func newMapModel(ptr *map[string]interface{}) *mapModel {
model := &mapModel{
ptr: ptr,
}
if ptr != nil {
model.m = *ptr
}
return model
}
func (mapModel) Init() error {
return nil
}
func (m *mapModel) NextColumnScanner() ColumnScanner {
return m
}
func (m mapModel) AddColumnScanner(ColumnScanner) error {
return nil
}
func (m *mapModel) ScanColumn(col types.ColumnInfo, rd types.Reader, n int) error {
val, err := types.ReadColumnValue(col, rd, n)
if err != nil {
return err
}
if m.m == nil {
m.m = make(map[string]interface{})
*m.ptr = m.m
}
m.m[col.Name] = val
return nil
}
func (mapModel) useQueryOne() bool {
return true
}
| Fix map update and insert | Fix map update and insert
| Go | bsd-2-clause | go-pg/pg,vmihailenco/pg,go-pg/pg | go | ## Code Before:
package orm
import (
"github.com/go-pg/pg/v10/types"
)
type mapModel struct {
hookStubs
ptr *map[string]interface{}
m map[string]interface{}
}
var _ Model = (*mapModel)(nil)
func newMapModel(ptr *map[string]interface{}) *mapModel {
return &mapModel{
ptr: ptr,
}
}
func (mapModel) Init() error {
return nil
}
func (m mapModel) NextColumnScanner() ColumnScanner {
return m
}
func (m mapModel) AddColumnScanner(ColumnScanner) error {
return nil
}
func (m mapModel) ScanColumn(col types.ColumnInfo, rd types.Reader, n int) error {
val, err := types.ReadColumnValue(col, rd, n)
if err != nil {
return err
}
if m.m == nil {
m.m = make(map[string]interface{})
*m.ptr = m.m
}
m.m[col.Name] = val
return nil
}
func (mapModel) useQueryOne() bool {
return true
}
## Instruction:
Fix map update and insert
## Code After:
package orm
import (
"github.com/go-pg/pg/v10/types"
)
type mapModel struct {
hookStubs
ptr *map[string]interface{}
m map[string]interface{}
}
var _ Model = (*mapModel)(nil)
func newMapModel(ptr *map[string]interface{}) *mapModel {
model := &mapModel{
ptr: ptr,
}
if ptr != nil {
model.m = *ptr
}
return model
}
func (mapModel) Init() error {
return nil
}
func (m *mapModel) NextColumnScanner() ColumnScanner {
return m
}
func (m mapModel) AddColumnScanner(ColumnScanner) error {
return nil
}
func (m *mapModel) ScanColumn(col types.ColumnInfo, rd types.Reader, n int) error {
val, err := types.ReadColumnValue(col, rd, n)
if err != nil {
return err
}
if m.m == nil {
m.m = make(map[string]interface{})
*m.ptr = m.m
}
m.m[col.Name] = val
return nil
}
func (mapModel) useQueryOne() bool {
return true
}
| package orm
import (
"github.com/go-pg/pg/v10/types"
)
type mapModel struct {
hookStubs
ptr *map[string]interface{}
m map[string]interface{}
}
var _ Model = (*mapModel)(nil)
func newMapModel(ptr *map[string]interface{}) *mapModel {
- return &mapModel{
+ model := &mapModel{
ptr: ptr,
}
+ if ptr != nil {
+ model.m = *ptr
+ }
+ return model
}
func (mapModel) Init() error {
return nil
}
- func (m mapModel) NextColumnScanner() ColumnScanner {
+ func (m *mapModel) NextColumnScanner() ColumnScanner {
? +
return m
}
func (m mapModel) AddColumnScanner(ColumnScanner) error {
return nil
}
- func (m mapModel) ScanColumn(col types.ColumnInfo, rd types.Reader, n int) error {
+ func (m *mapModel) ScanColumn(col types.ColumnInfo, rd types.Reader, n int) error {
? +
val, err := types.ReadColumnValue(col, rd, n)
if err != nil {
return err
}
if m.m == nil {
m.m = make(map[string]interface{})
*m.ptr = m.m
}
m.m[col.Name] = val
return nil
}
func (mapModel) useQueryOne() bool {
return true
} | 10 | 0.2 | 7 | 3 |
3fcc8a1217de1f163431aeb235ca6a819e5a683a | test/browser/nightwatch.json | test/browser/nightwatch.json | {
"src_folders": ["."],
"output_folder": "./reports",
"custom_commands_path": "",
"custom_assertions_path": "",
"globals_path": "",
"selenium": {
"start_process": false,
"server_path": "",
"log_path": "",
"host": "127.0.0.1",
"port": 4444,
"cli_args": {
"webdriver.chrome.driver": "",
"webdriver.ie.driver": ""
}
},
"test_settings": {
"default": {
"launch_url": "http://localhost:44199",
"selenium_port": 4444,
"selenium_host": "localhost",
"silent": true,
"screenshots": {
"enabled": false,
"path": ""
},
"desiredCapabilities": {
"browserName": "firefox",
"javascriptEnabled": true,
"acceptSslCerts": true
}
},
"chrome": {
"desiredCapabilities": {
"browserName": "chrome",
"javascriptEnabled": true,
"acceptSslCerts": true
}
},
"saucelabs": {
"selenium_port": 80,
"selenium_host": "ondemand.saucelabs.com",
"username": "mas-ci",
"access_key": "${SAUCE_ACCESS_KEY}",
"desiredCapabilities": {
"tunnel-identifier": "${TRAVIS_JOB_NUMBER}"
}
}
}
}
| {
"src_folders": ["."],
"output_folder": "./reports",
"custom_commands_path": "",
"custom_assertions_path": "",
"globals_path": "",
"selenium": {
"start_process": false,
"server_path": "",
"log_path": "",
"host": "127.0.0.1",
"port": 4444,
"cli_args": {
"webdriver.chrome.driver": "",
"webdriver.ie.driver": "",
"build": "travis-build-${TRAVIS_JOB_NUMBER}"
}
},
"test_settings": {
"default": {
"launch_url": "http://localhost:44199",
"selenium_port": 4444,
"selenium_host": "localhost",
"silent": true,
"screenshots": {
"enabled": false,
"path": ""
},
"desiredCapabilities": {
"browserName": "firefox",
"javascriptEnabled": true,
"acceptSslCerts": true
}
},
"chrome": {
"desiredCapabilities": {
"browserName": "chrome",
"javascriptEnabled": true,
"acceptSslCerts": true
}
},
"saucelabs": {
"selenium_port": 80,
"selenium_host": "ondemand.saucelabs.com",
"username": "mas-ci",
"access_key": "${SAUCE_ACCESS_KEY}",
"desiredCapabilities": {
"tunnel-identifier": "${TRAVIS_JOB_NUMBER}"
}
}
}
}
| Add build number for Sauce Labs | Add build number for Sauce Labs
| JSON | apache-2.0 | ilkkao/mas,ilkkao/mas,ilkkao/mas,ilkkao/mas | json | ## Code Before:
{
"src_folders": ["."],
"output_folder": "./reports",
"custom_commands_path": "",
"custom_assertions_path": "",
"globals_path": "",
"selenium": {
"start_process": false,
"server_path": "",
"log_path": "",
"host": "127.0.0.1",
"port": 4444,
"cli_args": {
"webdriver.chrome.driver": "",
"webdriver.ie.driver": ""
}
},
"test_settings": {
"default": {
"launch_url": "http://localhost:44199",
"selenium_port": 4444,
"selenium_host": "localhost",
"silent": true,
"screenshots": {
"enabled": false,
"path": ""
},
"desiredCapabilities": {
"browserName": "firefox",
"javascriptEnabled": true,
"acceptSslCerts": true
}
},
"chrome": {
"desiredCapabilities": {
"browserName": "chrome",
"javascriptEnabled": true,
"acceptSslCerts": true
}
},
"saucelabs": {
"selenium_port": 80,
"selenium_host": "ondemand.saucelabs.com",
"username": "mas-ci",
"access_key": "${SAUCE_ACCESS_KEY}",
"desiredCapabilities": {
"tunnel-identifier": "${TRAVIS_JOB_NUMBER}"
}
}
}
}
## Instruction:
Add build number for Sauce Labs
## Code After:
{
"src_folders": ["."],
"output_folder": "./reports",
"custom_commands_path": "",
"custom_assertions_path": "",
"globals_path": "",
"selenium": {
"start_process": false,
"server_path": "",
"log_path": "",
"host": "127.0.0.1",
"port": 4444,
"cli_args": {
"webdriver.chrome.driver": "",
"webdriver.ie.driver": "",
"build": "travis-build-${TRAVIS_JOB_NUMBER}"
}
},
"test_settings": {
"default": {
"launch_url": "http://localhost:44199",
"selenium_port": 4444,
"selenium_host": "localhost",
"silent": true,
"screenshots": {
"enabled": false,
"path": ""
},
"desiredCapabilities": {
"browserName": "firefox",
"javascriptEnabled": true,
"acceptSslCerts": true
}
},
"chrome": {
"desiredCapabilities": {
"browserName": "chrome",
"javascriptEnabled": true,
"acceptSslCerts": true
}
},
"saucelabs": {
"selenium_port": 80,
"selenium_host": "ondemand.saucelabs.com",
"username": "mas-ci",
"access_key": "${SAUCE_ACCESS_KEY}",
"desiredCapabilities": {
"tunnel-identifier": "${TRAVIS_JOB_NUMBER}"
}
}
}
}
| {
"src_folders": ["."],
"output_folder": "./reports",
"custom_commands_path": "",
"custom_assertions_path": "",
"globals_path": "",
"selenium": {
"start_process": false,
"server_path": "",
"log_path": "",
"host": "127.0.0.1",
"port": 4444,
"cli_args": {
"webdriver.chrome.driver": "",
- "webdriver.ie.driver": ""
+ "webdriver.ie.driver": "",
? +
+ "build": "travis-build-${TRAVIS_JOB_NUMBER}"
}
},
"test_settings": {
"default": {
"launch_url": "http://localhost:44199",
"selenium_port": 4444,
"selenium_host": "localhost",
"silent": true,
"screenshots": {
"enabled": false,
"path": ""
},
"desiredCapabilities": {
"browserName": "firefox",
"javascriptEnabled": true,
"acceptSslCerts": true
}
},
"chrome": {
"desiredCapabilities": {
"browserName": "chrome",
"javascriptEnabled": true,
"acceptSslCerts": true
}
},
"saucelabs": {
"selenium_port": 80,
"selenium_host": "ondemand.saucelabs.com",
"username": "mas-ci",
"access_key": "${SAUCE_ACCESS_KEY}",
"desiredCapabilities": {
"tunnel-identifier": "${TRAVIS_JOB_NUMBER}"
}
}
}
} | 3 | 0.054545 | 2 | 1 |
0a8a271ba720287686bc2a57bf618655c4487626 | app/views/drones/show.html.erb | app/views/drones/show.html.erb | <div id=top-bar>
<div class=help-status id=help-not-connected>
No one watching...
</div>
</div>
<div id="main-content no-bottom">
<div id="walk-button">
Walk <!-- placeholder for button to hit to keep walking -->
</div>
<div id="home-safely">
Safe <!-- need proper route -->
</div>
</div>
<script>
var address = window.location.href;
var split_address = address.split('/');
var event_id = split_address[split_address.length-3];
function statusCheck() {
$.ajax({
url: "/events/"+event_id+"/status"
})
.done(function(serverResponse) {
console.log("success");
console.log(serverResponse.event_status);
if (serverResponse.event_status == "connected") {
$(".help-status").removeAttr("id", "help-not-connected");
$(".help-status").attr("id", "help-connected");
$(".help-status").html("Connected")
clearInterval(refreshElement);
}
})
.fail(function(serverResponse) {
console.log("fail")
console.log(serverResponse)
})
}
var refreshElement = setInterval(statusCheck, 5000);
</script>
| <div id=top-bar>
<div class=help-status id=help-not-connected>
No one watching...
</div>
</div>
<div id="main-content no-bottom">
<div id="walk-button">
Walk <!-- placeholder for button to hit to keep walking -->
</div>
<div class="spinner" id=hidden-div>
<i class="fa fa-spinner fa-pulse fa-3x fa-fw"></i>
<span class="sr-only">Walking...</span>
</div>
<div id="home-safely">
Safe <!-- need proper route -->
</div>
</div>
<script>
var address = window.location.href;
var split_address = address.split('/');
var event_id = split_address[split_address.length-3];
function statusCheck() {
$.ajax({
url: "/events/"+event_id+"/status"
})
.done(function(serverResponse) {
console.log("success");
console.log(serverResponse.event_status);
if (serverResponse.event_status == "connected") {
$(".help-status").removeAttr("id", "help-not-connected");
$(".help-status").attr("id", "help-connected");
$(".help-status").html("Connected")
clearInterval(refreshElement);
}
})
.fail(function(serverResponse) {
console.log("fail")
console.log(serverResponse)
})
}
var refreshElement = setInterval(statusCheck, 5000);
</script>
| Add spinner div under walk button | Add spinner div under walk button
| HTML+ERB | mit | ShawnTe/guardian,ShawnTe/guardian,ShawnTe/guardian | html+erb | ## Code Before:
<div id=top-bar>
<div class=help-status id=help-not-connected>
No one watching...
</div>
</div>
<div id="main-content no-bottom">
<div id="walk-button">
Walk <!-- placeholder for button to hit to keep walking -->
</div>
<div id="home-safely">
Safe <!-- need proper route -->
</div>
</div>
<script>
var address = window.location.href;
var split_address = address.split('/');
var event_id = split_address[split_address.length-3];
function statusCheck() {
$.ajax({
url: "/events/"+event_id+"/status"
})
.done(function(serverResponse) {
console.log("success");
console.log(serverResponse.event_status);
if (serverResponse.event_status == "connected") {
$(".help-status").removeAttr("id", "help-not-connected");
$(".help-status").attr("id", "help-connected");
$(".help-status").html("Connected")
clearInterval(refreshElement);
}
})
.fail(function(serverResponse) {
console.log("fail")
console.log(serverResponse)
})
}
var refreshElement = setInterval(statusCheck, 5000);
</script>
## Instruction:
Add spinner div under walk button
## Code After:
<div id=top-bar>
<div class=help-status id=help-not-connected>
No one watching...
</div>
</div>
<div id="main-content no-bottom">
<div id="walk-button">
Walk <!-- placeholder for button to hit to keep walking -->
</div>
<div class="spinner" id=hidden-div>
<i class="fa fa-spinner fa-pulse fa-3x fa-fw"></i>
<span class="sr-only">Walking...</span>
</div>
<div id="home-safely">
Safe <!-- need proper route -->
</div>
</div>
<script>
var address = window.location.href;
var split_address = address.split('/');
var event_id = split_address[split_address.length-3];
function statusCheck() {
$.ajax({
url: "/events/"+event_id+"/status"
})
.done(function(serverResponse) {
console.log("success");
console.log(serverResponse.event_status);
if (serverResponse.event_status == "connected") {
$(".help-status").removeAttr("id", "help-not-connected");
$(".help-status").attr("id", "help-connected");
$(".help-status").html("Connected")
clearInterval(refreshElement);
}
})
.fail(function(serverResponse) {
console.log("fail")
console.log(serverResponse)
})
}
var refreshElement = setInterval(statusCheck, 5000);
</script>
| <div id=top-bar>
-
+
<div class=help-status id=help-not-connected>
No one watching...
</div>
-
+
</div>
- <div id="main-content no-bottom">
? --
+ <div id="main-content no-bottom">
<div id="walk-button">
Walk <!-- placeholder for button to hit to keep walking -->
+ </div>
+ <div class="spinner" id=hidden-div>
+ <i class="fa fa-spinner fa-pulse fa-3x fa-fw"></i>
+ <span class="sr-only">Walking...</span>
</div>
<div id="home-safely">
Safe <!-- need proper route -->
</div>
</div>
<script>
var address = window.location.href;
var split_address = address.split('/');
var event_id = split_address[split_address.length-3];
function statusCheck() {
$.ajax({
url: "/events/"+event_id+"/status"
})
.done(function(serverResponse) {
console.log("success");
console.log(serverResponse.event_status);
if (serverResponse.event_status == "connected") {
$(".help-status").removeAttr("id", "help-not-connected");
$(".help-status").attr("id", "help-connected");
$(".help-status").html("Connected")
clearInterval(refreshElement);
}
})
.fail(function(serverResponse) {
console.log("fail")
console.log(serverResponse)
})
}
var refreshElement = setInterval(statusCheck, 5000);
</script> | 10 | 0.227273 | 7 | 3 |
0d78aa85a045385e71e55d2401f48f7517d653cd | tests/TemplateOverrideExtensionTest.php | tests/TemplateOverrideExtensionTest.php | <?php
class TemplateOverrideExtensionTest extends SapphireTest {
protected static $fixture_file = 'template-override/tests/pages.yml';
public function setUpOnce() {
parent::setupOnce();
}
public function testUpdateCMSFields() {
$page = $this->objFromFixture('Page', 'page1');
$fields = $page->getCMSFields();
$tab = $fields->findOrMakeTab('Root.Template');
$fields = $tab->FieldList();
$names = array();
foreach ($fields as $field) {
$names[] = $field->getName();
}
$expected = array('AlternativeTemplate', 'infofield');
$this->assertEquals($expected, $names);
}
}
| <?php
class TemplateOverrideExtensionTest extends SapphireTest {
protected static $fixture_file = 'template-override/tests/pages.yml';
public function testUpdateCMSFields() {
$page = $this->objFromFixture('Page', 'page1');
$fields = $page->getCMSFields();
$tab = $fields->findOrMakeTab('Root.Template');
$fields = $tab->FieldList();
$names = array();
foreach ($fields as $field) {
$names[] = $field->getName();
}
$expected = array('AlternativeTemplate', 'infofield');
$this->assertEquals($expected, $names);
}
}
| Remove setup method that was not showing in coverage tests | MINOR: Remove setup method that was not showing in coverage tests
| PHP | bsd-3-clause | gordonbanderson/template-override | php | ## Code Before:
<?php
class TemplateOverrideExtensionTest extends SapphireTest {
protected static $fixture_file = 'template-override/tests/pages.yml';
public function setUpOnce() {
parent::setupOnce();
}
public function testUpdateCMSFields() {
$page = $this->objFromFixture('Page', 'page1');
$fields = $page->getCMSFields();
$tab = $fields->findOrMakeTab('Root.Template');
$fields = $tab->FieldList();
$names = array();
foreach ($fields as $field) {
$names[] = $field->getName();
}
$expected = array('AlternativeTemplate', 'infofield');
$this->assertEquals($expected, $names);
}
}
## Instruction:
MINOR: Remove setup method that was not showing in coverage tests
## Code After:
<?php
class TemplateOverrideExtensionTest extends SapphireTest {
protected static $fixture_file = 'template-override/tests/pages.yml';
public function testUpdateCMSFields() {
$page = $this->objFromFixture('Page', 'page1');
$fields = $page->getCMSFields();
$tab = $fields->findOrMakeTab('Root.Template');
$fields = $tab->FieldList();
$names = array();
foreach ($fields as $field) {
$names[] = $field->getName();
}
$expected = array('AlternativeTemplate', 'infofield');
$this->assertEquals($expected, $names);
}
}
| <?php
class TemplateOverrideExtensionTest extends SapphireTest {
protected static $fixture_file = 'template-override/tests/pages.yml';
-
- public function setUpOnce() {
- parent::setupOnce();
- }
-
public function testUpdateCMSFields() {
$page = $this->objFromFixture('Page', 'page1');
$fields = $page->getCMSFields();
$tab = $fields->findOrMakeTab('Root.Template');
$fields = $tab->FieldList();
$names = array();
foreach ($fields as $field) {
$names[] = $field->getName();
}
$expected = array('AlternativeTemplate', 'infofield');
$this->assertEquals($expected, $names);
}
} | 5 | 0.208333 | 0 | 5 |
6fbdecb3e5d5d82fe5753cfef35c2a1cbc7a94c6 | locales/kab/event-resources.properties | locales/kab/event-resources.properties | host_maker_party=Suddes Maker Party
| host_maker_party=Suddes Maker Party
host_maker_party_paragraph=Suddes tadyant inek Maker Party s useqdec n ifecka akked urmud ilaqen. Sendeh udiɣ selmed tamezdagnut inek akken ad tesnulfu. Ak-d-mudd tiɣbula ilaqen i usɣiwes, tuddsa akked ummeslay ɣef tedyant inek udiɣ ak-d-mudd afus di yal amecwaṛ.
maker_party_activities=Armud n Marker Party
maker_party_activity_1_title=Aɣerbaz n imimesisen
maker_party_activity_1_body=Rnu imimesisen udiɣ meslay ɣef wayen ad d-yeglu usaḍuf yefnan ɣef izerfan n umeskar.
maker_party_activity_2_title=Attekki deg ayen yelhan i tudert tunnimt
maker_party_activity_2_body=Mudd afus i yemdanen ad issinen azal n ugbur i nezmer ad nseqdec s wudem ilelli s tmerna n ugbur inek daw n turagt Creative Commons.
maker_party_activity_3_title=Rnu aɣawas n uweẓlu
| Update Kabyle (kab) localization of Mozilla Learning Network | Pontoon: Update Kabyle (kab) localization of Mozilla Learning Network
Localization authors:
- belkacem77 <belkacem77@gmail.com>
| INI | mpl-2.0 | mozilla/teach.webmaker.org,mozilla/learning.mozilla.org,mozilla/teach.webmaker.org,mozilla/teach.mozilla.org,mozilla/teach.mozilla.org | ini | ## Code Before:
host_maker_party=Suddes Maker Party
## Instruction:
Pontoon: Update Kabyle (kab) localization of Mozilla Learning Network
Localization authors:
- belkacem77 <belkacem77@gmail.com>
## Code After:
host_maker_party=Suddes Maker Party
host_maker_party_paragraph=Suddes tadyant inek Maker Party s useqdec n ifecka akked urmud ilaqen. Sendeh udiɣ selmed tamezdagnut inek akken ad tesnulfu. Ak-d-mudd tiɣbula ilaqen i usɣiwes, tuddsa akked ummeslay ɣef tedyant inek udiɣ ak-d-mudd afus di yal amecwaṛ.
maker_party_activities=Armud n Marker Party
maker_party_activity_1_title=Aɣerbaz n imimesisen
maker_party_activity_1_body=Rnu imimesisen udiɣ meslay ɣef wayen ad d-yeglu usaḍuf yefnan ɣef izerfan n umeskar.
maker_party_activity_2_title=Attekki deg ayen yelhan i tudert tunnimt
maker_party_activity_2_body=Mudd afus i yemdanen ad issinen azal n ugbur i nezmer ad nseqdec s wudem ilelli s tmerna n ugbur inek daw n turagt Creative Commons.
maker_party_activity_3_title=Rnu aɣawas n uweẓlu
| host_maker_party=Suddes Maker Party
+ host_maker_party_paragraph=Suddes tadyant inek Maker Party s useqdec n ifecka akked urmud ilaqen. Sendeh udiɣ selmed tamezdagnut inek akken ad tesnulfu. Ak-d-mudd tiɣbula ilaqen i usɣiwes, tuddsa akked ummeslay ɣef tedyant inek udiɣ ak-d-mudd afus di yal amecwaṛ.
+ maker_party_activities=Armud n Marker Party
+ maker_party_activity_1_title=Aɣerbaz n imimesisen
+ maker_party_activity_1_body=Rnu imimesisen udiɣ meslay ɣef wayen ad d-yeglu usaḍuf yefnan ɣef izerfan n umeskar.
+ maker_party_activity_2_title=Attekki deg ayen yelhan i tudert tunnimt
+ maker_party_activity_2_body=Mudd afus i yemdanen ad issinen azal n ugbur i nezmer ad nseqdec s wudem ilelli s tmerna n ugbur inek daw n turagt Creative Commons.
+ maker_party_activity_3_title=Rnu aɣawas n uweẓlu | 7 | 7 | 7 | 0 |
552c1bb3dec2fc1ac270be9a219f8fb50ca1510d | src/mmw/js/src/modeling/templates/scenarioTabPanel.html | src/mmw/js/src/modeling/templates/scenarioTabPanel.html | <a href="#{{ cid }}" data-scenario-cid="{{ cid }}" aria-controls="home" role="tab" data-toggle="tab" class="tab-name"><span>{{ name }}</span></a>
<div class="scenario-btn-dropdown">
<div class="dropdown">
<button class="btn btn-sm btn-icon dark dropdown-toggle" type="button" data-toggle="dropdown" aria-expanded="true">
<i class="fa fa-caret-down"></i>
</button>
<ul class="dropdown-menu menu-right" role="menu">
{% if editable %}
<li role="presentation"><a role="menuitem" tabindex="-1" data-action="share">Share</a></li>
{% endif %}
<li role="presentation"><a role="menuitem" tabindex="-1" data-action="print">Print</a></li>
{% if not is_current_conditions %}
{% if editable %}
<li role="presentation"><a role="menuitem" tabindex="-1" data-action="rename">Rename</a></li>
<li role="presentation"><a role="menuitem" tabindex="-1" data-action="duplicate">Duplicate</a></li>
<li role="presentation"><a role="menuitem" tabindex="-1" data-action="delete">Delete</a></li>
{% endif %}
{% endif %}
</ul>
</div>
</div>
| <a href="#{{ cid }}" data-scenario-cid="{{ cid }}" aria-controls="home" role="tab" data-toggle="tab" class="tab-name"><span>{{ name }}</span></a>
{% if active %}
<div class="scenario-btn-dropdown">
<div class="dropdown">
<button class="btn btn-sm btn-icon dark dropdown-toggle" type="button" data-toggle="dropdown" aria-expanded="true">
<i class="fa fa-caret-down"></i>
</button>
<ul class="dropdown-menu menu-right" role="menu">
{% if editable %}
<li role="presentation"><a role="menuitem" tabindex="-1" data-action="share">Share</a></li>
{% endif %}
<li role="presentation"><a role="menuitem" tabindex="-1" data-action="print">Print</a></li>
{% if not is_current_conditions %}
{% if editable %}
<li role="presentation"><a role="menuitem" tabindex="-1" data-action="rename">Rename</a></li>
<li role="presentation"><a role="menuitem" tabindex="-1" data-action="duplicate">Duplicate</a></li>
<li role="presentation"><a role="menuitem" tabindex="-1" data-action="delete">Delete</a></li>
{% endif %}
{% endif %}
</ul>
</div>
</div>
{% endif %}
| Hide dropdown button on inactive scenario tabs | Hide dropdown button on inactive scenario tabs
This is more in line with conventions of Google Sheets, and allows us to
be more confident in the assumption that the current URL (used in the
Share modal) is of the active scenario.
Refs #378
| HTML | apache-2.0 | WikiWatershed/model-my-watershed,lliss/model-my-watershed,kdeloach/model-my-watershed,kdeloach/model-my-watershed,kdeloach/model-my-watershed,lliss/model-my-watershed,mmcfarland/model-my-watershed,project-icp/bee-pollinator-app,lliss/model-my-watershed,WikiWatershed/model-my-watershed,mmcfarland/model-my-watershed,WikiWatershed/model-my-watershed,lliss/model-my-watershed,lliss/model-my-watershed,WikiWatershed/model-my-watershed,mmcfarland/model-my-watershed,project-icp/bee-pollinator-app,kdeloach/model-my-watershed,project-icp/bee-pollinator-app,mmcfarland/model-my-watershed,kdeloach/model-my-watershed,mmcfarland/model-my-watershed,project-icp/bee-pollinator-app,WikiWatershed/model-my-watershed | html | ## Code Before:
<a href="#{{ cid }}" data-scenario-cid="{{ cid }}" aria-controls="home" role="tab" data-toggle="tab" class="tab-name"><span>{{ name }}</span></a>
<div class="scenario-btn-dropdown">
<div class="dropdown">
<button class="btn btn-sm btn-icon dark dropdown-toggle" type="button" data-toggle="dropdown" aria-expanded="true">
<i class="fa fa-caret-down"></i>
</button>
<ul class="dropdown-menu menu-right" role="menu">
{% if editable %}
<li role="presentation"><a role="menuitem" tabindex="-1" data-action="share">Share</a></li>
{% endif %}
<li role="presentation"><a role="menuitem" tabindex="-1" data-action="print">Print</a></li>
{% if not is_current_conditions %}
{% if editable %}
<li role="presentation"><a role="menuitem" tabindex="-1" data-action="rename">Rename</a></li>
<li role="presentation"><a role="menuitem" tabindex="-1" data-action="duplicate">Duplicate</a></li>
<li role="presentation"><a role="menuitem" tabindex="-1" data-action="delete">Delete</a></li>
{% endif %}
{% endif %}
</ul>
</div>
</div>
## Instruction:
Hide dropdown button on inactive scenario tabs
This is more in line with conventions of Google Sheets, and allows us to
be more confident in the assumption that the current URL (used in the
Share modal) is of the active scenario.
Refs #378
## Code After:
<a href="#{{ cid }}" data-scenario-cid="{{ cid }}" aria-controls="home" role="tab" data-toggle="tab" class="tab-name"><span>{{ name }}</span></a>
{% if active %}
<div class="scenario-btn-dropdown">
<div class="dropdown">
<button class="btn btn-sm btn-icon dark dropdown-toggle" type="button" data-toggle="dropdown" aria-expanded="true">
<i class="fa fa-caret-down"></i>
</button>
<ul class="dropdown-menu menu-right" role="menu">
{% if editable %}
<li role="presentation"><a role="menuitem" tabindex="-1" data-action="share">Share</a></li>
{% endif %}
<li role="presentation"><a role="menuitem" tabindex="-1" data-action="print">Print</a></li>
{% if not is_current_conditions %}
{% if editable %}
<li role="presentation"><a role="menuitem" tabindex="-1" data-action="rename">Rename</a></li>
<li role="presentation"><a role="menuitem" tabindex="-1" data-action="duplicate">Duplicate</a></li>
<li role="presentation"><a role="menuitem" tabindex="-1" data-action="delete">Delete</a></li>
{% endif %}
{% endif %}
</ul>
</div>
</div>
{% endif %}
| <a href="#{{ cid }}" data-scenario-cid="{{ cid }}" aria-controls="home" role="tab" data-toggle="tab" class="tab-name"><span>{{ name }}</span></a>
+ {% if active %}
- <div class="scenario-btn-dropdown">
+ <div class="scenario-btn-dropdown">
? ++++
- <div class="dropdown">
+ <div class="dropdown">
? ++++
- <button class="btn btn-sm btn-icon dark dropdown-toggle" type="button" data-toggle="dropdown" aria-expanded="true">
+ <button class="btn btn-sm btn-icon dark dropdown-toggle" type="button" data-toggle="dropdown" aria-expanded="true">
? ++++
- <i class="fa fa-caret-down"></i>
+ <i class="fa fa-caret-down"></i>
? ++++
- </button>
+ </button>
? ++++
- <ul class="dropdown-menu menu-right" role="menu">
+ <ul class="dropdown-menu menu-right" role="menu">
? ++++
- {% if editable %}
- <li role="presentation"><a role="menuitem" tabindex="-1" data-action="share">Share</a></li>
- {% endif %}
- <li role="presentation"><a role="menuitem" tabindex="-1" data-action="print">Print</a></li>
- {% if not is_current_conditions %}
{% if editable %}
- <li role="presentation"><a role="menuitem" tabindex="-1" data-action="rename">Rename</a></li>
? ---- ^^^ ^
+ <li role="presentation"><a role="menuitem" tabindex="-1" data-action="share">Share</a></li>
? ++++ +++ ^^ ^
- <li role="presentation"><a role="menuitem" tabindex="-1" data-action="duplicate">Duplicate</a></li>
- <li role="presentation"><a role="menuitem" tabindex="-1" data-action="delete">Delete</a></li>
{% endif %}
+ <li role="presentation"><a role="menuitem" tabindex="-1" data-action="print">Print</a></li>
+ {% if not is_current_conditions %}
+ {% if editable %}
+ <li role="presentation"><a role="menuitem" tabindex="-1" data-action="rename">Rename</a></li>
+ <li role="presentation"><a role="menuitem" tabindex="-1" data-action="duplicate">Duplicate</a></li>
+ <li role="presentation"><a role="menuitem" tabindex="-1" data-action="delete">Delete</a></li>
+ {% endif %}
- {% endif %}
+ {% endif %}
? ++++
- </ul>
+ </ul>
? ++++
+ </div>
</div>
- </div>
+ {% endif %} | 36 | 1.714286 | 19 | 17 |
f0649ac71ba84a5fbbda2a0076a289cc4fc220d7 | lib/webfontloader/demo/public/fontdeck.html | lib/webfontloader/demo/public/fontdeck.html | <!doctype html>
<html>
<head>
<link href="/basic.css" rel="stylesheet" type="text/css">
<script type="text/javascript" src="/webfont.js"></script>
<script type="text/javascript">
WebFont.load({
fontdeck: {
id: '161',
fonts: "Fertigo Pro Regular"
}
});
</script>
<style type="text/css">
/* Use classes to prove that Typekit triggers the event system correctly */
h1 {
font-family: 'Fertigo Pro Regular';
visibility: hidden;
}
.wf-fertigoproregular-n4-active h1 {
visibility: visible;
}
</style>
</head>
<body>
<h1>
Hello World. I am Arial Black.
</h1>
<hr>
<p>
<a href="#" onclick="document.getElementsByTagName('body')[0].style.color = '#fff';return false;">Hide Page</a> |
<a href="/typekit.html">Reload Cached</a>
</p>
<p>
The goal of this page is to show how Fontdeck fonts load.
</p>
</body>
</html>
| <!doctype html>
<html>
<head>
<link href="/basic.css" rel="stylesheet" type="text/css">
<script type="text/javascript" src="/webfont.js"></script>
<script type="text/javascript">
WebFont.load({
fontdeck: {
id: '161',
api: 'http://dev.int.fontdeck.com/api/v1/project-info?'
}
});
</script>
<style type="text/css">
/* Use classes to prove that Typekit triggers the event system correctly */
h1 {
font-family: 'Fertigo Pro Regular';
visibility: hidden;
}
.wf-fertigoproregular-n4-active h1 {
visibility: visible;
}
</style>
</head>
<body>
<h1>
Hello World. I am Arial Black.
</h1>
<hr>
<p>
<a href="#" onclick="document.getElementsByTagName('body')[0].style.color = '#fff';return false;">Hide Page</a> |
<a href="/fontdeck.html">Reload Cached</a>
</p>
<p>
The goal of this page is to show how Fontdeck fonts load.
</p>
</body>
</html>
| Use dev API at the moment, until in production. | Use dev API at the moment, until in production.
| HTML | apache-2.0 | sapics/webfontloader,Monotype/webfontloader,zeixcom/webfontloader,zeixcom/webfontloader,kevinrodbe/webfontloader,sapics/webfontloader,joelrich/webfontloader,digideskio/webfontloader,exsodus3249/webfontloader,omo/webfontloader,ahmadruhaifi/webfontloader,omo/webfontloader,sapics/webfontloader,digideskio/webfontloader,JBusch/webfontloader,typekit/webfontloader,digideskio/webfontloader,exsodus3249/webfontloader,mettjus/webfontloader,typekit/webfontloader,JBusch/webfontloader,mettjus/webfontloader,Monotype/webfontloader,armandocanals/webfontloader,ramghaju/webfontloader,exsodus3249/webfontloader,zeixcom/webfontloader,ramghaju/webfontloader,Monotype/webfontloader,ramghaju/webfontloader,kevinrodbe/webfontloader,joelrich/webfontloader,kevinrodbe/webfontloader,mettjus/webfontloader,omo/webfontloader,ahmadruhaifi/webfontloader,typekit/webfontloader,JBusch/webfontloader,armandocanals/webfontloader,joelrich/webfontloader,ahmadruhaifi/webfontloader,armandocanals/webfontloader | html | ## Code Before:
<!doctype html>
<html>
<head>
<link href="/basic.css" rel="stylesheet" type="text/css">
<script type="text/javascript" src="/webfont.js"></script>
<script type="text/javascript">
WebFont.load({
fontdeck: {
id: '161',
fonts: "Fertigo Pro Regular"
}
});
</script>
<style type="text/css">
/* Use classes to prove that Typekit triggers the event system correctly */
h1 {
font-family: 'Fertigo Pro Regular';
visibility: hidden;
}
.wf-fertigoproregular-n4-active h1 {
visibility: visible;
}
</style>
</head>
<body>
<h1>
Hello World. I am Arial Black.
</h1>
<hr>
<p>
<a href="#" onclick="document.getElementsByTagName('body')[0].style.color = '#fff';return false;">Hide Page</a> |
<a href="/typekit.html">Reload Cached</a>
</p>
<p>
The goal of this page is to show how Fontdeck fonts load.
</p>
</body>
</html>
## Instruction:
Use dev API at the moment, until in production.
## Code After:
<!doctype html>
<html>
<head>
<link href="/basic.css" rel="stylesheet" type="text/css">
<script type="text/javascript" src="/webfont.js"></script>
<script type="text/javascript">
WebFont.load({
fontdeck: {
id: '161',
api: 'http://dev.int.fontdeck.com/api/v1/project-info?'
}
});
</script>
<style type="text/css">
/* Use classes to prove that Typekit triggers the event system correctly */
h1 {
font-family: 'Fertigo Pro Regular';
visibility: hidden;
}
.wf-fertigoproregular-n4-active h1 {
visibility: visible;
}
</style>
</head>
<body>
<h1>
Hello World. I am Arial Black.
</h1>
<hr>
<p>
<a href="#" onclick="document.getElementsByTagName('body')[0].style.color = '#fff';return false;">Hide Page</a> |
<a href="/fontdeck.html">Reload Cached</a>
</p>
<p>
The goal of this page is to show how Fontdeck fonts load.
</p>
</body>
</html>
| <!doctype html>
<html>
<head>
<link href="/basic.css" rel="stylesheet" type="text/css">
<script type="text/javascript" src="/webfont.js"></script>
<script type="text/javascript">
WebFont.load({
fontdeck: {
id: '161',
- fonts: "Fertigo Pro Regular"
+ api: 'http://dev.int.fontdeck.com/api/v1/project-info?'
}
});
</script>
<style type="text/css">
/* Use classes to prove that Typekit triggers the event system correctly */
h1 {
font-family: 'Fertigo Pro Regular';
visibility: hidden;
}
.wf-fertigoproregular-n4-active h1 {
visibility: visible;
}
</style>
</head>
<body>
<h1>
Hello World. I am Arial Black.
</h1>
<hr>
<p>
<a href="#" onclick="document.getElementsByTagName('body')[0].style.color = '#fff';return false;">Hide Page</a> |
- <a href="/typekit.html">Reload Cached</a>
? ^^ --
+ <a href="/fontdeck.html">Reload Cached</a>
? +++ ^ +
</p>
<p>
The goal of this page is to show how Fontdeck fonts load.
</p>
</body>
</html> | 4 | 0.105263 | 2 | 2 |
36d6fbdd774d314c6e37dc0c8f9233e42ac8a59f | resources/create-module/templates/module_template.html | resources/create-module/templates/module_template.html | <!DOCTYPE html>
<html>
<head>
<script src="../../build/yui/yui.js" type="text/javascript"></script>
<script src="../../build/aui-base/aui-base.js" type="text/javascript"></script>
<link rel="stylesheet" href="../../build/aui-skins/core/css/main.css" type="text/css" media="screen" title="no title" charset="utf-8" />
<style type="text/css" media="screen">
body {
font-size: 12px;
}
#wrapper {
padding: 10px;
}
</style>
</head>
<body>
<div id="wrapper">
<h1>Alloy - @module.base.name.camelcase@ Demo</h1>
<div id="demo"></div>
</div>
<script type="text/javascript" charset="utf-8">
AUI().ready('@module.name@', function(A) {
});
</script>
</body>
</html> | <!DOCTYPE html>
<html>
<head>
<script src="../../build/yui/yui.js" type="text/javascript"></script>
<script src="../../build/aui-base/aui-base.js" type="text/javascript"></script>
<link rel="stylesheet" href="../../build/aui-skins/core/css/main.css" type="text/css" media="screen" title="no title" charset="utf-8" />
<style type="text/css" media="screen">
body {
font-size: 12px;
}
#wrapper {
padding: 10px;
}
</style>
</head>
<body>
<div id="wrapper">
<h1>Alloy - @module.base.name.camelcase@ Demo</h1>
<div id="demo"></div>
</div>
<script type="text/javascript" charset="utf-8">
AUI().ready('@module.name@', function(A) {
var component = new A.@module.base.name.camelcase@(
{
}
)
.render();
});
</script>
</body>
</html> | Add class instantiation on the module template html | Add class instantiation on the module template html
git-svn-id: eaac6977d0b6fa3343c402860739874625caf655@46499 05bdf26c-840f-0410-9ced-eb539d925f36
| HTML | bsd-3-clause | giros/alloy-ui,giros/alloy-ui,giros/alloy-ui,giros/alloy-ui | html | ## Code Before:
<!DOCTYPE html>
<html>
<head>
<script src="../../build/yui/yui.js" type="text/javascript"></script>
<script src="../../build/aui-base/aui-base.js" type="text/javascript"></script>
<link rel="stylesheet" href="../../build/aui-skins/core/css/main.css" type="text/css" media="screen" title="no title" charset="utf-8" />
<style type="text/css" media="screen">
body {
font-size: 12px;
}
#wrapper {
padding: 10px;
}
</style>
</head>
<body>
<div id="wrapper">
<h1>Alloy - @module.base.name.camelcase@ Demo</h1>
<div id="demo"></div>
</div>
<script type="text/javascript" charset="utf-8">
AUI().ready('@module.name@', function(A) {
});
</script>
</body>
</html>
## Instruction:
Add class instantiation on the module template html
git-svn-id: eaac6977d0b6fa3343c402860739874625caf655@46499 05bdf26c-840f-0410-9ced-eb539d925f36
## Code After:
<!DOCTYPE html>
<html>
<head>
<script src="../../build/yui/yui.js" type="text/javascript"></script>
<script src="../../build/aui-base/aui-base.js" type="text/javascript"></script>
<link rel="stylesheet" href="../../build/aui-skins/core/css/main.css" type="text/css" media="screen" title="no title" charset="utf-8" />
<style type="text/css" media="screen">
body {
font-size: 12px;
}
#wrapper {
padding: 10px;
}
</style>
</head>
<body>
<div id="wrapper">
<h1>Alloy - @module.base.name.camelcase@ Demo</h1>
<div id="demo"></div>
</div>
<script type="text/javascript" charset="utf-8">
AUI().ready('@module.name@', function(A) {
var component = new A.@module.base.name.camelcase@(
{
}
)
.render();
});
</script>
</body>
</html> | <!DOCTYPE html>
<html>
<head>
<script src="../../build/yui/yui.js" type="text/javascript"></script>
<script src="../../build/aui-base/aui-base.js" type="text/javascript"></script>
<link rel="stylesheet" href="../../build/aui-skins/core/css/main.css" type="text/css" media="screen" title="no title" charset="utf-8" />
<style type="text/css" media="screen">
body {
font-size: 12px;
}
#wrapper {
padding: 10px;
}
</style>
</head>
<body>
<div id="wrapper">
<h1>Alloy - @module.base.name.camelcase@ Demo</h1>
<div id="demo"></div>
</div>
<script type="text/javascript" charset="utf-8">
AUI().ready('@module.name@', function(A) {
+ var component = new A.@module.base.name.camelcase@(
+ {
+
+ }
+ )
+ .render();
+
});
</script>
</body>
</html> | 7 | 0.179487 | 7 | 0 |
352ec1ebbeb0f9e96e72ce7ae50c67b2e935aab6 | app/assets/javascripts/lib/models/gquery.coffee | app/assets/javascripts/lib/models/gquery.coffee | class @Gquery extends Backbone.Model
initialize : ->
window.gqueries.add(this)
@references = 1
future_value: -> @get('future')
present_value: -> @get 'present'
safe_present_value: =>
x = @present_value()
# if @get('key') == 'chp_heat_and_electricity_from_wet_biomass'
# return (if @is_acceptable_value(x) then -x else 0)
if @is_acceptable_value(x) then x else 0
safe_future_value: =>
x = @future_value()
if @is_acceptable_value(x) then x else 0
handle_api_result : (result) ->
@set
present: result.present
future: result.future
unit : result.unit
is_acceptable_value : (n) ->
return true if _.isBoolean(n)
x = parseInt(n, 10)
_.isNumber(x) && !_.isNaN(x)
# cocoa retain-release clone
retain: => @references += 1
release: => @references -= 1
class GqueryList extends Backbone.Collection
model : Gquery
with_key: (key) => @find (g) -> g.get('key') == key
keys: => _.compact(@pluck('key'))
find_or_create_by_key: (key, owner) =>
if g = @with_key key
g.retain()
return g
else
return new Gquery
key: key
cleanup: =>
@each (g) =>
@remove g if g.references == 0
window.gqueries = new GqueryList
| class @Gquery extends Backbone.Model
initialize : ->
window.gqueries.add(this)
@references = 1
future_value: -> @get('future')
present_value: -> @get 'present'
safe_present_value: =>
x = @present_value()
if @is_acceptable_value(x) then x else 0
safe_future_value: =>
x = @future_value()
if @is_acceptable_value(x) then x else 0
handle_api_result : (result) ->
@set
present: result.present
future: result.future
unit : result.unit
is_acceptable_value : (n) ->
return true if _.isBoolean(n)
x = parseInt(n, 10)
_.isNumber(x) && !_.isNaN(x)
# cocoa retain-release clone
retain: => @references += 1
release: => @references -= 1
class GqueryList extends Backbone.Collection
model : Gquery
with_key: (key) => @find (g) -> g.get('key') == key
keys: => _.compact(@pluck('key'))
find_or_create_by_key: (key, owner) =>
if g = @with_key key
g.retain()
return g
else
return new Gquery
key: key
cleanup: =>
@each (g) =>
@remove g if g.references == 0
window.gqueries = new GqueryList
| Remove commented code from Gquery | Remove commented code from Gquery
| CoffeeScript | mit | quintel/etmodel,quintel/etmodel,quintel/etmodel,quintel/etmodel | coffeescript | ## Code Before:
class @Gquery extends Backbone.Model
initialize : ->
window.gqueries.add(this)
@references = 1
future_value: -> @get('future')
present_value: -> @get 'present'
safe_present_value: =>
x = @present_value()
# if @get('key') == 'chp_heat_and_electricity_from_wet_biomass'
# return (if @is_acceptable_value(x) then -x else 0)
if @is_acceptable_value(x) then x else 0
safe_future_value: =>
x = @future_value()
if @is_acceptable_value(x) then x else 0
handle_api_result : (result) ->
@set
present: result.present
future: result.future
unit : result.unit
is_acceptable_value : (n) ->
return true if _.isBoolean(n)
x = parseInt(n, 10)
_.isNumber(x) && !_.isNaN(x)
# cocoa retain-release clone
retain: => @references += 1
release: => @references -= 1
class GqueryList extends Backbone.Collection
model : Gquery
with_key: (key) => @find (g) -> g.get('key') == key
keys: => _.compact(@pluck('key'))
find_or_create_by_key: (key, owner) =>
if g = @with_key key
g.retain()
return g
else
return new Gquery
key: key
cleanup: =>
@each (g) =>
@remove g if g.references == 0
window.gqueries = new GqueryList
## Instruction:
Remove commented code from Gquery
## Code After:
class @Gquery extends Backbone.Model
initialize : ->
window.gqueries.add(this)
@references = 1
future_value: -> @get('future')
present_value: -> @get 'present'
safe_present_value: =>
x = @present_value()
if @is_acceptable_value(x) then x else 0
safe_future_value: =>
x = @future_value()
if @is_acceptable_value(x) then x else 0
handle_api_result : (result) ->
@set
present: result.present
future: result.future
unit : result.unit
is_acceptable_value : (n) ->
return true if _.isBoolean(n)
x = parseInt(n, 10)
_.isNumber(x) && !_.isNaN(x)
# cocoa retain-release clone
retain: => @references += 1
release: => @references -= 1
class GqueryList extends Backbone.Collection
model : Gquery
with_key: (key) => @find (g) -> g.get('key') == key
keys: => _.compact(@pluck('key'))
find_or_create_by_key: (key, owner) =>
if g = @with_key key
g.retain()
return g
else
return new Gquery
key: key
cleanup: =>
@each (g) =>
@remove g if g.references == 0
window.gqueries = new GqueryList
| class @Gquery extends Backbone.Model
initialize : ->
window.gqueries.add(this)
@references = 1
future_value: -> @get('future')
present_value: -> @get 'present'
safe_present_value: =>
x = @present_value()
- # if @get('key') == 'chp_heat_and_electricity_from_wet_biomass'
- # return (if @is_acceptable_value(x) then -x else 0)
if @is_acceptable_value(x) then x else 0
safe_future_value: =>
x = @future_value()
if @is_acceptable_value(x) then x else 0
handle_api_result : (result) ->
@set
present: result.present
future: result.future
unit : result.unit
is_acceptable_value : (n) ->
return true if _.isBoolean(n)
x = parseInt(n, 10)
_.isNumber(x) && !_.isNaN(x)
# cocoa retain-release clone
retain: => @references += 1
release: => @references -= 1
class GqueryList extends Backbone.Collection
model : Gquery
with_key: (key) => @find (g) -> g.get('key') == key
keys: => _.compact(@pluck('key'))
find_or_create_by_key: (key, owner) =>
if g = @with_key key
g.retain()
return g
else
return new Gquery
key: key
cleanup: =>
@each (g) =>
@remove g if g.references == 0
window.gqueries = new GqueryList | 2 | 0.037736 | 0 | 2 |
f1a7775cec1e4614de38262e63e8cbe337cf05b2 | .travis/before_install.sh | .travis/before_install.sh |
set -ev
composer global require friendsofphp/php-cs-fixer
export PATH="${PATH}:${HOME}/.composer/vendor/bin" |
set -ev
composer global require friendsofphp/php-cs-fixer
export PATH="$PATH:$HOME/.config/composer/vendor/bin:$HOME/.composer/vendor/bin" | Add composer bin to PATH | Add composer bin to PATH
| Shell | mit | leroy0211/Slim-Symfony-Dependency-Injection-Bridge,leroy0211/Slim-Symfony-Dependency-Injection-Bridge | shell | ## Code Before:
set -ev
composer global require friendsofphp/php-cs-fixer
export PATH="${PATH}:${HOME}/.composer/vendor/bin"
## Instruction:
Add composer bin to PATH
## Code After:
set -ev
composer global require friendsofphp/php-cs-fixer
export PATH="$PATH:$HOME/.config/composer/vendor/bin:$HOME/.composer/vendor/bin" |
set -ev
composer global require friendsofphp/php-cs-fixer
- export PATH="${PATH}:${HOME}/.composer/vendor/bin"
+ export PATH="$PATH:$HOME/.config/composer/vendor/bin:$HOME/.composer/vendor/bin" | 2 | 0.333333 | 1 | 1 |
b94c79fce89abe8eb182c2fb92c7df0007cb196c | circle.yml | circle.yml | test:
override:
- tox
dependencies:
override:
- pip install tox tox-pyenv
- export PY_V='from platform import python_version as v; print(v())'
- pyenv local $(python2.7 -c "$PY_V") $(python3.5 -c "$PY_V")
| test:
override:
- tox
dependencies:
override:
- pip install tox tox-pyenv
- pyenv local 2.7.12 3.5.2
| Revert "Automate pyenv version detection" | Revert "Automate pyenv version detection"
This reverts commit 210beb632385e2cd65b74eb054b2f452d578556b.
| YAML | apache-2.0 | kislyuk/aegea,wholebiome/aegea,wholebiome/aegea,kislyuk/aegea,kislyuk/aegea,wholebiome/aegea | yaml | ## Code Before:
test:
override:
- tox
dependencies:
override:
- pip install tox tox-pyenv
- export PY_V='from platform import python_version as v; print(v())'
- pyenv local $(python2.7 -c "$PY_V") $(python3.5 -c "$PY_V")
## Instruction:
Revert "Automate pyenv version detection"
This reverts commit 210beb632385e2cd65b74eb054b2f452d578556b.
## Code After:
test:
override:
- tox
dependencies:
override:
- pip install tox tox-pyenv
- pyenv local 2.7.12 3.5.2
| test:
override:
- tox
dependencies:
override:
- pip install tox tox-pyenv
+ - pyenv local 2.7.12 3.5.2
- - export PY_V='from platform import python_version as v; print(v())'
- - pyenv local $(python2.7 -c "$PY_V") $(python3.5 -c "$PY_V") | 3 | 0.333333 | 1 | 2 |
6d36617c74af0fbd3b8a0f69ebf9d78c9c004029 | app/models/import/central_heat_network_builder.rb | app/models/import/central_heat_network_builder.rb | class Import
class CentralHeatNetworkBuilder
TECHNOLOGIES = %w(central_heat_network_dispatchable
central_heat_network_must_run)
ATTRIBUTES = Hash[
[ CentralHeatNetworkDispatchableCapacityAttribute,
CentralHeatNetworkMustRunHeatProductionAttribute].map do |attr|
[attr.remote_name, attr]
end
]
def initialize(scenario_id)
@scenario_id = scenario_id
end
def self.build(scenario_id)
self.new(scenario_id).build_technologies
end
def build_technologies
technologies.map(&method(:build_technology))
end
private
def build_technology(technology)
defaults = technology.defaults.merge('key' => technology.key)
attributes = technology.importable_gqueries
.each_with_object(defaults) do |(attr, query), hash|
hash[attr] = ATTRIBUTES[query].call(gqueries)
end
# Must-runs need a profile; since there is normally only one instance of
# each must-run, assign the first suitable profile.
if (profile = TechnologyProfile.where(technology: technology.key).first)
attributes['units'] = 1.0
attributes['profile'] = profile.load_profile_id
end
attributes
end
def technologies
@technologies ||= Technology.all.select do |technology|
TECHNOLOGIES.include?(technology.key)
end
end
def gqueries
@gqueries ||= GqueryRequester.new(TECHNOLOGIES).request(id: @scenario_id)
end
end
end
| class Import
class CentralHeatNetworkBuilder
TECHNOLOGIES = %w(
central_heat_network_dispatchable
central_heat_network_must_run
).freeze
ATTRIBUTES = Hash[
[ CentralHeatNetworkDispatchableCapacityAttribute,
CentralHeatNetworkMustRunHeatProductionAttribute].map do |attr|
[attr.remote_name, attr]
end
]
def initialize(scenario_id)
@scenario_id = scenario_id
end
def self.build(scenario_id)
new(scenario_id).build_technologies
end
def build_technologies
technologies.map(&method(:build_technology))
end
private
def build_technology(technology)
defaults = technology.defaults.merge('key' => technology.key)
attributes = technology.importable_gqueries
.each_with_object(defaults) do |(attr, query), hash|
hash[attr] = ATTRIBUTES[query].call(gqueries)
end
# Must-runs need a profile; since there is normally only one instance of
# each must-run, assign the first suitable profile.
if (profile = TechnologyProfile.where(technology: technology.key).first)
attributes['units'] = 1.0
attributes['profile'] = profile.load_profile_id
end
attributes
end
def technologies
@technologies ||= Technology.all.select do |technology|
TECHNOLOGIES.include?(technology.key)
end
end
def gqueries
@gqueries ||= GqueryRequester.new(TECHNOLOGIES).request(id: @scenario_id)
end
end
end
| Adjust central heat builder per RuboCop | Adjust central heat builder per RuboCop
| Ruby | mit | quintel/etmoses,quintel/etmoses,quintel/etmoses,quintel/etmoses,quintel/etmoses | ruby | ## Code Before:
class Import
class CentralHeatNetworkBuilder
TECHNOLOGIES = %w(central_heat_network_dispatchable
central_heat_network_must_run)
ATTRIBUTES = Hash[
[ CentralHeatNetworkDispatchableCapacityAttribute,
CentralHeatNetworkMustRunHeatProductionAttribute].map do |attr|
[attr.remote_name, attr]
end
]
def initialize(scenario_id)
@scenario_id = scenario_id
end
def self.build(scenario_id)
self.new(scenario_id).build_technologies
end
def build_technologies
technologies.map(&method(:build_technology))
end
private
def build_technology(technology)
defaults = technology.defaults.merge('key' => technology.key)
attributes = technology.importable_gqueries
.each_with_object(defaults) do |(attr, query), hash|
hash[attr] = ATTRIBUTES[query].call(gqueries)
end
# Must-runs need a profile; since there is normally only one instance of
# each must-run, assign the first suitable profile.
if (profile = TechnologyProfile.where(technology: technology.key).first)
attributes['units'] = 1.0
attributes['profile'] = profile.load_profile_id
end
attributes
end
def technologies
@technologies ||= Technology.all.select do |technology|
TECHNOLOGIES.include?(technology.key)
end
end
def gqueries
@gqueries ||= GqueryRequester.new(TECHNOLOGIES).request(id: @scenario_id)
end
end
end
## Instruction:
Adjust central heat builder per RuboCop
## Code After:
class Import
class CentralHeatNetworkBuilder
TECHNOLOGIES = %w(
central_heat_network_dispatchable
central_heat_network_must_run
).freeze
ATTRIBUTES = Hash[
[ CentralHeatNetworkDispatchableCapacityAttribute,
CentralHeatNetworkMustRunHeatProductionAttribute].map do |attr|
[attr.remote_name, attr]
end
]
def initialize(scenario_id)
@scenario_id = scenario_id
end
def self.build(scenario_id)
new(scenario_id).build_technologies
end
def build_technologies
technologies.map(&method(:build_technology))
end
private
def build_technology(technology)
defaults = technology.defaults.merge('key' => technology.key)
attributes = technology.importable_gqueries
.each_with_object(defaults) do |(attr, query), hash|
hash[attr] = ATTRIBUTES[query].call(gqueries)
end
# Must-runs need a profile; since there is normally only one instance of
# each must-run, assign the first suitable profile.
if (profile = TechnologyProfile.where(technology: technology.key).first)
attributes['units'] = 1.0
attributes['profile'] = profile.load_profile_id
end
attributes
end
def technologies
@technologies ||= Technology.all.select do |technology|
TECHNOLOGIES.include?(technology.key)
end
end
def gqueries
@gqueries ||= GqueryRequester.new(TECHNOLOGIES).request(id: @scenario_id)
end
end
end
| class Import
class CentralHeatNetworkBuilder
+ TECHNOLOGIES = %w(
- TECHNOLOGIES = %w(central_heat_network_dispatchable
? ------------ - ---
+ central_heat_network_dispatchable
- central_heat_network_must_run)
? ---------------- -
+ central_heat_network_must_run
+ ).freeze
ATTRIBUTES = Hash[
[ CentralHeatNetworkDispatchableCapacityAttribute,
CentralHeatNetworkMustRunHeatProductionAttribute].map do |attr|
- [attr.remote_name, attr]
? --
+ [attr.remote_name, attr]
- end
? --
+ end
]
def initialize(scenario_id)
@scenario_id = scenario_id
end
def self.build(scenario_id)
- self.new(scenario_id).build_technologies
? -----
+ new(scenario_id).build_technologies
end
def build_technologies
technologies.map(&method(:build_technology))
end
private
def build_technology(technology)
defaults = technology.defaults.merge('key' => technology.key)
attributes = technology.importable_gqueries
.each_with_object(defaults) do |(attr, query), hash|
hash[attr] = ATTRIBUTES[query].call(gqueries)
end
# Must-runs need a profile; since there is normally only one instance of
# each must-run, assign the first suitable profile.
if (profile = TechnologyProfile.where(technology: technology.key).first)
attributes['units'] = 1.0
attributes['profile'] = profile.load_profile_id
end
attributes
end
def technologies
@technologies ||= Technology.all.select do |technology|
TECHNOLOGIES.include?(technology.key)
end
end
def gqueries
@gqueries ||= GqueryRequester.new(TECHNOLOGIES).request(id: @scenario_id)
end
end
end | 12 | 0.218182 | 7 | 5 |
9cdaaa37c68472b496f804cf5d9b72bc4e57981d | bot/bot.go | bot/bot.go | package bot
import (
"regexp"
)
type Bot struct {
adapter Adapter
Handlers map[*regexp.Regexp]func(msg *Message)
}
func New(adapter Adapter) *Bot {
return &Bot{
adapter,
map[*regexp.Regexp]func(msg *Message){},
}
}
func (b *Bot) Handle(expr string, handler func(msg *Message)) {
b.Handlers[regexp.MustCompile(expr)] = handler
}
func (b *Bot) Listen() {
for {
msg := b.adapter.Listen()
for expr, handler := range b.Handlers {
if expr.MatchString(msg.Text) {
handler(msg)
break
}
}
}
}
func (b *Bot) Reply(msg *Message, text string) {
b.adapter.Reply(msg, text)
}
| package bot
import (
"regexp"
)
type Bot struct {
adapter Adapter
Handlers map[*regexp.Regexp]func(msg *Message)
}
func New(adapter Adapter) *Bot {
return &Bot{
adapter,
map[*regexp.Regexp]func(msg *Message){},
}
}
func (b *Bot) Handle(expr string, handler func(msg *Message)) {
b.Handlers[regexp.MustCompile(`(?i)`+expr)] = handler
}
func (b *Bot) Listen() {
for {
msg := b.adapter.Listen()
for re, handler := range b.Handlers {
if re.MatchString(msg.Text) {
handler(msg)
break
}
}
}
}
func (b *Bot) Reply(msg *Message, text string) {
b.adapter.Reply(msg, text)
}
| Make handlers expr case insensitive | Make handlers expr case insensitive
| Go | mit | esoui/lexicon,esoui/lexicon | go | ## Code Before:
package bot
import (
"regexp"
)
type Bot struct {
adapter Adapter
Handlers map[*regexp.Regexp]func(msg *Message)
}
func New(adapter Adapter) *Bot {
return &Bot{
adapter,
map[*regexp.Regexp]func(msg *Message){},
}
}
func (b *Bot) Handle(expr string, handler func(msg *Message)) {
b.Handlers[regexp.MustCompile(expr)] = handler
}
func (b *Bot) Listen() {
for {
msg := b.adapter.Listen()
for expr, handler := range b.Handlers {
if expr.MatchString(msg.Text) {
handler(msg)
break
}
}
}
}
func (b *Bot) Reply(msg *Message, text string) {
b.adapter.Reply(msg, text)
}
## Instruction:
Make handlers expr case insensitive
## Code After:
package bot
import (
"regexp"
)
type Bot struct {
adapter Adapter
Handlers map[*regexp.Regexp]func(msg *Message)
}
func New(adapter Adapter) *Bot {
return &Bot{
adapter,
map[*regexp.Regexp]func(msg *Message){},
}
}
func (b *Bot) Handle(expr string, handler func(msg *Message)) {
b.Handlers[regexp.MustCompile(`(?i)`+expr)] = handler
}
func (b *Bot) Listen() {
for {
msg := b.adapter.Listen()
for re, handler := range b.Handlers {
if re.MatchString(msg.Text) {
handler(msg)
break
}
}
}
}
func (b *Bot) Reply(msg *Message, text string) {
b.adapter.Reply(msg, text)
}
| package bot
import (
"regexp"
)
type Bot struct {
adapter Adapter
Handlers map[*regexp.Regexp]func(msg *Message)
}
func New(adapter Adapter) *Bot {
return &Bot{
adapter,
map[*regexp.Regexp]func(msg *Message){},
}
}
func (b *Bot) Handle(expr string, handler func(msg *Message)) {
- b.Handlers[regexp.MustCompile(expr)] = handler
+ b.Handlers[regexp.MustCompile(`(?i)`+expr)] = handler
? +++++++
}
func (b *Bot) Listen() {
for {
msg := b.adapter.Listen()
- for expr, handler := range b.Handlers {
? ---
+ for re, handler := range b.Handlers {
? +
- if expr.MatchString(msg.Text) {
? ---
+ if re.MatchString(msg.Text) {
? +
handler(msg)
break
}
}
}
}
func (b *Bot) Reply(msg *Message, text string) {
b.adapter.Reply(msg, text)
} | 6 | 0.162162 | 3 | 3 |
fc3e3937f52da79d8e394ff24ee89ee881f159ba | app/assets/stylesheets/spree/frontend/spree_static_content.css | app/assets/stylesheets/spree/frontend/spree_static_content.css | /*
*= require spree/frontend
*/
/* Sidebar - .list-group-item styles */
.pages-list.list-group > li:last-child {
margin-bottom: 0;
border-bottom-right-radius: 4px;
border-bottom-left-radius: 4px;
}
.pages-list.list-group > li {
position: relative;
display: block;
padding: 10px 15px;
margin-bottom: -1px;
background-color: #FFF;
border: 1px solid #DDD;
}
| /* Sidebar - .list-group-item styles */
.pages-list.list-group > li:last-child {
margin-bottom: 0;
border-bottom-right-radius: 4px;
border-bottom-left-radius: 4px;
}
.pages-list.list-group > li {
position: relative;
display: block;
padding: 10px 15px;
margin-bottom: -1px;
background-color: #FFF;
border: 1px solid #DDD;
}
| Remove the "require spree/frontend" directive from the main css. This will allow Spree applications that use different front end stylesheet to install the extension | Remove the "require spree/frontend" directive from the main css.
This will allow Spree applications that use different front end stylesheet to install the extension
| CSS | bsd-3-clause | adavanisanti/spree_static_content,srna/spree_static_content,dgross881/spree_static_content,adavanisanti/spree_static_content,Whelton/spree_static_content,macmm/spree_static_content,macmm/spree_static_content,APohio/spree_static_content,firmanm/spree_static_content,moneyspyder/spree_static_content,srna/spree_static_content,odk211/spree_static_content,dgross881/spree_static_content,DynamoMTL/spree_static_content,moneyspyder/spree_static_content,pero-ict-solutions/spree-static-content,dgross881/spree_static_content,macmm/spree_static_content,moneyspyder/spree_static_content,Partywirks/spree_static_content,APohio/spree_static_content,extendi/spree_static_content,extendi/spree_static_content,DynamoMTL/spree_static_content,APohio/spree_static_content,tbhatia/spree-static-pages,extendi/spree_static_content,karlitxo/spree_static_content,Whelton/spree_static_content,odk211/spree_static_content,tbhatia/spree-static-pages,adavanisanti/spree_static_content,odk211/spree_static_content,firmanm/spree_static_content,pero-ict-solutions/spree-static-content,karlitxo/spree_static_content,Partywirks/spree_static_content,karlitxo/spree_static_content,tbhatia/spree-static-pages,spree-contrib/spree_static_content,firmanm/spree_static_content,srna/spree_static_content,DynamoMTL/spree_static_content,spree-contrib/spree_static_content,Whelton/spree_static_content,Partywirks/spree_static_content,spree-contrib/spree_static_content | css | ## Code Before:
/*
*= require spree/frontend
*/
/* Sidebar - .list-group-item styles */
.pages-list.list-group > li:last-child {
margin-bottom: 0;
border-bottom-right-radius: 4px;
border-bottom-left-radius: 4px;
}
.pages-list.list-group > li {
position: relative;
display: block;
padding: 10px 15px;
margin-bottom: -1px;
background-color: #FFF;
border: 1px solid #DDD;
}
## Instruction:
Remove the "require spree/frontend" directive from the main css.
This will allow Spree applications that use different front end stylesheet to install the extension
## Code After:
/* Sidebar - .list-group-item styles */
.pages-list.list-group > li:last-child {
margin-bottom: 0;
border-bottom-right-radius: 4px;
border-bottom-left-radius: 4px;
}
.pages-list.list-group > li {
position: relative;
display: block;
padding: 10px 15px;
margin-bottom: -1px;
background-color: #FFF;
border: 1px solid #DDD;
}
| - /*
- *= require spree/frontend
- */
-
/* Sidebar - .list-group-item styles */
.pages-list.list-group > li:last-child {
margin-bottom: 0;
border-bottom-right-radius: 4px;
border-bottom-left-radius: 4px;
}
.pages-list.list-group > li {
position: relative;
display: block;
padding: 10px 15px;
margin-bottom: -1px;
background-color: #FFF;
border: 1px solid #DDD;
} | 4 | 0.210526 | 0 | 4 |
7c8bbe32fab41051e8b0eb5a3ca15053bbc800dc | test/Tests.hx | test/Tests.hx | package ;
import buddy.Buddy;
class Tests implements Buddy<[BaseTest, InheritanceTest, InlineTest, MetaTest, SignalTest, TestProperty, ChainBindTest, ExprBindTest]> {} | package ;
import buddy.BuddySuite;
import buddy.SuitesRunner;
@:build(buddy.GenerateMain.withSuites([
new BaseTest(),
new InheritanceTest(),
new InlineTest(),
new MetaTest(),
new SignalTest(),
new TestProperty(),
new ChainBindTest(),
new ExprBindTest(),
]))
class Tests extends BuddySuite {} | Revert "buddy tests file fix" | Revert "buddy tests file fix"
This reverts commit e972488062f7e18a8302bfce08e8155851aaf4ee.
| Haxe | mit | profelis/bindx2 | haxe | ## Code Before:
package ;
import buddy.Buddy;
class Tests implements Buddy<[BaseTest, InheritanceTest, InlineTest, MetaTest, SignalTest, TestProperty, ChainBindTest, ExprBindTest]> {}
## Instruction:
Revert "buddy tests file fix"
This reverts commit e972488062f7e18a8302bfce08e8155851aaf4ee.
## Code After:
package ;
import buddy.BuddySuite;
import buddy.SuitesRunner;
@:build(buddy.GenerateMain.withSuites([
new BaseTest(),
new InheritanceTest(),
new InlineTest(),
new MetaTest(),
new SignalTest(),
new TestProperty(),
new ChainBindTest(),
new ExprBindTest(),
]))
class Tests extends BuddySuite {} | package ;
- import buddy.Buddy;
+ import buddy.BuddySuite;
? +++++
+ import buddy.SuitesRunner;
- class Tests implements Buddy<[BaseTest, InheritanceTest, InlineTest, MetaTest, SignalTest, TestProperty, ChainBindTest, ExprBindTest]> {}
+ @:build(buddy.GenerateMain.withSuites([
+ new BaseTest(),
+ new InheritanceTest(),
+ new InlineTest(),
+ new MetaTest(),
+ new SignalTest(),
+ new TestProperty(),
+ new ChainBindTest(),
+ new ExprBindTest(),
+ ]))
+ class Tests extends BuddySuite {} | 15 | 3 | 13 | 2 |
7ecab2d7891686a748e5d1245f216ac3fcbf9a79 | README.md | README.md |
My dotfiles.
|
These are my dotfiles.
What used to be a fork of [holman/dotfiles](https://github.com/holman/dotfiles) has now become a standalone set, rebuilt from scratch.
## what do we have here?
Lots of stuff, but the core is zsh, vim, and tmux.
## how does it work?
Inspired by Zach Holman's structure, my dotfiles are organized by "topics". Each topic is a folder, and a topic is usually a piece of software. Within each folder are files related to the topic, which often include:
- `install.sh`
- `link.sh`
- `*.symlink`
- `*.zsh`
`script/install` does the majority of the work, first installing Homebrew (if needed), then finding all of the various `install.sh` scripts and running them.
`script/link` first symlinks the dotfiles folder to `~/.dotfiles`. Then it finds all of the `*.symlink` files and symlinks them to the home directory, renaming each to remove `.symlink` and place a dot in front. Lastly, it finds all of the various `link.sh` scripts and runs them.
`zsh/zshrc.symlink` handles sourcing the various `*.zsh` files, including aliases and completions.
## how do I get started?
```
git clone https://github.com/redhotvengeance/dotfiles.git
cd dotfiles
./bootstrap
```
You can clone the repo anywhere you want. The install process will symlink it to `~/.dotfiles` for you.
`./bootstrap` should handle everything, including the installing and the linking. It'll even set defaults for macOS and check for updates in the Mac App Store. It's worthwhile to reboot the machine after bootstrapping so that all of the settings take effect.
## can I help?
I always welcome pull requests and suggestions, but keep in mind that these are _my_ dotfiles. This repo is not setup like Holman's—it is not meant to be forked for your own usage. Forking dotfiles got me started, but I've arrived at the conclusion that dotfiles are better when created from scratch. Borrow, steal, and get inspired by other dotfiles, but make sure you understand each piece that makes _your_ dotfiles, well, _yours_. It’s worth it.
## many thanks
These dotfiles are highly inspired by Zach Holman's, in addition to [Nick Nisi's](https://github.com/nicknisi/dotfiles). | Update readme with basic info | Update readme with basic info
| Markdown | mit | redhotvengeance/dotfiles | markdown | ## Code Before:
My dotfiles.
## Instruction:
Update readme with basic info
## Code After:
These are my dotfiles.
What used to be a fork of [holman/dotfiles](https://github.com/holman/dotfiles) has now become a standalone set, rebuilt from scratch.
## what do we have here?
Lots of stuff, but the core is zsh, vim, and tmux.
## how does it work?
Inspired by Zach Holman's structure, my dotfiles are organized by "topics". Each topic is a folder, and a topic is usually a piece of software. Within each folder are files related to the topic, which often include:
- `install.sh`
- `link.sh`
- `*.symlink`
- `*.zsh`
`script/install` does the majority of the work, first installing Homebrew (if needed), then finding all of the various `install.sh` scripts and running them.
`script/link` first symlinks the dotfiles folder to `~/.dotfiles`. Then it finds all of the `*.symlink` files and symlinks them to the home directory, renaming each to remove `.symlink` and place a dot in front. Lastly, it finds all of the various `link.sh` scripts and runs them.
`zsh/zshrc.symlink` handles sourcing the various `*.zsh` files, including aliases and completions.
## how do I get started?
```
git clone https://github.com/redhotvengeance/dotfiles.git
cd dotfiles
./bootstrap
```
You can clone the repo anywhere you want. The install process will symlink it to `~/.dotfiles` for you.
`./bootstrap` should handle everything, including the installing and the linking. It'll even set defaults for macOS and check for updates in the Mac App Store. It's worthwhile to reboot the machine after bootstrapping so that all of the settings take effect.
## can I help?
I always welcome pull requests and suggestions, but keep in mind that these are _my_ dotfiles. This repo is not setup like Holman's—it is not meant to be forked for your own usage. Forking dotfiles got me started, but I've arrived at the conclusion that dotfiles are better when created from scratch. Borrow, steal, and get inspired by other dotfiles, but make sure you understand each piece that makes _your_ dotfiles, well, _yours_. It’s worth it.
## many thanks
These dotfiles are highly inspired by Zach Holman's, in addition to [Nick Nisi's](https://github.com/nicknisi/dotfiles). |
+ These are my dotfiles.
+
+ What used to be a fork of [holman/dotfiles](https://github.com/holman/dotfiles) has now become a standalone set, rebuilt from scratch.
+
+ ## what do we have here?
+
+ Lots of stuff, but the core is zsh, vim, and tmux.
+
+ ## how does it work?
+
+ Inspired by Zach Holman's structure, my dotfiles are organized by "topics". Each topic is a folder, and a topic is usually a piece of software. Within each folder are files related to the topic, which often include:
+
+ - `install.sh`
+ - `link.sh`
+ - `*.symlink`
+ - `*.zsh`
+
+ `script/install` does the majority of the work, first installing Homebrew (if needed), then finding all of the various `install.sh` scripts and running them.
+
+ `script/link` first symlinks the dotfiles folder to `~/.dotfiles`. Then it finds all of the `*.symlink` files and symlinks them to the home directory, renaming each to remove `.symlink` and place a dot in front. Lastly, it finds all of the various `link.sh` scripts and runs them.
+
+ `zsh/zshrc.symlink` handles sourcing the various `*.zsh` files, including aliases and completions.
+
+ ## how do I get started?
+
+ ```
+ git clone https://github.com/redhotvengeance/dotfiles.git
- My dotfiles.
? ^^ -
+ cd dotfiles
? ^^
+ ./bootstrap
+ ```
+
+ You can clone the repo anywhere you want. The install process will symlink it to `~/.dotfiles` for you.
+
+ `./bootstrap` should handle everything, including the installing and the linking. It'll even set defaults for macOS and check for updates in the Mac App Store. It's worthwhile to reboot the machine after bootstrapping so that all of the settings take effect.
+
+ ## can I help?
+
+ I always welcome pull requests and suggestions, but keep in mind that these are _my_ dotfiles. This repo is not setup like Holman's—it is not meant to be forked for your own usage. Forking dotfiles got me started, but I've arrived at the conclusion that dotfiles are better when created from scratch. Borrow, steal, and get inspired by other dotfiles, but make sure you understand each piece that makes _your_ dotfiles, well, _yours_. It’s worth it.
+
+ ## many thanks
+
+ These dotfiles are highly inspired by Zach Holman's, in addition to [Nick Nisi's](https://github.com/nicknisi/dotfiles). | 43 | 21.5 | 42 | 1 |
9b4e2dbe974f6759f7922ea68e3ded4292122393 | coding/emacs/NOTES.txt | coding/emacs/NOTES.txt | ________________________________________________________________________
This file is part of Logtalk <http://logtalk.org/>
Logtalk is free software. You can redistribute it and/or modify it under
the terms of the FSF GNU General Public License 3 (plus some additional
terms per section 7). Consult the `LICENSE.txt` file for details.
________________________________________________________________________
This directory contains a "logtalk.el" file that provides syntax
highlighting for editing Logtalk source files with the Emacs text
editor:
http://www.gnu.org/software/emacs/emacs.html
This support file is dual-licensed under the GNU General Public
License 3 and the Emacs license.
To install follow the instructions contained in the file itself.
THIS FONT-LOCK CONFIGURATION FILE IS UNDER DEVELOPMENT.
| ________________________________________________________________________
This file is part of Logtalk <http://logtalk.org/>
Logtalk is free software. You can redistribute it and/or modify it under
the terms of the FSF GNU General Public License 3 (plus some additional
terms per section 7). Consult the `LICENSE.txt` file for details.
________________________________________________________________________
This directory contains a `logtalk.el` file that provides syntax
highlighting for editing Logtalk source files with the Emacs text
editor:
http://www.gnu.org/software/emacs/emacs.html
This support file is dual-licensed under the GNU General Public
License 3 and the Emacs license.
To install follow the instructions contained in the file itself.
Emacs regular expressions don't support look-ahead assertions, which
result is syntax coloring errors in valid code such as the `0'"` term. | Add note on Emacs regular expression limitations for syntax coloring | Add note on Emacs regular expression limitations for syntax coloring
| Text | apache-2.0 | LogtalkDotOrg/logtalk3,LogtalkDotOrg/logtalk3,LogtalkDotOrg/logtalk3,LogtalkDotOrg/logtalk3,LogtalkDotOrg/logtalk3,LogtalkDotOrg/logtalk3,LogtalkDotOrg/logtalk3 | text | ## Code Before:
________________________________________________________________________
This file is part of Logtalk <http://logtalk.org/>
Logtalk is free software. You can redistribute it and/or modify it under
the terms of the FSF GNU General Public License 3 (plus some additional
terms per section 7). Consult the `LICENSE.txt` file for details.
________________________________________________________________________
This directory contains a "logtalk.el" file that provides syntax
highlighting for editing Logtalk source files with the Emacs text
editor:
http://www.gnu.org/software/emacs/emacs.html
This support file is dual-licensed under the GNU General Public
License 3 and the Emacs license.
To install follow the instructions contained in the file itself.
THIS FONT-LOCK CONFIGURATION FILE IS UNDER DEVELOPMENT.
## Instruction:
Add note on Emacs regular expression limitations for syntax coloring
## Code After:
________________________________________________________________________
This file is part of Logtalk <http://logtalk.org/>
Logtalk is free software. You can redistribute it and/or modify it under
the terms of the FSF GNU General Public License 3 (plus some additional
terms per section 7). Consult the `LICENSE.txt` file for details.
________________________________________________________________________
This directory contains a `logtalk.el` file that provides syntax
highlighting for editing Logtalk source files with the Emacs text
editor:
http://www.gnu.org/software/emacs/emacs.html
This support file is dual-licensed under the GNU General Public
License 3 and the Emacs license.
To install follow the instructions contained in the file itself.
Emacs regular expressions don't support look-ahead assertions, which
result is syntax coloring errors in valid code such as the `0'"` term. | ________________________________________________________________________
This file is part of Logtalk <http://logtalk.org/>
Logtalk is free software. You can redistribute it and/or modify it under
the terms of the FSF GNU General Public License 3 (plus some additional
terms per section 7). Consult the `LICENSE.txt` file for details.
________________________________________________________________________
- This directory contains a "logtalk.el" file that provides syntax
? ^ ^
+ This directory contains a `logtalk.el` file that provides syntax
? ^ ^
highlighting for editing Logtalk source files with the Emacs text
editor:
http://www.gnu.org/software/emacs/emacs.html
This support file is dual-licensed under the GNU General Public
License 3 and the Emacs license.
To install follow the instructions contained in the file itself.
-
- THIS FONT-LOCK CONFIGURATION FILE IS UNDER DEVELOPMENT.
+ Emacs regular expressions don't support look-ahead assertions, which
+ result is syntax coloring errors in valid code such as the `0'"` term. | 6 | 0.26087 | 3 | 3 |
88045d0d3641dc5236a7696b4b40379169134ccb | docs/source/tips.rst | docs/source/tips.rst | Tips and FAQs
=============
| Tips and FAQs
=============
It takes too long time to compile a computational graph. Can I skip it?
-----------------------------------------------------------------------
Chainer does not compile computational graphs, so you cannot skip it, or, I mean, you have already skipped it :).
It seems you have actually seen on-the-fly compilations of CUDA kernels.
CuPy compiles kernels on demand to make kernels optimized to the number of dimensions and element types of input arguments.
Precompilation is not available, because we have to compile an exponential number of kernels to support all CuPy functionalities.
This restriction is unavoidable because Python cannot call CUDA/C++ template functions in generic way.
Note that every framework using CUDA require compilation at some point; the difference between other statically-compiled frameworks (such as cutorch) and Chainer is whether a kernel is compiled at installtion or at the first use.
These compilations should run only at the first use of the kernels.
The compiled binaries are cached to the ``$(HOME)/.cupy/kernel_cache`` directory by default.
If you see that compilations run everytime you run the same script, then the caching is failed.
Please check that the directory is kept as is between multiple executions of the script.
If your home directory is not suited to caching the kernels (e.g. in case that it uses NFS), change the kernel caching directory by setting the ``CUPY_CACHE_DIR`` environment variable to an appropriate path.
| Add a faq on compilation time | Add a faq on compilation time
| reStructuredText | mit | keisuke-umezawa/chainer,wkentaro/chainer,kashif/chainer,niboshi/chainer,muupan/chainer,cemoody/chainer,ktnyt/chainer,truongdq/chainer,tigerneil/chainer,ytoyama/yans_chainer_hackathon,minhpqn/chainer,jnishi/chainer,1986ks/chainer,cupy/cupy,ktnyt/chainer,Kaisuke5/chainer,benob/chainer,tkerola/chainer,aonotas/chainer,hvy/chainer,ronekko/chainer,jnishi/chainer,chainer/chainer,hvy/chainer,keisuke-umezawa/chainer,chainer/chainer,kikusu/chainer,ysekky/chainer,muupan/chainer,tscohen/chainer,niboshi/chainer,hvy/chainer,pfnet/chainer,keisuke-umezawa/chainer,ktnyt/chainer,rezoo/chainer,ktnyt/chainer,wkentaro/chainer,cupy/cupy,AlpacaDB/chainer,wkentaro/chainer,chainer/chainer,wkentaro/chainer,sinhrks/chainer,jnishi/chainer,keisuke-umezawa/chainer,niboshi/chainer,AlpacaDB/chainer,t-abe/chainer,hvy/chainer,delta2323/chainer,t-abe/chainer,anaruse/chainer,cupy/cupy,okuta/chainer,benob/chainer,sinhrks/chainer,okuta/chainer,chainer/chainer,kiyukuta/chainer,sou81821/chainer,laysakura/chainer,okuta/chainer,cupy/cupy,niboshi/chainer,truongdq/chainer,kikusu/chainer,jnishi/chainer,okuta/chainer | restructuredtext | ## Code Before:
Tips and FAQs
=============
## Instruction:
Add a faq on compilation time
## Code After:
Tips and FAQs
=============
It takes too long time to compile a computational graph. Can I skip it?
-----------------------------------------------------------------------
Chainer does not compile computational graphs, so you cannot skip it, or, I mean, you have already skipped it :).
It seems you have actually seen on-the-fly compilations of CUDA kernels.
CuPy compiles kernels on demand to make kernels optimized to the number of dimensions and element types of input arguments.
Precompilation is not available, because we have to compile an exponential number of kernels to support all CuPy functionalities.
This restriction is unavoidable because Python cannot call CUDA/C++ template functions in generic way.
Note that every framework using CUDA require compilation at some point; the difference between other statically-compiled frameworks (such as cutorch) and Chainer is whether a kernel is compiled at installtion or at the first use.
These compilations should run only at the first use of the kernels.
The compiled binaries are cached to the ``$(HOME)/.cupy/kernel_cache`` directory by default.
If you see that compilations run everytime you run the same script, then the caching is failed.
Please check that the directory is kept as is between multiple executions of the script.
If your home directory is not suited to caching the kernels (e.g. in case that it uses NFS), change the kernel caching directory by setting the ``CUPY_CACHE_DIR`` environment variable to an appropriate path.
| Tips and FAQs
=============
+
+ It takes too long time to compile a computational graph. Can I skip it?
+ -----------------------------------------------------------------------
+
+ Chainer does not compile computational graphs, so you cannot skip it, or, I mean, you have already skipped it :).
+
+ It seems you have actually seen on-the-fly compilations of CUDA kernels.
+ CuPy compiles kernels on demand to make kernels optimized to the number of dimensions and element types of input arguments.
+ Precompilation is not available, because we have to compile an exponential number of kernels to support all CuPy functionalities.
+ This restriction is unavoidable because Python cannot call CUDA/C++ template functions in generic way.
+ Note that every framework using CUDA require compilation at some point; the difference between other statically-compiled frameworks (such as cutorch) and Chainer is whether a kernel is compiled at installtion or at the first use.
+
+ These compilations should run only at the first use of the kernels.
+ The compiled binaries are cached to the ``$(HOME)/.cupy/kernel_cache`` directory by default.
+ If you see that compilations run everytime you run the same script, then the caching is failed.
+ Please check that the directory is kept as is between multiple executions of the script.
+ If your home directory is not suited to caching the kernels (e.g. in case that it uses NFS), change the kernel caching directory by setting the ``CUPY_CACHE_DIR`` environment variable to an appropriate path. | 17 | 8.5 | 17 | 0 |
7fcd118c1db1580d09af3cbff2f8d0cd392127dd | docker-compose.yml | docker-compose.yml | version: '2.1'
services:
web:
image: "ket4yii/php-censor:web"
ports:
- "80:80"
networks:
- default
depends_on:
psql:
condition: service_healthy
queue:
condition: service_started
env_file:
- ./env/phpcensor.env
worker:
image: "ket4yii/php-censor:worker"
networks:
- default
depends_on:
psql:
condition: service_healthy
queue:
condition: service_started
env_file:
- ./env/phpcensor.env
psql:
image: "postgres"
user: postgres
networks:
default:
aliases:
- db
env_file:
- ./env/database.env
healthcheck:
test: psql -lqt | cut -d \| -f 1 | grep -qw $$POSTGRES_DB
interval: 10s
timeout: 10s
retries: 3
queue:
image: "schickling/beanstalkd"
networks:
default:
aliases:
- beanstalk
| version: '2.1'
services:
web:
image: "ket4yii/php-censor:web"
ports:
- "80:80"
depends_on:
- db
- queue
env_file:
- ./env/phpcensor.env
worker:
image: "ket4yii/php-censor:worker"
depends_on:
- db
- queue
env_file:
- ./env/phpcensor.env
db:
image: "postgres"
user: postgres
env_file:
- ./env/database.env
queue:
image: "schickling/beanstalkd"
networks:
default:
aliases:
- beanstalk
| Delete healthchecks. Delete redundant things | [compose] Delete healthchecks. Delete redundant things
| YAML | mit | ket4yii/docker-php-censor | yaml | ## Code Before:
version: '2.1'
services:
web:
image: "ket4yii/php-censor:web"
ports:
- "80:80"
networks:
- default
depends_on:
psql:
condition: service_healthy
queue:
condition: service_started
env_file:
- ./env/phpcensor.env
worker:
image: "ket4yii/php-censor:worker"
networks:
- default
depends_on:
psql:
condition: service_healthy
queue:
condition: service_started
env_file:
- ./env/phpcensor.env
psql:
image: "postgres"
user: postgres
networks:
default:
aliases:
- db
env_file:
- ./env/database.env
healthcheck:
test: psql -lqt | cut -d \| -f 1 | grep -qw $$POSTGRES_DB
interval: 10s
timeout: 10s
retries: 3
queue:
image: "schickling/beanstalkd"
networks:
default:
aliases:
- beanstalk
## Instruction:
[compose] Delete healthchecks. Delete redundant things
## Code After:
version: '2.1'
services:
web:
image: "ket4yii/php-censor:web"
ports:
- "80:80"
depends_on:
- db
- queue
env_file:
- ./env/phpcensor.env
worker:
image: "ket4yii/php-censor:worker"
depends_on:
- db
- queue
env_file:
- ./env/phpcensor.env
db:
image: "postgres"
user: postgres
env_file:
- ./env/database.env
queue:
image: "schickling/beanstalkd"
networks:
default:
aliases:
- beanstalk
| version: '2.1'
services:
web:
image: "ket4yii/php-censor:web"
ports:
- "80:80"
- networks:
- - default
depends_on:
+ - db
- psql:
- condition: service_healthy
- queue:
? ^ -
+ - queue
? ^
- condition: service_started
env_file:
- ./env/phpcensor.env
worker:
image: "ket4yii/php-censor:worker"
- networks:
- - default
depends_on:
+ - db
- psql:
- condition: service_healthy
- queue:
? ^ -
+ - queue
? ^
- condition: service_started
env_file:
- ./env/phpcensor.env
- psql:
+ db:
image: "postgres"
user: postgres
- networks:
- default:
- aliases:
- - db
env_file:
- ./env/database.env
- healthcheck:
- test: psql -lqt | cut -d \| -f 1 | grep -qw $$POSTGRES_DB
- interval: 10s
- timeout: 10s
- retries: 3
queue:
image: "schickling/beanstalkd"
networks:
default:
aliases:
- beanstalk
| 27 | 0.574468 | 5 | 22 |
81fecc378da0a39ea87b75be0323b028349de792 | app/views/downloads/download_linux.html.erb | app/views/downloads/download_linux.html.erb | <%- @section = "downloads" %>
<%- @subsection = "" %>
<div id="main">
<h1>Download for Linux and Unix</h1>
<p>It is easiest to install Git on Linux using the preferred package manager of your Linux distribution.</p>
<h3>Debian/Ubuntu</h3>
<code>$ apt-get install git</code>
<h3>Fedora</h3>
<code>$ yum install git</code> (up to Fedora 21)<br>
<code>$ dnf install git</code> (Fedora 22 and later)
<h3>Gentoo</h3>
<code>$ emerge --ask --verbose dev-vcs/git</code>
<h3>Arch Linux</h3>
<code>$ pacman -S git</code>
<h3>openSUSE</h3>
<code>$ zypper install git</code>
<h3>FreeBSD</h3>
<code>$ cd /usr/ports/devel/git</code><br><code>$ make install</code>
<h3>Solaris 11 Express</h3>
<code>$ pkg install developer/versioning/git</code>
<h3>OpenBSD</h3>
<code>$ pkg_add git</code>
</div>
| <%- @section = "downloads" %>
<%- @subsection = "" %>
<div id="main">
<h1>Download for Linux and Unix</h1>
<p>It is easiest to install Git on Linux using the preferred package manager of your Linux distribution.</p>
<h3>Debian/Ubuntu</h3>
<code>$ apt-get install git</code>
<h3>Fedora</h3>
<code>$ yum install git</code> (up to Fedora 21)<br>
<code>$ dnf install git</code> (Fedora 22 and later)
<h3>Gentoo</h3>
<code>$ emerge --ask --verbose dev-vcs/git</code>
<h3>Arch Linux</h3>
<code>$ pacman -S git</code>
<h3>openSUSE</h3>
<code>$ zypper install git</code>
<h3>FreeBSD</h3>
<code>$ cd /usr/ports/devel/git</code><br><code>$ make install</code>
<h3>Solaris 11 Express</h3>
<code>$ pkg install developer/versioning/git</code>
<h3>OpenBSD</h3>
<code>$ pkg_add git</code>
<h3>Slitaz</h3>
<code>$ tazpkg get-install git</code>
</div>
| Add install instructions for Slitaz | Add install instructions for Slitaz
| HTML+ERB | mit | Mokolea/git-scm.com,mosoft521/gitscm-next,jasonlong/git-scm.com,mosoft521/gitscm-next,git/git-scm.com,git/git-scm.com,jasonlong/git-scm.com,jasonlong/git-scm.com,Mokolea/git-scm.com,Mokolea/git-scm.com,git/git-scm.com,git/git-scm.com,mosoft521/gitscm-next,mosoft521/gitscm-next,Mokolea/git-scm.com,jasonlong/git-scm.com | html+erb | ## Code Before:
<%- @section = "downloads" %>
<%- @subsection = "" %>
<div id="main">
<h1>Download for Linux and Unix</h1>
<p>It is easiest to install Git on Linux using the preferred package manager of your Linux distribution.</p>
<h3>Debian/Ubuntu</h3>
<code>$ apt-get install git</code>
<h3>Fedora</h3>
<code>$ yum install git</code> (up to Fedora 21)<br>
<code>$ dnf install git</code> (Fedora 22 and later)
<h3>Gentoo</h3>
<code>$ emerge --ask --verbose dev-vcs/git</code>
<h3>Arch Linux</h3>
<code>$ pacman -S git</code>
<h3>openSUSE</h3>
<code>$ zypper install git</code>
<h3>FreeBSD</h3>
<code>$ cd /usr/ports/devel/git</code><br><code>$ make install</code>
<h3>Solaris 11 Express</h3>
<code>$ pkg install developer/versioning/git</code>
<h3>OpenBSD</h3>
<code>$ pkg_add git</code>
</div>
## Instruction:
Add install instructions for Slitaz
## Code After:
<%- @section = "downloads" %>
<%- @subsection = "" %>
<div id="main">
<h1>Download for Linux and Unix</h1>
<p>It is easiest to install Git on Linux using the preferred package manager of your Linux distribution.</p>
<h3>Debian/Ubuntu</h3>
<code>$ apt-get install git</code>
<h3>Fedora</h3>
<code>$ yum install git</code> (up to Fedora 21)<br>
<code>$ dnf install git</code> (Fedora 22 and later)
<h3>Gentoo</h3>
<code>$ emerge --ask --verbose dev-vcs/git</code>
<h3>Arch Linux</h3>
<code>$ pacman -S git</code>
<h3>openSUSE</h3>
<code>$ zypper install git</code>
<h3>FreeBSD</h3>
<code>$ cd /usr/ports/devel/git</code><br><code>$ make install</code>
<h3>Solaris 11 Express</h3>
<code>$ pkg install developer/versioning/git</code>
<h3>OpenBSD</h3>
<code>$ pkg_add git</code>
<h3>Slitaz</h3>
<code>$ tazpkg get-install git</code>
</div>
| <%- @section = "downloads" %>
<%- @subsection = "" %>
<div id="main">
<h1>Download for Linux and Unix</h1>
<p>It is easiest to install Git on Linux using the preferred package manager of your Linux distribution.</p>
<h3>Debian/Ubuntu</h3>
<code>$ apt-get install git</code>
<h3>Fedora</h3>
<code>$ yum install git</code> (up to Fedora 21)<br>
<code>$ dnf install git</code> (Fedora 22 and later)
<h3>Gentoo</h3>
<code>$ emerge --ask --verbose dev-vcs/git</code>
<h3>Arch Linux</h3>
<code>$ pacman -S git</code>
<h3>openSUSE</h3>
<code>$ zypper install git</code>
<h3>FreeBSD</h3>
<code>$ cd /usr/ports/devel/git</code><br><code>$ make install</code>
<h3>Solaris 11 Express</h3>
<code>$ pkg install developer/versioning/git</code>
<h3>OpenBSD</h3>
<code>$ pkg_add git</code>
+ <h3>Slitaz</h3>
+ <code>$ tazpkg get-install git</code>
+
</div> | 3 | 0.090909 | 3 | 0 |
44f1116cdc412225d04203e3ecec75ebe65ecbea | django_olcc/olcc/templates/olcc/store_list.html | django_olcc/olcc/templates/olcc/store_list.html | {% extends 'site_base.html' %}
{% load i18n %}
{% block title %}
Stores
{% endblock %}
{% block content %}
<h2>Found {{ stores.count }} store{{ stores|pluralize }} in {{ county|capfirst }} county</h2>
{% for s in stores %}
<section class="store">
<div class="details">
<span class="name">{{ s.name }}</span><br />
<span class="adr">{{ s.address }}</span><br />
<div class="tel">
<a href="tel:{{ s.tel }}">{{ s.phone }}</a>
</div>
<div class="hrs">
{% for h in s.hours_list %}
{{ h }}<br />
{% endfor %}
</div>
</div>
<div class="map">
<a href="//maps.google.com/?q={{ s.address|urlencode }}@{{ s.latitude }},{{ s.longitude }}">
<img
width="350"
height=200"
src="//maps.googleapis.com/maps/api/staticmap?size=350x200&sensor=false®ion=us&markers={{ s.address|urlencode }}" />
</a>
</div>
</section>
{% empty %}
<p>No stores found!</p>
{% endfor %}
{% endblock %}
| {% extends 'site_base.html' %}
{% load i18n %}
{% block title %}
Stores
{% endblock %}
{% block content %}
<h2>Found {{ stores.count }} store{{ stores|pluralize }} in {{ county|capfirst }} county</h2>
{% for s in stores %}
<section class="store">
<div class="details">
<span class="name">{{ s.name }}</span><br />
<span class="adr">{{ s.address }}</span><br />
<div class="tel">
<a href="tel:{{ s.tel }}">{{ s.phone }}</a>
</div>
<div class="hrs">
{% for h in s.hours_list %}
{{ h }}<br />
{% endfor %}
</div>
</div>
<div class="map">
<a href="//maps.google.com/?ll={{ s.latitude }},{{ s.longitude }}&f=d&saddr=My%20Location&daddr={{ s.address|urlencode }}">
<img
width="350"
height=200"
src="//maps.googleapis.com/maps/api/staticmap?size=350x200&sensor=false®ion=us&markers={{ s.address|urlencode }}" />
</a>
</div>
</section>
{% empty %}
<p>No stores found!</p>
{% endfor %}
{% endblock %}
| Update the store list template to link to Google Maps with directions for each map link. | Update the store list template to link to Google Maps with directions for each map link.
| HTML | mit | twaddington/django-olcc,twaddington/django-olcc,twaddington/django-olcc | html | ## Code Before:
{% extends 'site_base.html' %}
{% load i18n %}
{% block title %}
Stores
{% endblock %}
{% block content %}
<h2>Found {{ stores.count }} store{{ stores|pluralize }} in {{ county|capfirst }} county</h2>
{% for s in stores %}
<section class="store">
<div class="details">
<span class="name">{{ s.name }}</span><br />
<span class="adr">{{ s.address }}</span><br />
<div class="tel">
<a href="tel:{{ s.tel }}">{{ s.phone }}</a>
</div>
<div class="hrs">
{% for h in s.hours_list %}
{{ h }}<br />
{% endfor %}
</div>
</div>
<div class="map">
<a href="//maps.google.com/?q={{ s.address|urlencode }}@{{ s.latitude }},{{ s.longitude }}">
<img
width="350"
height=200"
src="//maps.googleapis.com/maps/api/staticmap?size=350x200&sensor=false®ion=us&markers={{ s.address|urlencode }}" />
</a>
</div>
</section>
{% empty %}
<p>No stores found!</p>
{% endfor %}
{% endblock %}
## Instruction:
Update the store list template to link to Google Maps with directions for each map link.
## Code After:
{% extends 'site_base.html' %}
{% load i18n %}
{% block title %}
Stores
{% endblock %}
{% block content %}
<h2>Found {{ stores.count }} store{{ stores|pluralize }} in {{ county|capfirst }} county</h2>
{% for s in stores %}
<section class="store">
<div class="details">
<span class="name">{{ s.name }}</span><br />
<span class="adr">{{ s.address }}</span><br />
<div class="tel">
<a href="tel:{{ s.tel }}">{{ s.phone }}</a>
</div>
<div class="hrs">
{% for h in s.hours_list %}
{{ h }}<br />
{% endfor %}
</div>
</div>
<div class="map">
<a href="//maps.google.com/?ll={{ s.latitude }},{{ s.longitude }}&f=d&saddr=My%20Location&daddr={{ s.address|urlencode }}">
<img
width="350"
height=200"
src="//maps.googleapis.com/maps/api/staticmap?size=350x200&sensor=false®ion=us&markers={{ s.address|urlencode }}" />
</a>
</div>
</section>
{% empty %}
<p>No stores found!</p>
{% endfor %}
{% endblock %}
| {% extends 'site_base.html' %}
{% load i18n %}
{% block title %}
Stores
{% endblock %}
{% block content %}
<h2>Found {{ stores.count }} store{{ stores|pluralize }} in {{ county|capfirst }} county</h2>
{% for s in stores %}
<section class="store">
<div class="details">
<span class="name">{{ s.name }}</span><br />
<span class="adr">{{ s.address }}</span><br />
<div class="tel">
<a href="tel:{{ s.tel }}">{{ s.phone }}</a>
</div>
<div class="hrs">
{% for h in s.hours_list %}
{{ h }}<br />
{% endfor %}
</div>
</div>
<div class="map">
- <a href="//maps.google.com/?q={{ s.address|urlencode }}@{{ s.latitude }},{{ s.longitude }}">
+ <a href="//maps.google.com/?ll={{ s.latitude }},{{ s.longitude }}&f=d&saddr=My%20Location&daddr={{ s.address|urlencode }}">
<img
width="350"
height=200"
src="//maps.googleapis.com/maps/api/staticmap?size=350x200&sensor=false®ion=us&markers={{ s.address|urlencode }}" />
</a>
</div>
</section>
{% empty %}
<p>No stores found!</p>
{% endfor %}
{% endblock %} | 2 | 0.05 | 1 | 1 |
ccf6ce4c719bcea0cb3d951f6dc992baed6ce7c4 | metadata/com.frozendevs.cache.cleaner.txt | metadata/com.frozendevs.cache.cleaner.txt | Categories:System
License:MIT
Web Site:http://www.frozendevs.com
Source Code:https://github.com/Frozen-Developers/android-cache-cleaner
Issue Tracker:https://github.com/Frozen-Developers/android-cache-cleaner/issues
Auto Name:Cache Cleaner
Summary:Clean the cache
Description:
Cache cleaning tool which does not require a rooted device. Features
native Holo look and feel and fast operation.
.
Repo Type:git
Repo:https://github.com/Frozen-Developers/android-cache-cleaner
Build:1.1.7,10
commit=9cf2c6030617570f343c85b471f2b840e896cea0
subdir=CacheCleaner
gradle=main
Auto Update Mode:Version %v
Update Check Mode:Tags
Current Version:1.1.7
Current Version Code:10
| Categories:System
License:MIT
Web Site:http://www.frozendevs.com
Source Code:https://github.com/Frozen-Developers/android-cache-cleaner
Issue Tracker:https://github.com/Frozen-Developers/android-cache-cleaner/issues
Auto Name:Cache Cleaner
Summary:Clean the cache
Description:
Cache cleaning tool which does not require a rooted device. Features
native Holo look and feel and fast operation.
.
Repo Type:git
Repo:https://github.com/Frozen-Developers/android-cache-cleaner
Build:1.1.7,10
commit=9cf2c6030617570f343c85b471f2b840e896cea0
subdir=CacheCleaner
gradle=main
Build:1.1.8,11
commit=1.1.8
subdir=CacheCleaner
gradle=main
Auto Update Mode:Version %v
Update Check Mode:Tags
Current Version:1.1.8
Current Version Code:11
| Update Cache Cleaner to 1.1.8 (11) | Update Cache Cleaner to 1.1.8 (11)
| Text | agpl-3.0 | f-droid/fdroid-data,f-droid/fdroiddata,f-droid/fdroiddata | text | ## Code Before:
Categories:System
License:MIT
Web Site:http://www.frozendevs.com
Source Code:https://github.com/Frozen-Developers/android-cache-cleaner
Issue Tracker:https://github.com/Frozen-Developers/android-cache-cleaner/issues
Auto Name:Cache Cleaner
Summary:Clean the cache
Description:
Cache cleaning tool which does not require a rooted device. Features
native Holo look and feel and fast operation.
.
Repo Type:git
Repo:https://github.com/Frozen-Developers/android-cache-cleaner
Build:1.1.7,10
commit=9cf2c6030617570f343c85b471f2b840e896cea0
subdir=CacheCleaner
gradle=main
Auto Update Mode:Version %v
Update Check Mode:Tags
Current Version:1.1.7
Current Version Code:10
## Instruction:
Update Cache Cleaner to 1.1.8 (11)
## Code After:
Categories:System
License:MIT
Web Site:http://www.frozendevs.com
Source Code:https://github.com/Frozen-Developers/android-cache-cleaner
Issue Tracker:https://github.com/Frozen-Developers/android-cache-cleaner/issues
Auto Name:Cache Cleaner
Summary:Clean the cache
Description:
Cache cleaning tool which does not require a rooted device. Features
native Holo look and feel and fast operation.
.
Repo Type:git
Repo:https://github.com/Frozen-Developers/android-cache-cleaner
Build:1.1.7,10
commit=9cf2c6030617570f343c85b471f2b840e896cea0
subdir=CacheCleaner
gradle=main
Build:1.1.8,11
commit=1.1.8
subdir=CacheCleaner
gradle=main
Auto Update Mode:Version %v
Update Check Mode:Tags
Current Version:1.1.8
Current Version Code:11
| Categories:System
License:MIT
Web Site:http://www.frozendevs.com
Source Code:https://github.com/Frozen-Developers/android-cache-cleaner
Issue Tracker:https://github.com/Frozen-Developers/android-cache-cleaner/issues
Auto Name:Cache Cleaner
Summary:Clean the cache
Description:
Cache cleaning tool which does not require a rooted device. Features
native Holo look and feel and fast operation.
.
Repo Type:git
Repo:https://github.com/Frozen-Developers/android-cache-cleaner
Build:1.1.7,10
commit=9cf2c6030617570f343c85b471f2b840e896cea0
subdir=CacheCleaner
gradle=main
+ Build:1.1.8,11
+ commit=1.1.8
+ subdir=CacheCleaner
+ gradle=main
+
Auto Update Mode:Version %v
Update Check Mode:Tags
- Current Version:1.1.7
? ^
+ Current Version:1.1.8
? ^
- Current Version Code:10
? ^
+ Current Version Code:11
? ^
| 9 | 0.346154 | 7 | 2 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.